| code | apis | extract_api |
|---|---|---|
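Each row below pairs a source snippet (`code`) with the fully-qualified APIs it calls (`apis`) and a list of per-call extraction records (`extract_api`). A minimal sketch of iterating such a dump, assuming it has been exported as a JSON-lines file under the hypothetical name `api_extractions.jl`:

```python
import ast
import pandas as pd

# Hypothetical file name; the dump's actual storage format is an assumption.
df = pd.read_json("api_extractions.jl", lines=True)

for _, row in df.iterrows():
    apis = row["apis"]  # list of fully-qualified API names
    extract = row["extract_api"]
    # extract_api may be serialized as its Python repr; recover the tuples if so
    records = ast.literal_eval(extract) if isinstance(extract, str) else extract
    print(len(apis), "APIs,", len(records), "call sites")
```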
#!/usr/bin/env python
"""test_atom.py: Verify the Atom class functions as it's meant to."""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
import unittest as ut
import numpy as np
from miniapp.system import atom
class TestAtom(ut.TestCase):
"""Unit tests for the Atom class.
"""
def test_1(self):
"""Make a Neon atom and verify the object construction.
"""
atom_type = "Ne"
position_vector = np.array([1.0, 1.0, 1.0])
test_atom = atom.Atom(atom_type, position_vector)
self.assertEqual(test_atom.atom_type, atom_type)
self.assertEqual(test_atom.pos.tolist(), position_vector.tolist())
self.assertEqual(test_atom.Z, atom.nuclear_charge[atom_type])
|
[
"miniapp.system.atom.Atom",
"numpy.array"
] |
[((450, 475), 'numpy.array', 'np.array', (['[1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0])\n', (458, 475), True, 'import numpy as np\n'), ((497, 534), 'miniapp.system.atom.Atom', 'atom.Atom', (['atom_type', 'position_vector'], {}), '(atom_type, position_vector)\n', (506, 534), False, 'from miniapp.system import atom\n')]
|
# main.py
# Author: <NAME>
import app
app.UI().main()
|
[
"app.UI"
] |
[((40, 48), 'app.UI', 'app.UI', ([], {}), '()\n', (46, 48), False, 'import app\n')]
|
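For reference, each extraction record can be unpacked as below, using the single record from the main.py row above. The field names are an interpretation inferred from the rows in this dump (call span in the source, qualified API name, callee as written, parsed positional/keyword arguments, normalized argument text, argument span, a flag that appears to mark aliased imports, and the originating import statement), not an official schema.

```python
import ast

# The single record from the main.py row above, copied verbatim.
raw = "[((40, 48), 'app.UI', 'app.UI', ([], {}), '()\\n', (46, 48), False, 'import app\\n')]"

for rec in ast.literal_eval(raw):
    # Field names below are interpretive, not an official schema.
    call_span, qual_name, callee, (args, kwargs), arg_text, arg_span, aliased, import_stmt = rec
    print(qual_name, call_span, import_stmt.strip())  # app.UI (40, 48) import app
```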
from django.shortcuts import (
render,
get_object_or_404,
HttpResponseRedirect,
redirect,
)
# Create your views here.
from .models import Patient
from .forms import PatientForm
def patient_create_view(request):
context = {}
template_name = "patient_form.html"
form = PatientForm(request.POST or None)
if form.is_valid():
form.save()
return redirect("home")
context["form"] = form
return render(request, template_name, context)
def patient_list_view(request):
context = {}
template_name = "patient_list.html"
context["patients"] = Patient.objects.all()
return render(request, template_name, context)
def patient_detail_view(request, id):
context = {}
template_name = "patient_detail.html"
context["patient"] = Patient.objects.get(id=id)
return render(request, template_name, context)
def patient_update_view(request, id):
context = {}
template_name = "patient_form.html"
obj = get_object_or_404(Patient, id=id)
form = PatientForm(request.POST or None, instance=obj)
if form.is_valid():
form.save()
return HttpResponseRedirect("/" + id)
context["form"] = form
return render(request, template_name, context)
def patient_delete_view(request, id):
context = {}
template_name = "patient_delete.html"
obj = get_object_or_404(Patient, id=id)
context["patient"] = obj
if request.method == "POST":
obj.delete()
return HttpResponseRedirect("/")
return render(request, template_name, context)
|
[
"django.shortcuts.render",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect",
"django.shortcuts.HttpResponseRedirect"
] |
[((448, 487), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (454, 487), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((639, 678), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (645, 678), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((842, 881), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (848, 881), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((989, 1022), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Patient'], {'id': 'id'}), '(Patient, id=id)\n', (1006, 1022), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((1213, 1252), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (1219, 1252), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((1362, 1395), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Patient'], {'id': 'id'}), '(Patient, id=id)\n', (1379, 1395), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((1532, 1571), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (1538, 1571), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((392, 408), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (400, 408), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((1142, 1172), 'django.shortcuts.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + id)"], {}), "('/' + id)\n", (1162, 1172), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n'), ((1494, 1519), 'django.shortcuts.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/"""'], {}), "('/')\n", (1514, 1519), False, 'from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, redirect\n')]
|
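The views above assume a URLconf that passes `id` through as a string (the update view concatenates `"/" + id`) and a route named `home`. A hypothetical `urls.py` consistent with those assumptions; the routes themselves are not part of the original sample:

```python
# urls.py -- hypothetical wiring; paths and the "home" name are assumptions.
from django.urls import path
from .views import (
    patient_create_view, patient_delete_view, patient_detail_view,
    patient_list_view, patient_update_view,
)

urlpatterns = [
    path("", patient_list_view, name="home"),
    path("create/", patient_create_view),
    path("<id>/", patient_detail_view),          # default str converter keeps id a string
    path("<id>/update/", patient_update_view),
    path("<id>/delete/", patient_delete_view),
]
```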
import sys
from youtubesearchpython import VideosSearch as yt_video_search
def search(artist, song):
try:
video_search = yt_video_search(artist + ', ' + song, limit = 1)
return video_search.result()['result']
except: # catch *all* exceptions
e = sys.exc_info()[0]
print( "<p>Error: %s</p>" % e )
return None
|
[
"youtubesearchpython.VideosSearch",
"sys.exc_info"
] |
[((134, 180), 'youtubesearchpython.VideosSearch', 'yt_video_search', (["(artist + ', ' + song)"], {'limit': '(1)'}), "(artist + ', ' + song, limit=1)\n", (149, 180), True, 'from youtubesearchpython import VideosSearch as yt_video_search\n'), ((279, 293), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (291, 293), False, 'import sys\n')]
|
# Generated by Django 3.2.8 on 2021-10-18 20:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Task', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='notes',
name='title',
field=models.CharField(max_length=100, null=True),
),
migrations.AlterField(
model_name='notes',
name='note',
field=models.TextField(max_length=2000),
),
]
|
[
"django.db.models.CharField",
"django.db.models.TextField"
] |
[((318, 361), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)'}), '(max_length=100, null=True)\n', (334, 361), False, 'from django.db import migrations, models\n'), ((480, 513), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2000)'}), '(max_length=2000)\n', (496, 513), False, 'from django.db import migrations, models\n')]
|
from django.db import models
import django.contrib.postgres.fields as dpg
from django.contrib.auth.models import User
# Create your models here.
class Ticket(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
draw_date = models.DateField(null=True)
|
[
"django.db.models.ForeignKey",
"django.db.models.DateField"
] |
[((186, 246), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'null': '(True)'}), '(User, on_delete=models.CASCADE, null=True)\n', (203, 246), False, 'from django.db import models\n'), ((263, 290), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)'}), '(null=True)\n', (279, 290), False, 'from django.db import models\n')]
|
from andes.core import (Algeb, ExtAlgeb, ExtParam, ExtService, ExtState,
IdxParam, Model, ModelData, NumParam,)
from andes.core.block import LagAntiWindupRate, PIAWHardLimit
from andes.core.service import PostInitService
class WTPTA1Data(ModelData):
"""
Pitch control model data.
"""
def __init__(self):
ModelData.__init__(self)
self.rea = IdxParam(mandatory=True,
info='Renewable aerodynamics model idx',
)
self.Kiw = NumParam(default=0.1, info='Pitch-control integral gain',
tex_name='K_{iw}',
unit='p.u.',
)
self.Kpw = NumParam(default=0.0, info='Pitch-control proportional gain',
tex_name='K_{pw}',
unit='p.u.',
)
self.Kic = NumParam(default=0.1, info='Pitch-compensation integral gain',
tex_name='K_{ic}',
unit='p.u.',
)
self.Kpc = NumParam(default=0.0, info='Pitch-compensation proportional gain',
tex_name='K_{pc}',
unit='p.u.',
)
self.Kcc = NumParam(default=0.0, info='Gain for P diff',
tex_name='K_{cc}',
unit='p.u.',
)
self.Tp = NumParam(default=0.3, info='Blade response time const.',
tex_name=r'T_{\theta}',
unit='s',
)
self.thmax = NumParam(default=30.0, info='Max. pitch angle',
tex_name=r'\theta_{max}',
unit='deg.',
vrange=(27, 30),
)
self.thmin = NumParam(default=0.0, info='Min. pitch angle',
tex_name=r'\theta_{min}',
unit='deg.',
)
self.dthmax = NumParam(default=5.0, info='Max. pitch angle rate',
tex_name=r'\theta_{max}',
unit='deg.',
vrange=(5, 10),
)
self.dthmin = NumParam(default=-5.0, info='Min. pitch angle rate',
tex_name=r'\theta_{min}',
unit='deg.',
vrange=(-10, -5),
)
class WTPTA1Model(Model):
"""
Pitch control model equations.
"""
def __init__(self, system, config):
Model.__init__(self, system, config)
self.flags.tds = True
self.group = 'RenPitch'
self.rego = ExtParam(model='RenAerodynamics', src='rego', indexer=self.rea,
export=False,
)
self.ree = ExtParam(model='RenGovernor', src='ree', indexer=self.rego,
export=False,
)
self.wt = ExtState(model='RenGovernor', src='wt', indexer=self.rego,
export=False,
)
self.theta0 = ExtService(model='RenAerodynamics', src='theta0', indexer=self.rea,
)
self.theta = ExtAlgeb(model='RenAerodynamics', src='theta', indexer=self.rea,
export=False,
e_str='-theta0 + LG_y',
ename='theta',
tex_ename=r'\theta',
)
self.Pord = ExtState(model='RenExciter', src='Pord', indexer=self.ree,
)
self.Pref = ExtAlgeb(model='RenExciter', src='Pref', indexer=self.ree,
)
self.PIc = PIAWHardLimit(u='Pord - Pref', kp=self.Kpc, ki=self.Kic,
aw_lower=self.thmin, aw_upper=self.thmax,
lower=self.thmin, upper=self.thmax,
tex_name='PI_c',
info='PI for active power diff compensation',
)
self.wref = Algeb(tex_name=r'\omega_{ref}',
info='optional speed reference',
e_str='wref0 - wref',
v_str='wt',
)
self.wref0 = PostInitService(v_str='wref', info='initial wref')
self.PIw = PIAWHardLimit(u='Kcc * (Pord - Pref) + wt - wref', kp=self.Kpw, ki=self.Kiw,
aw_lower=self.thmin, aw_upper=self.thmax,
lower=self.thmin, upper=self.thmax,
tex_name='PI_w',
info='PI for speed and active power deviation',
)
self.LG = LagAntiWindupRate(u='PIw_y + PIc_y', T=self.Tp, K=1.0,
lower=self.thmin, upper=self.thmax,
rate_lower=self.dthmin, rate_upper=self.dthmax,
tex_name='LG',
info='Output lag anti-windup rate limiter')
# remove warning when pitch angle==0
self.PIc_hl.warn_flags.pop(0)
self.PIc_aw.warn_flags.pop(0)
self.PIw_hl.warn_flags.pop(0)
self.PIw_aw.warn_flags.pop(0)
self.LG_lim.warn_flags.pop(0)
class WTPTA1(WTPTA1Data, WTPTA1Model):
"""
Wind turbine pitch control model.
"""
def __init__(self, system, config):
WTPTA1Data.__init__(self)
WTPTA1Model.__init__(self, system, config)
|
[
"andes.core.ExtAlgeb",
"andes.core.IdxParam",
"andes.core.ExtParam",
"andes.core.service.PostInitService",
"andes.core.Algeb",
"andes.core.NumParam",
"andes.core.block.PIAWHardLimit",
"andes.core.block.LagAntiWindupRate",
"andes.core.ModelData.__init__",
"andes.core.ExtState",
"andes.core.ExtService",
"andes.core.Model.__init__"
] |
[((355, 379), 'andes.core.ModelData.__init__', 'ModelData.__init__', (['self'], {}), '(self)\n', (373, 379), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((400, 465), 'andes.core.IdxParam', 'IdxParam', ([], {'mandatory': '(True)', 'info': '"""Renewable aerodynamics model idx"""'}), "(mandatory=True, info='Renewable aerodynamics model idx')\n", (408, 465), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((544, 637), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.1)', 'info': '"""Pitch-control integral gain"""', 'tex_name': '"""K_{iw}"""', 'unit': '"""p.u."""'}), "(default=0.1, info='Pitch-control integral gain', tex_name='K_{iw}',\n unit='p.u.')\n", (552, 637), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((740, 838), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.0)', 'info': '"""Pitch-control proportional gain"""', 'tex_name': '"""K_{pw}"""', 'unit': '"""p.u."""'}), "(default=0.0, info='Pitch-control proportional gain', tex_name=\n 'K_{pw}', unit='p.u.')\n", (748, 838), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((940, 1039), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.1)', 'info': '"""Pitch-compensation integral gain"""', 'tex_name': '"""K_{ic}"""', 'unit': '"""p.u."""'}), "(default=0.1, info='Pitch-compensation integral gain', tex_name=\n 'K_{ic}', unit='p.u.')\n", (948, 1039), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((1141, 1244), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.0)', 'info': '"""Pitch-compensation proportional gain"""', 'tex_name': '"""K_{pc}"""', 'unit': '"""p.u."""'}), "(default=0.0, info='Pitch-compensation proportional gain', tex_name\n ='K_{pc}', unit='p.u.')\n", (1149, 1244), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((1346, 1423), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.0)', 'info': '"""Gain for P diff"""', 'tex_name': '"""K_{cc}"""', 'unit': '"""p.u."""'}), "(default=0.0, info='Gain for P diff', tex_name='K_{cc}', unit='p.u.')\n", (1354, 1423), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((1529, 1624), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.3)', 'info': '"""Blade response time const."""', 'tex_name': '"""T_{\\\\theta}"""', 'unit': '"""s"""'}), "(default=0.3, info='Blade response time const.', tex_name=\n 'T_{\\\\theta}', unit='s')\n", (1537, 1624), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((1725, 1832), 'andes.core.NumParam', 'NumParam', ([], {'default': '(30.0)', 'info': '"""Max. pitch angle"""', 'tex_name': '"""\\\\theta_{max}"""', 'unit': '"""deg."""', 'vrange': '(27, 30)'}), "(default=30.0, info='Max. pitch angle', tex_name='\\\\theta_{max}',\n unit='deg.', vrange=(27, 30))\n", (1733, 1832), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((1972, 2061), 'andes.core.NumParam', 'NumParam', ([], {'default': '(0.0)', 'info': '"""Min. pitch angle"""', 'tex_name': '"""\\\\theta_{min}"""', 'unit': '"""deg."""'}), "(default=0.0, info='Min. pitch angle', tex_name='\\\\theta_{min}',\n unit='deg.')\n", (1980, 2061), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((2172, 2283), 'andes.core.NumParam', 'NumParam', ([], {'default': '(5.0)', 'info': '"""Max. pitch angle rate"""', 'tex_name': '"""\\\\theta_{max}"""', 'unit': '"""deg."""', 'vrange': '(5, 10)'}), "(default=5.0, info='Max. pitch angle rate', tex_name=\n '\\\\theta_{max}', unit='deg.', vrange=(5, 10))\n", (2180, 2283), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((2427, 2541), 'andes.core.NumParam', 'NumParam', ([], {'default': '(-5.0)', 'info': '"""Min. pitch angle rate"""', 'tex_name': '"""\\\\theta_{min}"""', 'unit': '"""deg."""', 'vrange': '(-10, -5)'}), "(default=-5.0, info='Min. pitch angle rate', tex_name=\n '\\\\theta_{min}', unit='deg.', vrange=(-10, -5))\n", (2435, 2541), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((2791, 2827), 'andes.core.Model.__init__', 'Model.__init__', (['self', 'system', 'config'], {}), '(self, system, config)\n', (2805, 2827), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((2912, 2989), 'andes.core.ExtParam', 'ExtParam', ([], {'model': '"""RenAerodynamics"""', 'src': '"""rego"""', 'indexer': 'self.rea', 'export': '(False)'}), "(model='RenAerodynamics', src='rego', indexer=self.rea, export=False)\n", (2920, 2989), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((3070, 3143), 'andes.core.ExtParam', 'ExtParam', ([], {'model': '"""RenGovernor"""', 'src': '"""ree"""', 'indexer': 'self.rego', 'export': '(False)'}), "(model='RenGovernor', src='ree', indexer=self.rego, export=False)\n", (3078, 3143), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((3221, 3293), 'andes.core.ExtState', 'ExtState', ([], {'model': '"""RenGovernor"""', 'src': '"""wt"""', 'indexer': 'self.rego', 'export': '(False)'}), "(model='RenGovernor', src='wt', indexer=self.rego, export=False)\n", (3229, 3293), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((3373, 3440), 'andes.core.ExtService', 'ExtService', ([], {'model': '"""RenAerodynamics"""', 'src': '"""theta0"""', 'indexer': 'self.rea'}), "(model='RenAerodynamics', src='theta0', indexer=self.rea)\n", (3383, 3440), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((3498, 3641), 'andes.core.ExtAlgeb', 'ExtAlgeb', ([], {'model': '"""RenAerodynamics"""', 'src': '"""theta"""', 'indexer': 'self.rea', 'export': '(False)', 'e_str': '"""-theta0 + LG_y"""', 'ename': '"""theta"""', 'tex_ename': '"""\\\\theta"""'}), "(model='RenAerodynamics', src='theta', indexer=self.rea, export=\n False, e_str='-theta0 + LG_y', ename='theta', tex_ename='\\\\theta')\n", (3506, 3641), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((3810, 3868), 'andes.core.ExtState', 'ExtState', ([], {'model': '"""RenExciter"""', 'src': '"""Pord"""', 'indexer': 'self.ree'}), "(model='RenExciter', src='Pord', indexer=self.ree)\n", (3818, 3868), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((3921, 3979), 'andes.core.ExtAlgeb', 'ExtAlgeb', ([], {'model': '"""RenExciter"""', 'src': '"""Pref"""', 'indexer': 'self.ree'}), "(model='RenExciter', src='Pref', indexer=self.ree)\n", (3929, 3979), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((4031, 4237), 'andes.core.block.PIAWHardLimit', 'PIAWHardLimit', ([], {'u': '"""Pord - Pref"""', 'kp': 'self.Kpc', 'ki': 'self.Kic', 'aw_lower': 'self.thmin', 'aw_upper': 'self.thmax', 'lower': 'self.thmin', 'upper': 'self.thmax', 'tex_name': '"""PI_c"""', 'info': '"""PI for active power diff compensation"""'}), "(u='Pord - Pref', kp=self.Kpc, ki=self.Kic, aw_lower=self.\n thmin, aw_upper=self.thmax, lower=self.thmin, upper=self.thmax,\n tex_name='PI_c', info='PI for active power diff compensation')\n", (4044, 4237), False, 'from andes.core.block import LagAntiWindupRate, PIAWHardLimit\n'), ((4417, 4520), 'andes.core.Algeb', 'Algeb', ([], {'tex_name': '"""\\\\omega_{ref}"""', 'info': '"""optional speed reference"""', 'e_str': '"""wref0 - wref"""', 'v_str': '"""wt"""'}), "(tex_name='\\\\omega_{ref}', info='optional speed reference', e_str=\n 'wref0 - wref', v_str='wt')\n", (4422, 4520), False, 'from andes.core import Algeb, ExtAlgeb, ExtParam, ExtService, ExtState, IdxParam, Model, ModelData, NumParam\n'), ((4643, 4693), 'andes.core.service.PostInitService', 'PostInitService', ([], {'v_str': '"""wref"""', 'info': '"""initial wref"""'}), "(v_str='wref', info='initial wref')\n", (4658, 4693), False, 'from andes.core.service import PostInitService\n'), ((4714, 4942), 'andes.core.block.PIAWHardLimit', 'PIAWHardLimit', ([], {'u': '"""Kcc * (Pord - Pref) + wt - wref"""', 'kp': 'self.Kpw', 'ki': 'self.Kiw', 'aw_lower': 'self.thmin', 'aw_upper': 'self.thmax', 'lower': 'self.thmin', 'upper': 'self.thmax', 'tex_name': '"""PI_w"""', 'info': '"""PI for speed and active power deviation"""'}), "(u='Kcc * (Pord - Pref) + wt - wref', kp=self.Kpw, ki=self.Kiw,\n aw_lower=self.thmin, aw_upper=self.thmax, lower=self.thmin, upper=self.\n thmax, tex_name='PI_w', info='PI for speed and active power deviation')\n", (4727, 4942), False, 'from andes.core.block import LagAntiWindupRate, PIAWHardLimit\n'), ((5120, 5325), 'andes.core.block.LagAntiWindupRate', 'LagAntiWindupRate', ([], {'u': '"""PIw_y + PIc_y"""', 'T': 'self.Tp', 'K': '(1.0)', 'lower': 'self.thmin', 'upper': 'self.thmax', 'rate_lower': 'self.dthmin', 'rate_upper': 'self.dthmax', 'tex_name': '"""LG"""', 'info': '"""Output lag anti-windup rate limiter"""'}), "(u='PIw_y + PIc_y', T=self.Tp, K=1.0, lower=self.thmin,\n upper=self.thmax, rate_lower=self.dthmin, rate_upper=self.dthmax,\n tex_name='LG', info='Output lag anti-windup rate limiter')\n", (5137, 5325), False, 'from andes.core.block import LagAntiWindupRate, PIAWHardLimit\n')]
|
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for dealing with TCIA data."""
import csv
import StringIO
import httplib2
# Labels files that contain breast density labels and UIDs.
_LABEL_PATHS = [
"https://wiki.cancerimagingarchive.net/download/attachments/22516629/mass_case_description_train_set.csv",
"https://wiki.cancerimagingarchive.net/download/attachments/22516629/calc_case_description_train_set.csv",
"https://wiki.cancerimagingarchive.net/download/attachments/22516629/mass_case_description_test_set.csv",
"https://wiki.cancerimagingarchive.net/download/attachments/22516629/calc_case_description_test_set.csv",
]
_BREAST_DENSITY_COLUMN = {"breast_density", "breast density"}
_IMAGE_FILE_PATH_COLUMN = {"image file path"}
# Blacklist a set of study UIDs for training. These have duplicate images that
# cause warnings in AutoML.
_BLACKLISTED_STUDY_UIDS = {
"1.3.6.1.4.1.9590.100.1.2.311909379911326678329538827560440159485",
"1.3.6.1.4.1.9590.100.1.2.16561647310839362507344536782775923598",
"1.3.6.1.4.1.9590.100.1.2.166931122911456605424645792320616925399",
"1.3.6.1.4.1.9590.100.1.2.142065133612748878536601581221344377684",
"1.3.6.1.4.1.9590.100.1.2.67037509913271775014343029030378766029",
"1.3.6.1.4.1.9590.100.1.2.70050574512425734439842863973827726605",
"1.3.6.1.4.1.9590.100.1.2.329070348213711088642769330260826004891",
"1.3.6.1.4.1.9590.100.1.2.407621803913367629615253812872904106953",
"1.3.6.1.4.1.9590.100.1.2.170983510513291304519837838253728744250",
"1.3.6.1.4.1.9590.100.1.2.271834418311297478535408615823692973065",
}
def _GetStudyUIDMaps(has_study_uid=None):
"""Returns a map of Study UID to Series UID and Study UID to label.
Args:
has_study_uid: If set, it only returns instances that match this Study UID.
Returns:
A Dict of Study UID -> Series UID.
A Dict of Study UID -> label.
"""
# Download UIDs for breast density 2 and 3.
http = httplib2.Http(timeout=60, disable_ssl_certificate_validation=True)
study_uid_to_series_uid = {}
study_uid_to_label = {}
for path in _LABEL_PATHS:
resp, content = http.request(path, method="GET")
assert resp.status == 200, "Failed to download label files from: " + path
r = csv.reader(StringIO.StringIO(content), delimiter=",")
header = r.next()
breast_density_column = -1
image_file_path_column = -1
for idx, h in enumerate(header):
if h in _BREAST_DENSITY_COLUMN:
breast_density_column = idx
if h in _IMAGE_FILE_PATH_COLUMN:
image_file_path_column = idx
assert breast_density_column != -1, "breast_density column not found"
assert image_file_path_column != -1, "image file path column not found"
for row in r:
density = row[breast_density_column]
if density != "2" and density != "3":
continue
dicom_uids = row[image_file_path_column].split("/")
study_instance_uid, series_instance_uid = dicom_uids[1], dicom_uids[2]
if study_instance_uid in _BLACKLISTED_STUDY_UIDS:
continue
if has_study_uid and has_study_uid != study_instance_uid:
continue
study_uid_to_series_uid[study_instance_uid] = series_instance_uid
study_uid_to_label[study_instance_uid] = density
return study_uid_to_series_uid, study_uid_to_label
def GetStudyUIDToSeriesUIDMap(has_study_uid=None):
"""Returns a map of Study UID to Series UID.
Args:
has_study_uid: If set, it only returns instances that match this Study UID.
Returns:
A Dict of Study UID -> Series UID.
"""
return _GetStudyUIDMaps(has_study_uid)[0]
def GetStudyUIDToLabelMap(has_study_uid=None):
"""Returns a map of Study UID to label.
Args:
has_study_uid: If set, it only returns instances that match this Study UID.
Returns:
A Dict of Study UID -> label.
"""
return _GetStudyUIDMaps(has_study_uid)[1]
|
[
"httplib2.Http",
"StringIO.StringIO"
] |
[((2524, 2590), 'httplib2.Http', 'httplib2.Http', ([], {'timeout': '(60)', 'disable_ssl_certificate_validation': '(True)'}), '(timeout=60, disable_ssl_certificate_validation=True)\n', (2537, 2590), False, 'import httplib2\n'), ((2826, 2852), 'StringIO.StringIO', 'StringIO.StringIO', (['content'], {}), '(content)\n', (2843, 2852), False, 'import StringIO\n')]
|
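The module above is Python 2 (`import StringIO`, `reader.next()`). A sketch of the download-and-parse step in Python 3, keeping the same `httplib2` call from the sample and assuming the response body is UTF-8:

```python
import csv
import io
import httplib2

path = "https://wiki.cancerimagingarchive.net/download/attachments/22516629/mass_case_description_train_set.csv"
http = httplib2.Http(timeout=60, disable_ssl_certificate_validation=True)
resp, content = http.request(path, method="GET")
assert resp.status == 200, "Failed to download label files from: " + path
# httplib2 returns bytes in Python 3; io.StringIO and next() replace the Py2 idioms.
reader = csv.reader(io.StringIO(content.decode("utf-8")), delimiter=",")
header = next(reader)
```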
import sys
import teca_py
import numpy as np
class teca_tc_wind_radii_stats(teca_py.teca_python_algorithm):
"""
Computes statistics using track wind radii
"""
def __init__(self):
self.basename = 'stats'
self.dpi = 100
self.interactive = False
self.wind_column = 'surface_wind'
self.output_prefix = ''
def __str__(self):
        # rel_axes was referenced here but never set on this class; drop it to avoid an AttributeError
        return 'basename=%s, dpi=%d, interactive=%s'%( \
            self.basename, self.dpi, str(self.interactive))
def set_basename(self, basename):
"""
All output files are prefixed by the basename. default 'stats'
"""
self.basename = basename
def set_dpi(self, dpi):
"""
set the DPI resolution for image output. default 100
"""
self.dpi = dpi
def set_interactive(self, interactive):
"""
        plots are rendered to an on-screen window when enabled.
when disabled plots are written directly to disk. default False
"""
self.interactive = interactive
def set_wind_column(self, wind_column):
"""
set the name of the column to obtain wind speed from
"""
self.wind_column = wind_column
def set_output_prefix(self, output_prefix):
"""
set the path to prepend to output files
"""
self.output_prefix = output_prefix
def execute(self, port, data_in, req):
"""
expects a table with track data containing wind radii computed
along each point of the track. produces statistical plots showing
the global distribution of wind radii.
"""
track_table = teca_py.as_teca_table(data_in[0])
# plot stats
import matplotlib.pyplot as plt
import matplotlib.patches as plt_mp
from matplotlib.colors import LogNorm
red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \
'#ea4f00','#c92500','#a80300']
km_per_deg_lat = 111
km_s_per_m_hr = 3.6
fig = plt.figure(figsize=(9.25,6.75),dpi=self.dpi)
# scatter
plt.subplot('331')
if not track_table.has_column(self.wind_column):
sys.stderr.write('ERROR: track table missing %s\n'%(self.wind_column))
sys.exit(-1)
year = track_table.get_column('year').as_array()
month = track_table.get_column('month').as_array()
day = track_table.get_column('day').as_array()
ws = km_s_per_m_hr*track_table.get_column(self.wind_column).as_array()
wr = []
nwr = 0
while track_table.has_column('wind_radius_%d'%(nwr)):
wr.append(km_per_deg_lat*track_table.get_column('wind_radius_%d'%(nwr)).as_array())
nwr += 1
i = 0
while i < nwr:
wc = teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph(i-1)
wri = wr[i]
ii = np.where(wri > 0.0)
plt.scatter(wri[ii], ws[ii], c=red_cmap[i], alpha=0.25, marker='.', zorder=3+i)
i += 1
plt.ylabel('Wind speed (km/hr)', fontweight='normal', fontsize=10)
plt.title('R0 - R5 vs Wind speed', fontweight='bold', fontsize=11)
plt.grid(True)
ax = plt.gca()
ax.set_xlim([0.0, 6.0*km_per_deg_lat])
# all
plt.subplot('332')
i = 0
while i < nwr:
wc = teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph(i-1)
wri = wr[i]
n,bins,pats = plt.hist(wri[np.where(wri > 0.0)], 32, range=[0,6.0*km_per_deg_lat], \
facecolor=red_cmap[i], alpha=0.95, edgecolor='black', \
linewidth=2, zorder=3+i)
i += 1
plt.ylabel('Number', fontweight='normal', fontsize=10)
plt.title('All R0 - R5', fontweight='bold', fontsize=11)
plt.grid(True)
ax = plt.gca()
ax.set_xlim([0.0, 6.0*km_per_deg_lat])
# r0 - r5
i = 0
while i < nwr:
plt.subplot(333+i)
wc = teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph(i-1)
wri = wr[i]
wrii=wri[np.where(wri > 0.0)]
n,bins,pats = plt.hist(wrii, 32, \
facecolor=red_cmap[i], alpha=1.00, edgecolor='black', \
linewidth=2, zorder=3)
if ((i % 3) == 1):
plt.ylabel('Number', fontweight='normal', fontsize=10)
if (i >= 3):
plt.xlabel('Radius (km)', fontweight='normal', fontsize=10)
plt.title('R%d (%0.1f km/hr)'%(i,wc), fontweight='bold', fontsize=11)
plt.grid(True)
ax = plt.gca()
try:
ax.set_xlim([np.min(wrii), np.max(wrii)])
except:
pass
i += 1
# legend
plt.subplot('339')
red_cmap_pats = []
q = 0
while q < nwr:
red_cmap_pats.append( \
plt_mp.Patch(color=red_cmap[q], label='R%d'%(q)))
q += 1
l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(-0.1, 1.0), fancybox=True)
plt.axis('off')
plt.suptitle('Wind Radii %s/%d/%d - %s/%d/%d'%(month[0],day[0],year[0], \
month[-1],day[-1],year[-1]), fontweight='bold', fontsize=12)
plt.subplots_adjust(hspace=0.35, wspace=0.35, top=0.90)
plt.savefig(self.output_prefix + 'wind_radii_stats.png')
fig = plt.figure(figsize=(7.5,4.0),dpi=100)
# peak radius
pr = km_per_deg_lat*track_table.get_column('peak_radius').as_array()
        # peak radius is only valid if at least one of the other
        # wind radii exists
kk = wr[0] > 1.0e-6
q = 1
while q < nwr:
kk = np.logical_or(kk, wr[q] > 1.0e-6)
q += 1
pr = pr[kk]
plt.subplot(121)
n,bins,pats = plt.hist(pr[np.where(pr > 0.0)], 24, \
facecolor='steelblue', alpha=0.95, edgecolor='black', \
linewidth=2, zorder=3)
plt.ylabel('Number', fontweight='normal', fontsize=10)
plt.xlabel('Radius (km)', fontweight='normal', fontsize=10)
plt.title('RP (radius at peak wind)', fontweight='bold', fontsize=11)
plt.grid(True)
ax = plt.gca()
ax.set_xlim([0.0, np.max(pr)])
# scatter
plt.subplot('122')
ii = np.where(pr > 0.0)
cnts,xe,ye,im = plt.hist2d(pr[ii], ws[ii], bins=24, norm=LogNorm(), zorder=2)
plt.ylabel('Wind speed (km/hr)', fontweight='normal', fontsize=10)
plt.xlabel('Radius (km)', fontweight='normal', fontsize=10)
plt.title('RP vs Wind speed', fontweight='bold', fontsize=11)
plt.grid(True)
ax = plt.gca()
ax.set_xlim([0.0, np.max(pr)])
fig.subplots_adjust(right=0.85)
cbar_ax = fig.add_axes([0.88, 0.35, 0.05, 0.5])
fig.colorbar(im, cax=cbar_ax)
plt.suptitle('Wind Radii %s/%d/%d - %s/%d/%d'%(month[0],day[0],year[0], \
month[-1],day[-1],year[-1]), fontweight='bold', fontsize=12)
plt.subplots_adjust(hspace=0.3, wspace=0.3, top=0.85)
plt.savefig(self.output_prefix + 'peak_radius_stats.png')
if self.interactive:
plt.show()
# send data downstream
return track_table
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.figure",
"matplotlib.colors.LogNorm",
"matplotlib.pyplot.gca",
"matplotlib.patches.Patch",
"numpy.max",
"teca_py.as_teca_table",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph",
"numpy.min",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.grid",
"sys.exit",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.axis",
"numpy.where",
"numpy.logical_or",
"sys.stderr.write",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((1697, 1730), 'teca_py.as_teca_table', 'teca_py.as_teca_table', (['data_in[0]'], {}), '(data_in[0])\n', (1718, 1730), False, 'import teca_py\n'), ((2053, 2099), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9.25, 6.75)', 'dpi': 'self.dpi'}), '(figsize=(9.25, 6.75), dpi=self.dpi)\n', (2063, 2099), True, 'import matplotlib.pyplot as plt\n'), ((2125, 2143), 'matplotlib.pyplot.subplot', 'plt.subplot', (['"""331"""'], {}), "('331')\n", (2136, 2143), True, 'import matplotlib.pyplot as plt\n'), ((3068, 3134), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Wind speed (km/hr)"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Wind speed (km/hr)', fontweight='normal', fontsize=10)\n", (3078, 3134), True, 'import matplotlib.pyplot as plt\n'), ((3143, 3209), 'matplotlib.pyplot.title', 'plt.title', (['"""R0 - R5 vs Wind speed"""'], {'fontweight': '"""bold"""', 'fontsize': '(11)'}), "('R0 - R5 vs Wind speed', fontweight='bold', fontsize=11)\n", (3152, 3209), True, 'import matplotlib.pyplot as plt\n'), ((3218, 3232), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (3226, 3232), True, 'import matplotlib.pyplot as plt\n'), ((3246, 3255), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3253, 3255), True, 'import matplotlib.pyplot as plt\n'), ((3326, 3344), 'matplotlib.pyplot.subplot', 'plt.subplot', (['"""332"""'], {}), "('332')\n", (3337, 3344), True, 'import matplotlib.pyplot as plt\n'), ((3717, 3771), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Number', fontweight='normal', fontsize=10)\n", (3727, 3771), True, 'import matplotlib.pyplot as plt\n'), ((3780, 3836), 'matplotlib.pyplot.title', 'plt.title', (['"""All R0 - R5"""'], {'fontweight': '"""bold"""', 'fontsize': '(11)'}), "('All R0 - R5', fontweight='bold', fontsize=11)\n", (3789, 3836), True, 'import matplotlib.pyplot as plt\n'), ((3845, 3859), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (3853, 3859), True, 'import matplotlib.pyplot as plt\n'), ((3873, 3882), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3880, 3882), True, 'import matplotlib.pyplot as plt\n'), ((4815, 4833), 'matplotlib.pyplot.subplot', 'plt.subplot', (['"""339"""'], {}), "('339')\n", (4826, 4833), True, 'import matplotlib.pyplot as plt\n'), ((5031, 5118), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'handles': 'red_cmap_pats', 'loc': '(2)', 'bbox_to_anchor': '(-0.1, 1.0)', 'fancybox': '(True)'}), '(handles=red_cmap_pats, loc=2, bbox_to_anchor=(-0.1, 1.0),\n fancybox=True)\n', (5041, 5118), True, 'import matplotlib.pyplot as plt\n'), ((5123, 5138), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (5131, 5138), True, 'import matplotlib.pyplot as plt\n'), ((5149, 5291), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('Wind Radii %s/%d/%d - %s/%d/%d' % (month[0], day[0], year[0], month[-1],\n day[-1], year[-1]))"], {'fontweight': '"""bold"""', 'fontsize': '(12)'}), "('Wind Radii %s/%d/%d - %s/%d/%d' % (month[0], day[0], year[0],\n month[-1], day[-1], year[-1]), fontweight='bold', fontsize=12)\n", (5161, 5291), True, 'import matplotlib.pyplot as plt\n'), ((5304, 5358), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.35)', 'wspace': '(0.35)', 'top': '(0.9)'}), '(hspace=0.35, wspace=0.35, top=0.9)\n', (5323, 5358), True, 'import matplotlib.pyplot as plt\n'), ((5369, 5425), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(self.output_prefix + 'wind_radii_stats.png')"], {}), "(self.output_prefix + 'wind_radii_stats.png')\n", (5380, 5425), True, 'import matplotlib.pyplot as plt\n'), ((5441, 5480), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(7.5, 4.0)', 'dpi': '(100)'}), '(figsize=(7.5, 4.0), dpi=100)\n', (5451, 5480), True, 'import matplotlib.pyplot as plt\n'), ((5825, 5841), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(121)'], {}), '(121)\n', (5836, 5841), True, 'import matplotlib.pyplot as plt\n'), ((6014, 6068), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Number', fontweight='normal', fontsize=10)\n", (6024, 6068), True, 'import matplotlib.pyplot as plt\n'), ((6077, 6136), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Radius (km)"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Radius (km)', fontweight='normal', fontsize=10)\n", (6087, 6136), True, 'import matplotlib.pyplot as plt\n'), ((6145, 6214), 'matplotlib.pyplot.title', 'plt.title', (['"""RP (radius at peak wind)"""'], {'fontweight': '"""bold"""', 'fontsize': '(11)'}), "('RP (radius at peak wind)', fontweight='bold', fontsize=11)\n", (6154, 6214), True, 'import matplotlib.pyplot as plt\n'), ((6223, 6237), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (6231, 6237), True, 'import matplotlib.pyplot as plt\n'), ((6251, 6260), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6258, 6260), True, 'import matplotlib.pyplot as plt\n'), ((6327, 6345), 'matplotlib.pyplot.subplot', 'plt.subplot', (['"""122"""'], {}), "('122')\n", (6338, 6345), True, 'import matplotlib.pyplot as plt\n'), ((6359, 6377), 'numpy.where', 'np.where', (['(pr > 0.0)'], {}), '(pr > 0.0)\n', (6367, 6377), True, 'import numpy as np\n'), ((6472, 6538), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Wind speed (km/hr)"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Wind speed (km/hr)', fontweight='normal', fontsize=10)\n", (6482, 6538), True, 'import matplotlib.pyplot as plt\n'), ((6547, 6606), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Radius (km)"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Radius (km)', fontweight='normal', fontsize=10)\n", (6557, 6606), True, 'import matplotlib.pyplot as plt\n'), ((6615, 6676), 'matplotlib.pyplot.title', 'plt.title', (['"""RP vs Wind speed"""'], {'fontweight': '"""bold"""', 'fontsize': '(11)'}), "('RP vs Wind speed', fontweight='bold', fontsize=11)\n", (6624, 6676), True, 'import matplotlib.pyplot as plt\n'), ((6685, 6699), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (6693, 6699), True, 'import matplotlib.pyplot as plt\n'), ((6713, 6722), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6720, 6722), True, 'import matplotlib.pyplot as plt\n'), ((6906, 7048), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('Wind Radii %s/%d/%d - %s/%d/%d' % (month[0], day[0], year[0], month[-1],\n day[-1], year[-1]))"], {'fontweight': '"""bold"""', 'fontsize': '(12)'}), "('Wind Radii %s/%d/%d - %s/%d/%d' % (month[0], day[0], year[0],\n month[-1], day[-1], year[-1]), fontweight='bold', fontsize=12)\n", (6918, 7048), True, 'import matplotlib.pyplot as plt\n'), ((7061, 7114), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.3)', 'wspace': '(0.3)', 'top': '(0.85)'}), '(hspace=0.3, wspace=0.3, top=0.85)\n', (7080, 7114), True, 'import matplotlib.pyplot as plt\n'), ((7125, 7182), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(self.output_prefix + 'peak_radius_stats.png')"], {}), "(self.output_prefix + 'peak_radius_stats.png')\n", (7136, 7182), True, 'import matplotlib.pyplot as plt\n'), ((2214, 2284), 'sys.stderr.write', 'sys.stderr.write', (["('ERROR: track table missing %s\\n' % self.wind_column)"], {}), "('ERROR: track table missing %s\\n' % self.wind_column)\n", (2230, 2284), False, 'import sys\n'), ((2297, 2309), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2305, 2309), False, 'import sys\n'), ((2830, 2888), 'teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph', 'teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph', (['(i - 1)'], {}), '(i - 1)\n', (2881, 2888), False, 'import teca_py\n'), ((2928, 2947), 'numpy.where', 'np.where', (['(wri > 0.0)'], {}), '(wri > 0.0)\n', (2936, 2947), True, 'import numpy as np\n'), ((2960, 3046), 'matplotlib.pyplot.scatter', 'plt.scatter', (['wri[ii]', 'ws[ii]'], {'c': 'red_cmap[i]', 'alpha': '(0.25)', 'marker': '"""."""', 'zorder': '(3 + i)'}), "(wri[ii], ws[ii], c=red_cmap[i], alpha=0.25, marker='.', zorder=\n 3 + i)\n", (2971, 3046), True, 'import matplotlib.pyplot as plt\n'), ((3399, 3457), 'teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph', 'teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph', (['(i - 1)'], {}), '(i - 1)\n', (3450, 3457), False, 'import teca_py\n'), ((3998, 4018), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(333 + i)'], {}), '(333 + i)\n', (4009, 4018), True, 'import matplotlib.pyplot as plt\n'), ((4034, 4092), 'teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph', 'teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph', (['(i - 1)'], {}), '(i - 1)\n', (4085, 4092), False, 'import teca_py\n'), ((4183, 4281), 'matplotlib.pyplot.hist', 'plt.hist', (['wrii', '(32)'], {'facecolor': 'red_cmap[i]', 'alpha': '(1.0)', 'edgecolor': '"""black"""', 'linewidth': '(2)', 'zorder': '(3)'}), "(wrii, 32, facecolor=red_cmap[i], alpha=1.0, edgecolor='black',\n linewidth=2, zorder=3)\n", (4191, 4281), True, 'import matplotlib.pyplot as plt\n'), ((4530, 4602), 'matplotlib.pyplot.title', 'plt.title', (["('R%d (%0.1f km/hr)' % (i, wc))"], {'fontweight': '"""bold"""', 'fontsize': '(11)'}), "('R%d (%0.1f km/hr)' % (i, wc), fontweight='bold', fontsize=11)\n", (4539, 4602), True, 'import matplotlib.pyplot as plt\n'), ((4612, 4626), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4620, 4626), True, 'import matplotlib.pyplot as plt\n'), ((4644, 4653), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4651, 4653), True, 'import matplotlib.pyplot as plt\n'), ((5743, 5775), 'numpy.logical_or', 'np.logical_or', (['kk', '(wr[q] > 1e-06)'], {}), '(kk, wr[q] > 1e-06)\n', (5756, 5775), True, 'import numpy as np\n'), ((7225, 7235), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7233, 7235), True, 'import matplotlib.pyplot as plt\n'), ((4136, 4155), 'numpy.where', 'np.where', (['(wri > 0.0)'], {}), '(wri > 0.0)\n', (4144, 4155), True, 'import numpy as np\n'), ((4362, 4416), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Number', fontweight='normal', fontsize=10)\n", (4372, 4416), True, 'import matplotlib.pyplot as plt\n'), ((4458, 4517), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Radius (km)"""'], {'fontweight': '"""normal"""', 'fontsize': '(10)'}), "('Radius (km)', fontweight='normal', fontsize=10)\n", (4468, 4517), True, 'import matplotlib.pyplot as plt\n'), ((4950, 4998), 'matplotlib.patches.Patch', 'plt_mp.Patch', ([], {'color': 'red_cmap[q]', 'label': "('R%d' % q)"}), "(color=red_cmap[q], label='R%d' % q)\n", (4962, 4998), True, 'import matplotlib.patches as plt_mp\n'), ((5876, 5894), 'numpy.where', 'np.where', (['(pr > 0.0)'], {}), '(pr > 0.0)\n', (5884, 5894), True, 'import numpy as np\n'), ((6287, 6297), 'numpy.max', 'np.max', (['pr'], {}), '(pr)\n', (6293, 6297), True, 'import numpy as np\n'), ((6443, 6452), 'matplotlib.colors.LogNorm', 'LogNorm', ([], {}), '()\n', (6450, 6452), False, 'from matplotlib.colors import LogNorm\n'), ((6749, 6759), 'numpy.max', 'np.max', (['pr'], {}), '(pr)\n', (6755, 6759), True, 'import numpy as np\n'), ((3519, 3538), 'numpy.where', 'np.where', (['(wri > 0.0)'], {}), '(wri > 0.0)\n', (3527, 3538), True, 'import numpy as np\n'), ((4700, 4712), 'numpy.min', 'np.min', (['wrii'], {}), '(wrii)\n', (4706, 4712), True, 'import numpy as np\n'), ((4714, 4726), 'numpy.max', 'np.max', (['wrii'], {}), '(wrii)\n', (4720, 4726), True, 'import numpy as np\n')]
|
"""The tests for the mFi sensor platform."""
import unittest
import unittest.mock as mock
import requests
from homeassistant.bootstrap import setup_component
import homeassistant.components.sensor as sensor
import homeassistant.components.sensor.mfi as mfi
from homeassistant.const import TEMP_CELSIUS
from tests.common import get_test_home_assistant
class TestMfiSensorSetup(unittest.TestCase):
"""Test the mFi sensor platform."""
PLATFORM = mfi
COMPONENT = sensor
THING = 'sensor'
GOOD_CONFIG = {
'sensor': {
'platform': 'mfi',
'host': 'foo',
'port': 6123,
'username': 'user',
'password': '<PASSWORD>',
'ssl': True,
'verify_ssl': True,
}
}
def setup_method(self, method):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_missing_config(self):
"""Test setup with missing configuration."""
config = {
'sensor': {
'platform': 'mfi',
}
}
self.assertFalse(self.PLATFORM.setup_platform(self.hass, config, None))
@mock.patch('mficlient.client')
def test_setup_failed_login(self, mock_client):
"""Test setup with login failure."""
mock_client.FailedToLogin = Exception()
mock_client.MFiClient.side_effect = mock_client.FailedToLogin
self.assertFalse(
self.PLATFORM.setup_platform(
self.hass, dict(self.GOOD_CONFIG), None))
@mock.patch('mficlient.client')
def test_setup_failed_connect(self, mock_client):
"""Test setup with conection failure."""
mock_client.FailedToLogin = Exception()
mock_client.MFiClient.side_effect = requests.exceptions.ConnectionError
self.assertFalse(
self.PLATFORM.setup_platform(
self.hass, dict(self.GOOD_CONFIG), None))
@mock.patch('mficlient.client.MFiClient')
def test_setup_minimum(self, mock_client):
"""Test setup with minimum configuration."""
config = dict(self.GOOD_CONFIG)
del config[self.THING]['port']
assert setup_component(self.hass, self.COMPONENT.DOMAIN, config)
mock_client.assert_called_once_with(
'foo', 'user', 'pass', port=6443, use_tls=True, verify=True)
@mock.patch('mficlient.client.MFiClient')
def test_setup_with_port(self, mock_client):
"""Test setup with port."""
config = dict(self.GOOD_CONFIG)
config[self.THING]['port'] = 6123
assert setup_component(self.hass, self.COMPONENT.DOMAIN, config)
mock_client.assert_called_once_with(
'foo', 'user', 'pass', port=6123, use_tls=True, verify=True)
@mock.patch('mficlient.client.MFiClient')
def test_setup_with_tls_disabled(self, mock_client):
"""Test setup without TLS."""
config = dict(self.GOOD_CONFIG)
del config[self.THING]['port']
config[self.THING]['ssl'] = False
config[self.THING]['verify_ssl'] = False
assert setup_component(self.hass, self.COMPONENT.DOMAIN, config)
mock_client.assert_called_once_with(
'foo', 'user', 'pass', port=6080, use_tls=False, verify=False)
@mock.patch('mficlient.client.MFiClient')
@mock.patch('homeassistant.components.sensor.mfi.MfiSensor')
def test_setup_adds_proper_devices(self, mock_sensor, mock_client):
"""Test if setup adds devices."""
ports = {i: mock.MagicMock(model=model)
for i, model in enumerate(mfi.SENSOR_MODELS)}
ports['bad'] = mock.MagicMock(model='notasensor')
print(ports['bad'].model)
mock_client.return_value.get_devices.return_value = \
[mock.MagicMock(ports=ports)]
assert setup_component(self.hass, sensor.DOMAIN, self.GOOD_CONFIG)
for ident, port in ports.items():
if ident != 'bad':
mock_sensor.assert_any_call(port, self.hass)
assert mock.call(ports['bad'], self.hass) not in mock_sensor.mock_calls
class TestMfiSensor(unittest.TestCase):
"""Test for mFi sensor platform."""
def setup_method(self, method):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.port = mock.MagicMock()
self.sensor = mfi.MfiSensor(self.port, self.hass)
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_name(self):
"""Test the name."""
self.assertEqual(self.port.label, self.sensor.name)
def test_uom_temp(self):
"""Test the UOM temperature."""
self.port.tag = 'temperature'
self.assertEqual(TEMP_CELSIUS, self.sensor.unit_of_measurement)
def test_uom_power(self):
"""Test the UOEM power."""
self.port.tag = 'active_pwr'
self.assertEqual('Watts', self.sensor.unit_of_measurement)
def test_uom_digital(self):
"""Test the UOM digital input."""
self.port.model = 'Input Digital'
self.assertEqual('State', self.sensor.unit_of_measurement)
def test_uom_unknown(self):
"""Test the UOM."""
self.port.tag = 'balloons'
self.assertEqual('balloons', self.sensor.unit_of_measurement)
def test_uom_uninitialized(self):
"""Test that the UOM defaults if not initialized."""
type(self.port).tag = mock.PropertyMock(side_effect=ValueError)
self.assertEqual('State', self.sensor.unit_of_measurement)
def test_state_digital(self):
"""Test the digital input."""
self.port.model = 'Input Digital'
self.port.value = 0
self.assertEqual(mfi.STATE_OFF, self.sensor.state)
self.port.value = 1
self.assertEqual(mfi.STATE_ON, self.sensor.state)
self.port.value = 2
self.assertEqual(mfi.STATE_ON, self.sensor.state)
def test_state_digits(self):
"""Test the state of digits."""
self.port.tag = 'didyoucheckthedict?'
self.port.value = 1.25
with mock.patch.dict(mfi.DIGITS, {'didyoucheckthedict?': 1}):
self.assertEqual(1.2, self.sensor.state)
with mock.patch.dict(mfi.DIGITS, {}):
self.assertEqual(1.0, self.sensor.state)
def test_state_uninitialized(self):
"""Test the state of uninitialized sensors."""
type(self.port).tag = mock.PropertyMock(side_effect=ValueError)
self.assertEqual(mfi.STATE_OFF, self.sensor.state)
def test_update(self):
"""Test the update."""
self.sensor.update()
self.port.refresh.assert_called_once_with()
|
[
"homeassistant.bootstrap.setup_component",
"tests.common.get_test_home_assistant",
"unittest.mock.MagicMock",
"unittest.mock.PropertyMock",
"homeassistant.components.sensor.mfi.MfiSensor",
"unittest.mock.patch.dict",
"unittest.mock.patch",
"unittest.mock.call"
] |
[((1312, 1342), 'unittest.mock.patch', 'mock.patch', (['"""mficlient.client"""'], {}), "('mficlient.client')\n", (1322, 1342), True, 'import unittest.mock as mock\n'), ((1690, 1720), 'unittest.mock.patch', 'mock.patch', (['"""mficlient.client"""'], {}), "('mficlient.client')\n", (1700, 1720), True, 'import unittest.mock as mock\n'), ((2084, 2124), 'unittest.mock.patch', 'mock.patch', (['"""mficlient.client.MFiClient"""'], {}), "('mficlient.client.MFiClient')\n", (2094, 2124), True, 'import unittest.mock as mock\n'), ((2501, 2541), 'unittest.mock.patch', 'mock.patch', (['"""mficlient.client.MFiClient"""'], {}), "('mficlient.client.MFiClient')\n", (2511, 2541), True, 'import unittest.mock as mock\n'), ((2906, 2946), 'unittest.mock.patch', 'mock.patch', (['"""mficlient.client.MFiClient"""'], {}), "('mficlient.client.MFiClient')\n", (2916, 2946), True, 'import unittest.mock as mock\n'), ((3411, 3451), 'unittest.mock.patch', 'mock.patch', (['"""mficlient.client.MFiClient"""'], {}), "('mficlient.client.MFiClient')\n", (3421, 3451), True, 'import unittest.mock as mock\n'), ((3457, 3516), 'unittest.mock.patch', 'mock.patch', (['"""homeassistant.components.sensor.mfi.MfiSensor"""'], {}), "('homeassistant.components.sensor.mfi.MfiSensor')\n", (3467, 3516), True, 'import unittest.mock as mock\n'), ((890, 915), 'tests.common.get_test_home_assistant', 'get_test_home_assistant', ([], {}), '()\n', (913, 915), False, 'from tests.common import get_test_home_assistant\n'), ((2319, 2376), 'homeassistant.bootstrap.setup_component', 'setup_component', (['self.hass', 'self.COMPONENT.DOMAIN', 'config'], {}), '(self.hass, self.COMPONENT.DOMAIN, config)\n', (2334, 2376), False, 'from homeassistant.bootstrap import setup_component\n'), ((2724, 2781), 'homeassistant.bootstrap.setup_component', 'setup_component', (['self.hass', 'self.COMPONENT.DOMAIN', 'config'], {}), '(self.hass, self.COMPONENT.DOMAIN, config)\n', (2739, 2781), False, 'from homeassistant.bootstrap import setup_component\n'), ((3227, 3284), 'homeassistant.bootstrap.setup_component', 'setup_component', (['self.hass', 'self.COMPONENT.DOMAIN', 'config'], {}), '(self.hass, self.COMPONENT.DOMAIN, config)\n', (3242, 3284), False, 'from homeassistant.bootstrap import setup_component\n'), ((3765, 3799), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'model': '"""notasensor"""'}), "(model='notasensor')\n", (3779, 3799), True, 'import unittest.mock as mock\n'), ((3953, 4012), 'homeassistant.bootstrap.setup_component', 'setup_component', (['self.hass', 'sensor.DOMAIN', 'self.GOOD_CONFIG'], {}), '(self.hass, sensor.DOMAIN, self.GOOD_CONFIG)\n', (3968, 4012), False, 'from homeassistant.bootstrap import setup_component\n'), ((4427, 4452), 'tests.common.get_test_home_assistant', 'get_test_home_assistant', ([], {}), '()\n', (4450, 4452), False, 'from tests.common import get_test_home_assistant\n'), ((4473, 4489), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (4487, 4489), True, 'import unittest.mock as mock\n'), ((4512, 4547), 'homeassistant.components.sensor.mfi.MfiSensor', 'mfi.MfiSensor', (['self.port', 'self.hass'], {}), '(self.port, self.hass)\n', (4525, 4547), True, 'import homeassistant.components.sensor.mfi as mfi\n'), ((5606, 5647), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'side_effect': 'ValueError'}), '(side_effect=ValueError)\n', (5623, 5647), True, 'import unittest.mock as mock\n'), ((6588, 6629), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'side_effect': 'ValueError'}), '(side_effect=ValueError)\n', (6605, 6629), True, 'import unittest.mock as mock\n'), ((3651, 3678), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'model': 'model'}), '(model=model)\n', (3665, 3678), True, 'import unittest.mock as mock\n'), ((3909, 3936), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'ports': 'ports'}), '(ports=ports)\n', (3923, 3936), True, 'import unittest.mock as mock\n'), ((4162, 4196), 'unittest.mock.call', 'mock.call', (["ports['bad']", 'self.hass'], {}), "(ports['bad'], self.hass)\n", (4171, 4196), True, 'import unittest.mock as mock\n'), ((6253, 6308), 'unittest.mock.patch.dict', 'mock.patch.dict', (['mfi.DIGITS', "{'didyoucheckthedict?': 1}"], {}), "(mfi.DIGITS, {'didyoucheckthedict?': 1})\n", (6268, 6308), True, 'import unittest.mock as mock\n'), ((6376, 6407), 'unittest.mock.patch.dict', 'mock.patch.dict', (['mfi.DIGITS', '{}'], {}), '(mfi.DIGITS, {})\n', (6391, 6407), True, 'import unittest.mock as mock\n')]
|
# Copyright (c) 2016, <NAME> <github: philipperemy>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import print_function
import os
from argparse import ArgumentParser
from subprocess import Popen
from sys import argv
from sys import stderr
import nltk
stoplistlines = open("inputs/stopwords.txt",'r').readlines()
stoplist = []
for i in stoplistlines:
stoplist.append(i.strip().lower())
JAVA_BIN_PATH = 'java'
DOT_BIN_PATH = 'dot'
STANFORD_IE_FOLDER = 'stanford-openie'
tmp_folder = '/tmp/openie/'
if not os.path.exists(tmp_folder):
os.makedirs(tmp_folder)
def arg_parse():
arg_p = ArgumentParser('Stanford IE Python Wrapper')
arg_p.add_argument('-f', '--filename', type=str, default=None)
arg_p.add_argument('-v', '--verbose', action='store_true')
arg_p.add_argument('-g', '--generate_graph', action='store_true')
return arg_p
def debug_print(log, verbose):
if verbose:
print(log)
def process_entity_relations(entity_relations_str, verbose=True):
# format is ollie.
entity_relations = list()
for s in entity_relations_str:
entity_relations.append(s[s.find("(") + 1:s.find(")")].split(';'))
return entity_relations
def generate_graphviz_graph(entity_relations, verbose=True):
"""digraph G {
# a -> b [ label="a to b" ];
# b -> c [ label="another label"];
}"""
graph = list()
graph.append('digraph {')
for er in entity_relations:
graph.append('"{}" -> "{}" [ label="{}" ];'.format(er[0], er[2], er[1]))
graph.append('}')
out_dot = tmp_folder + 'out.dot'
with open(out_dot, 'w') as output_file:
output_file.writelines(graph)
out_png = tmp_folder + 'out.png'
command = '{} -Tpng {} -o {}'.format(DOT_BIN_PATH, out_dot, out_png)
debug_print('Executing command = {}'.format(command), verbose)
dot_process = Popen(command, stdout=stderr, shell=True)
dot_process.wait()
assert not dot_process.returncode, 'ERROR: Call to dot exited with a non-zero code status.'
print('Wrote graph to {} and {}'.format(out_dot, out_png))
def stanford_ie(input_filename, verbose=True, generate_graphviz=True):
out = tmp_folder + 'out.txt'
input_filename = input_filename.replace(',', ' ')
new_filename = ''
for filename in input_filename.split():
if filename.startswith('/'): # absolute path.
new_filename += '{} '.format(filename)
else:
new_filename += '../{} '.format(filename)
absolute_path_to_script = os.path.dirname(os.path.realpath(__file__)) + '/'
command = 'cd {};'.format(absolute_path_to_script)
command += 'cd {}; {} -mx4g -cp "stanford-openie.jar:stanford-openie-models.jar:lib/*" ' \
'edu.stanford.nlp.naturalli.OpenIE {} -format ollie > {}'. \
format(STANFORD_IE_FOLDER, JAVA_BIN_PATH, new_filename, out)
if verbose:
debug_print('Executing command = {}'.format(command), verbose)
java_process = Popen(command, stdout=stderr, shell=True)
else:
java_process = Popen(command, stdout=stderr, stderr=open(os.devnull, 'w'), shell=True)
java_process.wait()
assert not java_process.returncode, 'ERROR: Call to stanford_ie exited with a non-zero code status.'
with open(out, 'r') as output_file:
results_str = output_file.readlines()
os.remove(out)
results = process_entity_relations(results_str, verbose)
if generate_graphviz:
generate_graphviz_graph(results, verbose)
return results
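# The return value is a list of extractions, each a list of the
# ';'-separated fields (normally [subject, relation, object]).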
args = argv[:]
arg_p = arg_parse().parse_args(args[1:])
filename = arg_p.filename
verbose = arg_p.verbose
generate_graphviz = arg_p.generate_graph
if filename is None:
print('please provide a text file containing your input. Program will exit.')
exit(1)
if verbose:
debug_print('filename = {}'.format(filename), verbose)
# `global` is a no-op at module level, so plain assignments suffice here.
entities_relations = stanford_ie(filename, verbose, generate_graphviz)
relations = entities_relations[:]
# for i in relations:
#     print(i)
|
[
"subprocess.Popen",
"os.remove",
"os.makedirs",
"argparse.ArgumentParser",
"os.path.realpath",
"os.path.exists"
] |
[((1199, 1225), 'os.path.exists', 'os.path.exists', (['tmp_folder'], {}), '(tmp_folder)\n', (1213, 1225), False, 'import os\n'), ((1228, 1251), 'os.makedirs', 'os.makedirs', (['tmp_folder'], {}), '(tmp_folder)\n', (1239, 1251), False, 'import os\n'), ((1280, 1324), 'argparse.ArgumentParser', 'ArgumentParser', (['"""Stanford IE Python Wrapper"""'], {}), "('Stanford IE Python Wrapper')\n", (1294, 1324), False, 'from argparse import ArgumentParser\n'), ((2439, 2480), 'subprocess.Popen', 'Popen', (['command'], {'stdout': 'stderr', 'shell': '(True)'}), '(command, stdout=stderr, shell=True)\n', (2444, 2480), False, 'from subprocess import Popen\n'), ((3797, 3811), 'os.remove', 'os.remove', (['out'], {}), '(out)\n', (3806, 3811), False, 'import os\n'), ((3457, 3498), 'subprocess.Popen', 'Popen', (['command'], {'stdout': 'stderr', 'shell': '(True)'}), '(command, stdout=stderr, shell=True)\n', (3462, 3498), False, 'from subprocess import Popen\n'), ((3057, 3083), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (3073, 3083), False, 'import os\n')]
|
import scrape
import os
import json
from datetime import datetime
import hashlib
page_count = 0
book_count = 0
next_page = scrape.base_url
# initialize scraper
max_page = 2 # adjust to scrape fewer or more pages
scrape_images = True # toggle whether to download each book's cover image
image_folder = "./images"
if not os.path.exists(image_folder):
os.mkdir(image_folder)
# it's more convenient to store the result as a jsonlines file
# a jsonlines file can be read with pandas via: pandas.read_json(path, lines=True)
f = open("books.jl","w")
while (page_count < max_page) and next_page:
result = scrape.scrape_list(next_page)
links = result["links"]
page_count+=1
print(f"Scraped page {page_count}: {next_page}")
next_page = result["next"]
for link in links:
book_info = scrape.scrape_book(link)
if scrape_images:
dest = "images/"
image_path = scrape.download_image(book_info["image_url"],dest)
book_info["image_path"] = image_path
f.write(json.dumps(book_info) + "\n")
book_count +=1
print(f"Scraped book {book_count}: {book_info['title']}")
f.close()
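# As noted above, the jsonlines output can be loaded back with pandas, e.g.:
#   import pandas as pd
#   df = pd.read_json("books.jl", lines=True)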
|
[
"os.mkdir",
"scrape.scrape_list",
"os.path.exists",
"scrape.scrape_book",
"json.dumps",
"scrape.download_image"
] |
[((321, 349), 'os.path.exists', 'os.path.exists', (['image_folder'], {}), '(image_folder)\n', (335, 349), False, 'import os\n'), ((355, 377), 'os.mkdir', 'os.mkdir', (['image_folder'], {}), '(image_folder)\n', (363, 377), False, 'import os\n'), ((601, 630), 'scrape.scrape_list', 'scrape.scrape_list', (['next_page'], {}), '(next_page)\n', (619, 630), False, 'import scrape\n'), ((805, 829), 'scrape.scrape_book', 'scrape.scrape_book', (['link'], {}), '(link)\n', (823, 829), False, 'import scrape\n'), ((911, 962), 'scrape.download_image', 'scrape.download_image', (["book_info['image_url']", 'dest'], {}), "(book_info['image_url'], dest)\n", (932, 962), False, 'import scrape\n'), ((1040, 1061), 'json.dumps', 'json.dumps', (['book_info'], {}), '(book_info)\n', (1050, 1061), False, 'import json\n')]
|
# Generated by Django 3.2.8 on 2021-12-27 16:04
import django.core.files.storage
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Parameters',
fields=[
('runid', models.AutoField(primary_key=True, serialize=False)),
('upload_timestamp', models.DateTimeField(default=None)),
('file_hash', models.CharField(default=None, max_length=32)),
('inputFile', models.FileField(default=None, storage=django.core.files.storage.FileSystemStorage(location='/Users/boorish/Documents/programowanie/engineer/code/cross-validation-web-app/backend/server/media'), upload_to='uploads')),
('paramList', models.TextField(default=None)),
('targetColumn', models.CharField(default=None, max_length=64)),
('adjust', models.FloatField(blank=True, default=None, null=True)),
('round', models.IntegerField(default=5)),
('fixed', models.CharField(blank=True, default=None, max_length=32, null=True)),
('threshold', models.IntegerField(blank=True, default=5, null=True)),
],
),
migrations.CreateModel(
name='Results',
fields=[
('resultid', models.AutoField(primary_key=True, serialize=False)),
('file_hash', models.CharField(default=None, max_length=64)),
('upload_timestamp', models.DateTimeField(default=None)),
('results_timestamp', models.DateTimeField(default=None)),
('rkf_scores', models.CharField(default=None, max_length=255)),
('rkf_mean', models.FloatField(default=None)),
('rkf_stddev', models.FloatField(default=None)),
('cv_scores', models.CharField(default=None, max_length=255)),
('cv_mean', models.FloatField(default=None)),
('cv_stddev', models.FloatField(default=None)),
('coefs_ols', models.TextField(default=None)),
('coefs_nnls', models.TextField(default=None)),
('outliers_ols', models.TextField(default=None)),
('mean_abs_percentage_error_ols', models.FloatField(default=None)),
('percentage_error_vect_ols', models.TextField(default=None)),
('mean_percentage_error_ols', models.FloatField(default=None)),
('median_percentage_error_ols', models.FloatField(default=None)),
('rmsre_ols', models.FloatField(default=None)),
('stddev_abs_percentage_error_ols', models.FloatField(default=None)),
('stddev_relative_error_ols', models.FloatField(default=None)),
('rmse_ols', models.FloatField(default=None)),
('r2_score_ols', models.FloatField(default=None)),
('outliers_nnls', models.TextField(default=None)),
('mean_abs_percentage_error_nnls', models.FloatField(default=None)),
('percentage_error_vect_nnls', models.TextField(default=None)),
('mean_percentage_error_nnls', models.FloatField(default=None)),
('median_percentage_error_nnls', models.FloatField(default=None)),
('rmsre_nnls', models.FloatField(default=None)),
('stddev_abs_percentage_error_nnls', models.FloatField(default=None)),
('stddev_relative_error_nnls', models.FloatField(default=None)),
('rmse_nnls', models.FloatField(default=None)),
('r2_score_nnls', models.FloatField(default=None)),
('y', models.TextField(default=None)),
('runid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='MyAPI.parameters')),
],
),
]
|
[
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.CharField",
"django.db.models.FloatField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((375, 426), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (391, 426), False, 'from django.db import migrations, models\n'), ((466, 500), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'None'}), '(default=None)\n', (486, 500), False, 'from django.db import migrations, models\n'), ((533, 578), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(32)'}), '(default=None, max_length=32)\n', (549, 578), False, 'from django.db import migrations, models\n'), ((859, 889), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (875, 889), False, 'from django.db import migrations, models\n'), ((925, 970), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(64)'}), '(default=None, max_length=64)\n', (941, 970), False, 'from django.db import migrations, models\n'), ((1000, 1054), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)'}), '(blank=True, default=None, null=True)\n', (1017, 1054), False, 'from django.db import migrations, models\n'), ((1083, 1113), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(5)'}), '(default=5)\n', (1102, 1113), False, 'from django.db import migrations, models\n'), ((1142, 1210), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': 'None', 'max_length': '(32)', 'null': '(True)'}), '(blank=True, default=None, max_length=32, null=True)\n', (1158, 1210), False, 'from django.db import migrations, models\n'), ((1243, 1296), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(5)', 'null': '(True)'}), '(blank=True, default=5, null=True)\n', (1262, 1296), False, 'from django.db import migrations, models\n'), ((1435, 1486), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (1451, 1486), False, 'from django.db import migrations, models\n'), ((1519, 1564), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(64)'}), '(default=None, max_length=64)\n', (1535, 1564), False, 'from django.db import migrations, models\n'), ((1604, 1638), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'None'}), '(default=None)\n', (1624, 1638), False, 'from django.db import migrations, models\n'), ((1679, 1713), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'None'}), '(default=None)\n', (1699, 1713), False, 'from django.db import migrations, models\n'), ((1747, 1793), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(255)'}), '(default=None, max_length=255)\n', (1763, 1793), False, 'from django.db import migrations, models\n'), ((1825, 1856), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (1842, 1856), False, 'from django.db import migrations, models\n'), ((1890, 1921), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (1907, 1921), False, 'from django.db import migrations, models\n'), ((1954, 2000), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(255)'}), '(default=None, max_length=255)\n', (1970, 2000), False, 'from django.db import migrations, models\n'), ((2031, 2062), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2048, 2062), False, 'from django.db import migrations, models\n'), ((2095, 2126), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2112, 2126), False, 'from django.db import migrations, models\n'), ((2159, 2189), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (2175, 2189), False, 'from django.db import migrations, models\n'), ((2223, 2253), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (2239, 2253), False, 'from django.db import migrations, models\n'), ((2289, 2319), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (2305, 2319), False, 'from django.db import migrations, models\n'), ((2372, 2403), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2389, 2403), False, 'from django.db import migrations, models\n'), ((2452, 2482), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (2468, 2482), False, 'from django.db import migrations, models\n'), ((2531, 2562), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2548, 2562), False, 'from django.db import migrations, models\n'), ((2613, 2644), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2630, 2644), False, 'from django.db import migrations, models\n'), ((2677, 2708), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2694, 2708), False, 'from django.db import migrations, models\n'), ((2763, 2794), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2780, 2794), False, 'from django.db import migrations, models\n'), ((2843, 2874), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2860, 2874), False, 'from django.db import migrations, models\n'), ((2906, 2937), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2923, 2937), False, 'from django.db import migrations, models\n'), ((2973, 3004), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (2990, 3004), False, 'from django.db import migrations, models\n'), ((3041, 3071), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (3057, 3071), False, 'from django.db import migrations, models\n'), ((3125, 3156), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3142, 3156), False, 'from django.db import migrations, models\n'), ((3206, 3236), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (3222, 3236), False, 'from django.db import migrations, models\n'), ((3286, 3317), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3303, 3317), False, 'from django.db import migrations, models\n'), ((3369, 3400), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3386, 3400), False, 'from django.db import migrations, models\n'), ((3434, 3465), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3451, 3465), False, 'from django.db import migrations, models\n'), ((3521, 3552), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3538, 3552), False, 'from django.db import migrations, models\n'), ((3602, 3633), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3619, 3633), False, 'from django.db import migrations, models\n'), ((3666, 3697), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3683, 3697), False, 'from django.db import migrations, models\n'), ((3734, 3765), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': 'None'}), '(default=None)\n', (3751, 3765), False, 'from django.db import migrations, models\n'), ((3790, 3820), 'django.db.models.TextField', 'models.TextField', ([], {'default': 'None'}), '(default=None)\n', (3806, 3820), False, 'from django.db import migrations, models\n'), ((3849, 3965), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""results"""', 'to': '"""MyAPI.parameters"""'}), "(on_delete=django.db.models.deletion.CASCADE,\n related_name='results', to='MyAPI.parameters')\n", (3869, 3965), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
import re
import pytest
from mimesis import Hardware
from mimesis.data import (
CPU,
CPU_CODENAMES,
CPU_MODEL_CODES,
GENERATION,
GRAPHICS,
HDD_SSD,
MANUFACTURERS,
PHONE_MODELS,
RAM_SIZES,
RAM_TYPES,
RESOLUTIONS,
SCREEN_SIZES,
)
from . import patterns
class TestHardware(object):
@pytest.fixture
def hard(self):
return Hardware()
def test_str(self, hard):
assert re.match(patterns.PROVIDER_STR_REGEX, str(hard))
def test_resolution(self, hard):
result = hard.resolution()
assert result in RESOLUTIONS
def test_screen_size(self, hard):
result = hard.screen_size()
assert result in SCREEN_SIZES
def test_generation(self, hard):
result = hard.generation()
assert result in GENERATION
assert isinstance(result, str)
def test_cpu_model_code(self, hard):
result = hard.cpu_model_code()
assert result in CPU_MODEL_CODES
assert isinstance(result, str)
def test_cpu_frequency(self, hard):
result = hard.cpu_frequency().split("G")[0]
assert float(result) < 4.4
def test_cpu(self, hard):
result = hard.cpu()
assert result in CPU
def test_cpu_codename(self, hard):
result = hard.cpu_codename()
assert result in CPU_CODENAMES
def test_ram_type(self, hard):
result = hard.ram_type()
assert result in RAM_TYPES
def test_ram_size(self, hard):
result = hard.ram_size()
assert result in RAM_SIZES
def test_ssd_or_hdd(self, hard):
result = hard.ssd_or_hdd()
assert result in HDD_SSD
def test_graphics(self, hard):
result = hard.graphics()
assert result in GRAPHICS
def test_manufacturer(self, hard):
result = hard.manufacturer()
assert result in MANUFACTURERS
def test_phone_model(self, hard):
result = hard.phone_model()
assert result in PHONE_MODELS
class TestSeededHardware(object):
@pytest.fixture
def h1(self, seed):
return Hardware(seed=seed)
@pytest.fixture
def h2(self, seed):
return Hardware(seed=seed)
def test_resolution(self, h1, h2):
assert h1.resolution() == h2.resolution()
def test_screen_size(self, h1, h2):
assert h1.screen_size() == h2.screen_size()
def test_generation(self, h1, h2):
assert h1.generation() == h2.generation()
def test_cpu_model_code(self, h1, h2):
assert h1.cpu_model_code() == h2.cpu_model_code()
def test_cpu_frequency(self, h1, h2):
assert h1.cpu_frequency() == h2.cpu_frequency()
def test_cpu(self, h1, h2):
assert h1.cpu() == h2.cpu()
def test_cpu_codename(self, h1, h2):
assert h1.cpu_codename() == h2.cpu_codename()
def test_ram_type(self, h1, h2):
assert h1.ram_type() == h2.ram_type()
def test_ram_size(self, h1, h2):
assert h1.ram_size() == h2.ram_size()
def test_ssd_or_hdd(self, h1, h2):
assert h1.ssd_or_hdd() == h2.ssd_or_hdd()
def test_graphics(self, h1, h2):
assert h1.graphics() == h2.graphics()
def test_manufacturer(self, h1, h2):
assert h1.manufacturer() == h2.manufacturer()
def test_phone_model(self, h1, h2):
assert h1.phone_model() == h2.phone_model()
|
[
"mimesis.Hardware"
] |
[((410, 420), 'mimesis.Hardware', 'Hardware', ([], {}), '()\n', (418, 420), False, 'from mimesis import Hardware\n'), ((2121, 2140), 'mimesis.Hardware', 'Hardware', ([], {'seed': 'seed'}), '(seed=seed)\n', (2129, 2140), False, 'from mimesis import Hardware\n'), ((2201, 2220), 'mimesis.Hardware', 'Hardware', ([], {'seed': 'seed'}), '(seed=seed)\n', (2209, 2220), False, 'from mimesis import Hardware\n')]
|
import os
import subprocess
from astropy import units as u
from threading import Event
from threading import Timer
from pocs.utils import current_time
from pocs.utils import error
from pocs.utils.images import cr2 as cr2_utils
from pocs.camera import AbstractGPhotoCamera
class Camera(AbstractGPhotoCamera):
def __init__(self, *args, **kwargs):
kwargs['readout_time'] = 6.0
kwargs['file_extension'] = 'cr2'
super().__init__(*args, **kwargs)
self.logger.debug("Connecting GPhoto2 camera")
self.connect()
self.logger.debug("{} connected".format(self.name))
def connect(self):
"""Connect to Canon DSLR
Gets the serial number from the camera and sets various settings
"""
self.logger.debug('Connecting to camera')
# Get serial number
_serial_number = self.get_property('serialnumber')
if not _serial_number:
raise error.CameraNotFound("Camera not responding: {}".format(self))
self._serial_number = _serial_number
# Properties to be set upon init.
prop2index = {
'/main/actions/viewfinder': 1, # Screen off
'/main/capturesettings/autoexposuremode': 3, # 3 - Manual; 4 - Bulb
'/main/capturesettings/continuousaf': 0, # No auto-focus
'/main/capturesettings/drivemode': 0, # Single exposure
'/main/capturesettings/focusmode': 0, # Manual (don't try to focus)
'/main/capturesettings/shutterspeed': 0, # Bulb
'/main/imgsettings/imageformat': 9, # RAW
'/main/imgsettings/imageformatcf': 9, # RAW
'/main/imgsettings/imageformatsd': 9, # RAW
'/main/imgsettings/iso': 1, # ISO 100
'/main/settings/autopoweroff': 0, # Don't power off
'/main/settings/capturetarget': 0, # Capture to RAM, for download
'/main/settings/datetime': 'now', # Current datetime
'/main/settings/datetimeutc': 'now', # Current datetime
'/main/settings/reviewtime': 0, # Screen off after taking pictures
}
owner_name = 'Project PANOPTES'
artist_name = self.config.get('unit_id', owner_name)
        # The original format string ('owner_name {}') had a single placeholder
        # for two arguments and silently dropped the year; use two placeholders.
        copyright = '{} {}'.format(owner_name, current_time().datetime.year)
prop2value = {
'/main/settings/artist': artist_name,
'/main/settings/copyright': copyright,
'/main/settings/ownername': owner_name,
}
self.set_properties(prop2index, prop2value)
self._connected = True
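    # Note: the numeric values in `prop2index` are gphoto2 choice indices and
    # are specific to the camera model/firmware; other Canon bodies may need
    # different values.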
def take_observation(self, observation, headers=None, filename=None, *args, **kwargs):
"""Take an observation
Gathers various header information, sets the file path, and calls
`take_exposure`. Also creates a `threading.Event` object and a
`threading.Timer` object. The timer calls `process_exposure` after the
set amount of time is expired (`observation.exptime + self.readout_time`).
Note:
If a `filename` is passed in it can either be a full path that includes
the extension, or the basename of the file, in which case the directory
path and extension will be added to the `filename` for output
Args:
observation (~pocs.scheduler.observation.Observation): Object
describing the observation
headers (dict): Header data to be saved along with the file
filename (str, optional): Filename for saving, defaults to ISOT time stamp
**kwargs (dict): Optional keyword arguments (`exptime`)
Returns:
threading.Event: An event to be set when the image is done processing
"""
# To be used for marking when exposure is complete (see `process_exposure`)
camera_event = Event()
exptime, file_path, image_id, metadata = self._setup_observation(observation,
headers,
filename,
*args,
**kwargs)
proc = self.take_exposure(seconds=exptime, filename=file_path)
# Add most recent exposure to list
if self.is_primary:
if 'POINTING' in headers:
observation.pointing_images[image_id] = file_path.replace('.cr2', '.fits')
else:
observation.exposure_list[image_id] = file_path.replace('.cr2', '.fits')
# Process the image after a set amount of time
wait_time = exptime + self.readout_time
t = Timer(wait_time, self.process_exposure, (metadata, camera_event, proc))
t.name = '{}Thread'.format(self.name)
t.start()
return camera_event
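    # A caller can block until `process_exposure` finishes, for example:
    #   event = camera.take_observation(observation)
    #   event.wait()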
def _start_exposure(self, seconds, filename, dark, header, *args, **kwargs):
"""Take an exposure for given number of seconds and saves to provided filename
Note:
See `scripts/take_pic.sh`
Tested With:
* Canon EOS 100D
Args:
seconds (u.second, optional): Length of exposure
filename (str, optional): Image is saved to this filename
"""
script_path = '{}/scripts/take_pic.sh'.format(os.getenv('POCS'))
run_cmd = [script_path, self.port, str(seconds), filename]
# Take Picture
try:
proc = subprocess.Popen(run_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
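            # Note: Popen() itself does not raise TimeoutExpired; the handler
            # below only triggers if a timed call such as
            # proc.communicate(timeout=...) is added inside this try block.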
except error.InvalidCommand as e:
self.logger.warning(e)
except subprocess.TimeoutExpired:
self.logger.debug("Still waiting for camera")
proc.kill()
outs, errs = proc.communicate(timeout=10)
if errs is not None:
self.logger.warning(errs)
finally:
readout_args = (filename, header)
return readout_args
def _readout(self, cr2_path, info):
"""Reads out the image as a CR2 and converts to FITS"""
self.logger.debug("Converting CR2 -> FITS: {}".format(cr2_path))
fits_path = cr2_utils.cr2_to_fits(cr2_path, headers=info, remove_cr2=False)
return fits_path
|
[
"subprocess.Popen",
"threading.Timer",
"threading.Event",
"pocs.utils.current_time",
"os.getenv",
"pocs.utils.images.cr2.cr2_to_fits"
] |
[((3978, 3985), 'threading.Event', 'Event', ([], {}), '()\n', (3983, 3985), False, 'from threading import Event\n'), ((4897, 4968), 'threading.Timer', 'Timer', (['wait_time', 'self.process_exposure', '(metadata, camera_event, proc)'], {}), '(wait_time, self.process_exposure, (metadata, camera_event, proc))\n', (4902, 4968), False, 'from threading import Timer\n'), ((6528, 6591), 'pocs.utils.images.cr2.cr2_to_fits', 'cr2_utils.cr2_to_fits', (['cr2_path'], {'headers': 'info', 'remove_cr2': '(False)'}), '(cr2_path, headers=info, remove_cr2=False)\n', (6549, 6591), True, 'from pocs.utils.images import cr2 as cr2_utils\n'), ((5555, 5572), 'os.getenv', 'os.getenv', (['"""POCS"""'], {}), "('POCS')\n", (5564, 5572), False, 'import os\n'), ((5698, 5800), 'subprocess.Popen', 'subprocess.Popen', (['run_cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'universal_newlines': '(True)'}), '(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n universal_newlines=True)\n', (5714, 5800), False, 'import subprocess\n'), ((2411, 2425), 'pocs.utils.current_time', 'current_time', ([], {}), '()\n', (2423, 2425), False, 'from pocs.utils import current_time\n')]
|
# set the matplotlib backend so figures can be saved in the background
import matplotlib
matplotlib.use("Agg")
# import the necessary packages
from pyimagesearch.convautoencoder import ConvAutoencoder
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.datasets import mnist
import matplotlib.pyplot as plt
import numpy as np
import argparse
import cv2
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-s", "--samples", type=int, default=8,
help="# number of samples to visualize when decoding")
ap.add_argument("-o", "--output", type=str, default="output.png",
help="path to output visualization file")
ap.add_argument("-p", "--plot", type=str, default="plot.png",
help="path to output plot file")
args = vars(ap.parse_args())
# initialize the number of epochs to train for and batch size
EPOCHS = 25
BS = 32
# load the MNIST dataset
print("[INFO] loading MNIST dataset...")
((trainX, _), (testX, _)) = mnist.load_data()
# add a channel dimension to every image in the dataset, then scale
# the pixel intensities to the range [0, 1]
trainX = np.expand_dims(trainX, axis=-1)
testX = np.expand_dims(testX, axis=-1)
trainX = trainX.astype("float32") / 255.0
testX = testX.astype("float32") / 255.0
# construct our convolutional autoencoder
print("[INFO] building autoencoder...")
(encoder, decoder, autoencoder) = ConvAutoencoder.build(28, 28, 1)
opt = Adam(lr=1e-3)
autoencoder.compile(loss="mse", optimizer=opt)
# train the convolutional autoencoder
H = autoencoder.fit(
trainX, trainX,
validation_data=(testX, testX),
epochs=EPOCHS,
batch_size=BS)
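# The autoencoder is trained with the inputs as their own targets, so the
# MSE loss measures pixel-wise reconstruction error.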
# construct a plot that plots and saves the training history
N = np.arange(0, EPOCHS)
plt.style.use("ggplot")
plt.figure()
plt.plot(N, H.history["loss"], label="train_loss")
plt.plot(N, H.history["val_loss"], label="val_loss")
plt.title("Training Loss and Accuracy")
plt.xlabel("Epoch #")
plt.ylabel("Loss/Accuracy")
plt.legend(loc="lower left")
plt.savefig(args["plot"])
# use the convolutional autoencoder to make predictions on the
# testing images, then initialize our list of output images
print("[INFO] making predictions...")
decoded = autoencoder.predict(testX)
outputs = None
# loop over our number of output samples
for i in range(0, args["samples"]):
# grab the original image and reconstructed image
original = (testX[i] * 255).astype("uint8")
recon = (decoded[i] * 255).astype("uint8")
# stack the original and reconstructed image side-by-side
output = np.hstack([original, recon])
# if the outputs array is empty, initialize it as the current
# side-by-side image display
if outputs is None:
outputs = output
# otherwise, vertically stack the outputs
else:
outputs = np.vstack([outputs, output])
# save the outputs image to disk
cv2.imwrite(args["output"], outputs)
|
[
"matplotlib.pyplot.title",
"pyimagesearch.convautoencoder.ConvAutoencoder.build",
"argparse.ArgumentParser",
"matplotlib.pyplot.plot",
"cv2.imwrite",
"matplotlib.pyplot.legend",
"numpy.expand_dims",
"tensorflow.keras.datasets.mnist.load_data",
"numpy.hstack",
"matplotlib.pyplot.style.use",
"matplotlib.use",
"tensorflow.keras.optimizers.Adam",
"numpy.arange",
"matplotlib.pyplot.figure",
"numpy.vstack",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((89, 110), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (103, 110), False, 'import matplotlib\n'), ((430, 455), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (453, 455), False, 'import argparse\n'), ((980, 997), 'tensorflow.keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (995, 997), False, 'from tensorflow.keras.datasets import mnist\n'), ((1120, 1151), 'numpy.expand_dims', 'np.expand_dims', (['trainX'], {'axis': '(-1)'}), '(trainX, axis=-1)\n', (1134, 1151), True, 'import numpy as np\n'), ((1160, 1190), 'numpy.expand_dims', 'np.expand_dims', (['testX'], {'axis': '(-1)'}), '(testX, axis=-1)\n', (1174, 1190), True, 'import numpy as np\n'), ((1390, 1422), 'pyimagesearch.convautoencoder.ConvAutoencoder.build', 'ConvAutoencoder.build', (['(28)', '(28)', '(1)'], {}), '(28, 28, 1)\n', (1411, 1422), False, 'from pyimagesearch.convautoencoder import ConvAutoencoder\n'), ((1429, 1443), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (1433, 1443), False, 'from tensorflow.keras.optimizers import Adam\n'), ((1698, 1718), 'numpy.arange', 'np.arange', (['(0)', 'EPOCHS'], {}), '(0, EPOCHS)\n', (1707, 1718), True, 'import numpy as np\n'), ((1719, 1742), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (1732, 1742), True, 'import matplotlib.pyplot as plt\n'), ((1743, 1755), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1753, 1755), True, 'import matplotlib.pyplot as plt\n'), ((1756, 1806), 'matplotlib.pyplot.plot', 'plt.plot', (['N', "H.history['loss']"], {'label': '"""train_loss"""'}), "(N, H.history['loss'], label='train_loss')\n", (1764, 1806), True, 'import matplotlib.pyplot as plt\n'), ((1807, 1859), 'matplotlib.pyplot.plot', 'plt.plot', (['N', "H.history['val_loss']"], {'label': '"""val_loss"""'}), "(N, H.history['val_loss'], label='val_loss')\n", (1815, 1859), True, 'import matplotlib.pyplot as plt\n'), ((1860, 1899), 'matplotlib.pyplot.title', 'plt.title', (['"""Training Loss and Accuracy"""'], {}), "('Training Loss and Accuracy')\n", (1869, 1899), True, 'import matplotlib.pyplot as plt\n'), ((1900, 1921), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch #"""'], {}), "('Epoch #')\n", (1910, 1921), True, 'import matplotlib.pyplot as plt\n'), ((1922, 1949), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss/Accuracy"""'], {}), "('Loss/Accuracy')\n", (1932, 1949), True, 'import matplotlib.pyplot as plt\n'), ((1950, 1978), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower left"""'}), "(loc='lower left')\n", (1960, 1978), True, 'import matplotlib.pyplot as plt\n'), ((1979, 2004), 'matplotlib.pyplot.savefig', 'plt.savefig', (["args['plot']"], {}), "(args['plot'])\n", (1990, 2004), True, 'import matplotlib.pyplot as plt\n'), ((2796, 2832), 'cv2.imwrite', 'cv2.imwrite', (["args['output']", 'outputs'], {}), "(args['output'], outputs)\n", (2807, 2832), False, 'import cv2\n'), ((2507, 2535), 'numpy.hstack', 'np.hstack', (['[original, recon]'], {}), '([original, recon])\n', (2516, 2535), True, 'import numpy as np\n'), ((2733, 2761), 'numpy.vstack', 'np.vstack', (['[outputs, output]'], {}), '([outputs, output])\n', (2742, 2761), True, 'import numpy as np\n')]
|
# Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Compute v2 Server action implementations"""
import importlib
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.i18n import _
class CreateServerBackup(command.ShowOne):
_description = _("Create a server backup image")
IMAGE_API_VERSIONS = {
"1": "openstackclient.image.v1.image",
"2": "openstackclient.image.v2.image",
}
def get_parser(self, prog_name):
parser = super(CreateServerBackup, self).get_parser(prog_name)
parser.add_argument(
'server',
metavar='<server>',
help=_('Server to back up (name or ID)'),
)
parser.add_argument(
'--name',
metavar='<image-name>',
help=_('Name of the backup image (default: server name)'),
)
parser.add_argument(
'--type',
metavar='<backup-type>',
help=_(
'Used to populate the backup_type property of the backup '
'image (default: empty)'
),
)
parser.add_argument(
'--rotate',
metavar='<count>',
type=int,
help=_('Number of backups to keep (default: 1)'),
)
parser.add_argument(
'--wait',
action='store_true',
help=_('Wait for backup image create to complete'),
)
return parser
def take_action(self, parsed_args):
def _show_progress(progress):
if progress:
self.app.stderr.write('\rProgress: %s' % progress)
self.app.stderr.flush()
compute_client = self.app.client_manager.sdk_connection.compute
server = compute_client.find_server(parsed_args.server)
# Set sane defaults as this API wants all mouths to be fed
if parsed_args.name is None:
backup_name = server.name
else:
backup_name = parsed_args.name
if parsed_args.type is None:
backup_type = ""
else:
backup_type = parsed_args.type
if parsed_args.rotate is None:
backup_rotation = 1
else:
backup_rotation = parsed_args.rotate
compute_client.backup_server(
server.id,
backup_name,
backup_type,
backup_rotation,
)
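        # backup_server only starts image creation on the compute service; the
        # resulting image is looked up (and optionally waited on) below.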
image_client = self.app.client_manager.image
image = image_client.find_image(backup_name, ignore_missing=False)
if parsed_args.wait:
if utils.wait_for_status(
image_client.get_image,
image.id,
callback=_show_progress,
):
self.app.stdout.write('\n')
else:
msg = _('Error creating server backup: %s') % parsed_args.name
raise exceptions.CommandError(msg)
if self.app.client_manager._api_version['image'] == '1':
info = {}
info.update(image._info)
info['properties'] = utils.format_dict(info.get('properties', {}))
else:
# Get the right image module to format the output
image_module = importlib.import_module(
self.IMAGE_API_VERSIONS[
self.app.client_manager._api_version['image']
]
)
info = image_module._format_image(image)
return zip(*sorted(info.items()))
|
[
"openstackclient.i18n._",
"osc_lib.exceptions.CommandError",
"osc_lib.utils.wait_for_status",
"importlib.import_module"
] |
[((872, 905), 'openstackclient.i18n._', '_', (['"""Create a server backup image"""'], {}), "('Create a server backup image')\n", (873, 905), False, 'from openstackclient.i18n import _\n'), ((3197, 3282), 'osc_lib.utils.wait_for_status', 'utils.wait_for_status', (['image_client.get_image', 'image.id'], {'callback': '_show_progress'}), '(image_client.get_image, image.id, callback=_show_progress\n )\n', (3218, 3282), False, 'from osc_lib import utils\n'), ((3841, 3941), 'importlib.import_module', 'importlib.import_module', (["self.IMAGE_API_VERSIONS[self.app.client_manager._api_version['image']]"], {}), "(self.IMAGE_API_VERSIONS[self.app.client_manager.\n _api_version['image']])\n", (3864, 3941), False, 'import importlib\n'), ((1243, 1278), 'openstackclient.i18n._', '_', (['"""Server to back up (name or ID)"""'], {}), "('Server to back up (name or ID)')\n", (1244, 1278), False, 'from openstackclient.i18n import _\n'), ((1394, 1446), 'openstackclient.i18n._', '_', (['"""Name of the backup image (default: server name)"""'], {}), "('Name of the backup image (default: server name)')\n", (1395, 1446), False, 'from openstackclient.i18n import _\n'), ((1563, 1651), 'openstackclient.i18n._', '_', (['"""Used to populate the backup_type property of the backup image (default: empty)"""'], {}), "('Used to populate the backup_type property of the backup image (default: empty)'\n )\n", (1564, 1651), False, 'from openstackclient.i18n import _\n'), ((1830, 1873), 'openstackclient.i18n._', '_', (['"""Number of backups to keep (default: 1)"""'], {}), "('Number of backups to keep (default: 1)')\n", (1831, 1873), False, 'from openstackclient.i18n import _\n'), ((1986, 2031), 'openstackclient.i18n._', '_', (['"""Wait for backup image create to complete"""'], {}), "('Wait for backup image create to complete')\n", (1987, 2031), False, 'from openstackclient.i18n import _\n'), ((3505, 3533), 'osc_lib.exceptions.CommandError', 'exceptions.CommandError', (['msg'], {}), '(msg)\n', (3528, 3533), False, 'from osc_lib import exceptions\n'), ((3426, 3463), 'openstackclient.i18n._', '_', (['"""Error creating server backup: %s"""'], {}), "('Error creating server backup: %s')\n", (3427, 3463), False, 'from openstackclient.i18n import _\n')]
|
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from typing import Any, Dict
from twisted.web import resource
from twisted.web.http import Request
from hathor.api_util import set_cors
from hathor.cli.openapi_files.register import register_resource
from hathor.conf import HathorSettings
settings = HathorSettings()
@register_resource
class TokenResource(resource.Resource):
""" Implements a web server API to return token information.
You must run with option `--status <PORT>`.
"""
isLeaf = True
def __init__(self, manager):
self.manager = manager
def get_one_token_data(self, token_uid: bytes) -> Dict[str, Any]:
# Get one token data specified in id
try:
token_info = self.manager.tx_storage.tokens_index.get_token_info(token_uid)
except KeyError:
return {'success': False, 'message': 'Unknown token'}
mint = []
melt = []
transactions_count = self.manager.tx_storage.tokens_index.get_transactions_count(token_uid)
for tx_hash, index in token_info.mint:
mint.append({
'tx_id': tx_hash.hex(),
'index': index
})
for tx_hash, index in token_info.melt:
melt.append({
'tx_id': tx_hash.hex(),
'index': index
})
data = {
'name': token_info.name,
'symbol': token_info.symbol,
'success': True,
'mint': mint,
'melt': melt,
'total': token_info.total,
'transactions_count': transactions_count,
}
return data
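    # The `mint` and `melt` lists reference the transaction outputs
    # (tx_id, index) that currently hold the token's mint/melt authorities.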
def get_list_token_data(self) -> Dict[str, Any]:
# XXX We should change this in the future so we don't return all tokens in one request
# XXX Right now, the way we have the tokens index is not easy to do it but in the future
# XXX when the number of tokens grow we should refactor this resource
# XXX For now we only set a fixed limit of 200 tokens to return
# Get all tokens
all_tokens = self.manager.tx_storage.tokens_index.tokens
tokens = []
count = 0
limit = 200
truncated = False
for uid, token_info in all_tokens.items():
            # Compare byte strings by value; `is` tests object identity and
            # can silently fail to skip the native token.
            if uid == settings.HATHOR_TOKEN_UID:
continue
if count >= limit:
truncated = True
break
tokens.append(
{
'uid': uid.hex(),
'name': token_info.name,
'symbol': token_info.symbol,
}
)
count += 1
data = {
'success': True,
'tokens': tokens,
'truncated': truncated,
}
return data
def render_GET(self, request: Request) -> bytes:
""" GET request for /thin_wallet/token/
Expects 'id' (hash) as GET parameter of the queried token
:rtype: string (json)
"""
request.setHeader(b'content-type', b'application/json; charset=utf-8')
set_cors(request, 'GET')
if not self.manager.tx_storage.tokens_index:
request.setResponseCode(503)
return json.dumps({'success': False}).encode('utf-8')
if b'id' in request.args:
try:
token_uid_str = request.args[b'id'][0].decode('utf-8')
token_uid = bytes.fromhex(token_uid_str)
except (ValueError, AttributeError):
return json.dumps({'success': False, 'message': 'Invalid token id'}).encode('utf-8')
data = self.get_one_token_data(token_uid)
else:
data = self.get_list_token_data()
return json.dumps(data).encode('utf-8')
TokenResource.openapi = {
'/thin_wallet/token': {
'x-visibility': 'public',
'x-rate-limit': {
'global': [
{
'rate': '100r/s',
'burst': 100,
'delay': 50
}
],
'per-ip': [
{
'rate': '3r/s',
'burst': 10,
'delay': 3
}
]
},
'get': {
'tags': ['wallet'],
'operationId': 'token',
            'summary': 'Get information about a token if a token ID is sent; otherwise return the list of tokens',
'parameters': [
{
'name': 'id',
'in': 'query',
'description': 'Token id',
'required': True,
'schema': {
'type': 'string'
}
},
],
'responses': {
'200': {
'description': 'Success',
'content': {
'application/json': {
'examples': {
'success': {
'summary': 'Success',
'value': {
'success': True,
'name': 'MyCoin',
'symbol': 'MYC',
'mint': [
{
"tx_id": "00000299670db5814f69cede8b347f83"
"0f73985eaa4cd1ce87c9a7c793771336",
"index": 0
}
],
'melt': [
{
"tx_id": "00000299670db5814f69cede8b347f83"
"0f73985eaa4cd1ce87c9a7c793771336",
"index": 1
}
],
'total': 50000,
'transactions_count': 3,
}
},
'error': {
'summary': 'Invalid token id',
'value': {
'success': False,
'message': 'Invalid token id',
}
},
'success_list': {
'summary': 'List of tokens success',
'value': {
'success': True,
'truncated': False,
'tokens': [
{
'uid': "00000b1b8b1df522489f9aa38cba82a4"
"<KEY>",
'name': 'MyCoin',
'symbol': 'MYC',
},
{
'uid': "00000093f76f44c664907a017bbf9ef6"
"bb289692e30c7cf7361e6872c5ee1796",
'name': '<NAME>',
'symbol': 'NTK',
},
],
}
},
}
}
}
}
}
}
}
}
|
[
"hathor.api_util.set_cors",
"hathor.conf.HathorSettings",
"json.dumps"
] |
[((840, 856), 'hathor.conf.HathorSettings', 'HathorSettings', ([], {}), '()\n', (854, 856), False, 'from hathor.conf import HathorSettings\n'), ((3657, 3681), 'hathor.api_util.set_cors', 'set_cors', (['request', '"""GET"""'], {}), "(request, 'GET')\n", (3665, 3681), False, 'from hathor.api_util import set_cors\n'), ((4304, 4320), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4314, 4320), False, 'import json\n'), ((3796, 3826), 'json.dumps', 'json.dumps', (["{'success': False}"], {}), "({'success': False})\n", (3806, 3826), False, 'import json\n'), ((4095, 4156), 'json.dumps', 'json.dumps', (["{'success': False, 'message': 'Invalid token id'}"], {}), "({'success': False, 'message': 'Invalid token id'})\n", (4105, 4156), False, 'import json\n')]
|
from django.urls import path, re_path
from . import views
app_name = "core"
urlpatterns = [
path("", views.home, name="home"),
path("url/<int:pk>", views.modify, name="modify"),
re_path(r"^([a-zA-Z0-9_-]+)/$", views.forward, name="forward"),
]
|
[
"django.urls.re_path",
"django.urls.path"
] |
[((98, 131), 'django.urls.path', 'path', (['""""""', 'views.home'], {'name': '"""home"""'}), "('', views.home, name='home')\n", (102, 131), False, 'from django.urls import path, re_path\n'), ((137, 186), 'django.urls.path', 'path', (['"""url/<int:pk>"""', 'views.modify'], {'name': '"""modify"""'}), "('url/<int:pk>', views.modify, name='modify')\n", (141, 186), False, 'from django.urls import path, re_path\n'), ((192, 253), 'django.urls.re_path', 're_path', (['"""^([a-zA-Z0-9_-]+)/$"""', 'views.forward'], {'name': '"""forward"""'}), "('^([a-zA-Z0-9_-]+)/$', views.forward, name='forward')\n", (199, 253), False, 'from django.urls import path, re_path\n')]
|
# Generated by Django 3.0.5 on 2020-05-05 23:02
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('abstracts', '0058_conference_editing_user'),
]
operations = [
migrations.AlterModelOptions(
name='organizer',
options={'ordering': ['abbreviation']},
),
]
|
[
"django.db.migrations.AlterModelOptions"
] |
[((234, 325), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""organizer"""', 'options': "{'ordering': ['abbreviation']}"}), "(name='organizer', options={'ordering': [\n 'abbreviation']})\n", (262, 325), False, 'from django.db import migrations\n')]
|
import dace
import dace.graph.labeling
import sys
import time
print(time.time(), 'loading')
a = dace.SDFG.from_file(sys.argv[1])
print(time.time(), 'propagating')
dace.graph.labeling.propagate_labels_sdfg(a)
print(time.time(), 'drawing')
a.draw_to_file()
exit()  # NOTE: everything below this call is unreachable (debugging leftover)
a.apply_strict_transformations()
a.apply_strict_transformations()
a.draw_to_file()
|
[
"dace.SDFG.from_file",
"dace.graph.labeling.propagate_labels_sdfg",
"time.time"
] |
[((97, 129), 'dace.SDFG.from_file', 'dace.SDFG.from_file', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (116, 129), False, 'import dace\n'), ((164, 208), 'dace.graph.labeling.propagate_labels_sdfg', 'dace.graph.labeling.propagate_labels_sdfg', (['a'], {}), '(a)\n', (205, 208), False, 'import dace\n'), ((69, 80), 'time.time', 'time.time', ([], {}), '()\n', (78, 80), False, 'import time\n'), ((136, 147), 'time.time', 'time.time', ([], {}), '()\n', (145, 147), False, 'import time\n'), ((215, 226), 'time.time', 'time.time', ([], {}), '()\n', (224, 226), False, 'import time\n')]
|
from common import db
from discord.ext import commands
from modules.utils import isAdmin
class Notify(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="notify")
async def toggle_notify(self, ctx):
if isAdmin(ctx.message.author, ctx.guild.id):
notify = db.toggle_notify(ctx.guild.id)
if notify:
await ctx.send(
"Notifications have been set to **ON** for this server.\n"
"Use this command again to turn them off."
)
else:
await ctx.send(
"Notifications have been set to **OFF** for this server.\n"
"Use this command again to turn them on."
)
|
[
"modules.utils.isAdmin",
"common.db.toggle_notify",
"discord.ext.commands.command"
] |
[((177, 208), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""notify"""'}), "(name='notify')\n", (193, 208), False, 'from discord.ext import commands\n'), ((260, 301), 'modules.utils.isAdmin', 'isAdmin', (['ctx.message.author', 'ctx.guild.id'], {}), '(ctx.message.author, ctx.guild.id)\n', (267, 301), False, 'from modules.utils import isAdmin\n'), ((324, 354), 'common.db.toggle_notify', 'db.toggle_notify', (['ctx.guild.id'], {}), '(ctx.guild.id)\n', (340, 354), False, 'from common import db\n')]
|
import os, sys
import re
def check_url(url):
""" Check if the url available """
ret = 0
code = 404
try:
from urllib2 import urlopen
except:
from urllib.request import urlopen
try:
resp = urlopen(url)
code = resp.getcode()
except:
pass
if code != 200:
ret = 1
return ret
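# Example: check_url("https://github.com") returns 0 when the server answers
# with HTTP 200, and 1 otherwise (including unreachable hosts, since any
# exception is swallowed above).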
def download_tools(url, dest_dir):
""" Download tools to dest_dir """
ret = 0
print ("kconfig tools missing, start download ...")
print ("%s -> %s" % (url, dest_dir))
cmd = "git clone %s %s" % (url, dest_dir)
ret = os.system(cmd)
return ret
def main():
if len(sys.argv) < 3:
print ("Usage: %s <url> <dest_dir>" % sys.argv[0])
return 1
url = sys.argv[1]
dest_dir = sys.argv[2]
#if check_url(url):
# print ("\nCan't reach url: %s" % url)
# print ("Please check your network and download it manually:\n")
# print (" $ git clone %s %s\n" % (url, dest_dir))
# return 1
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
return download_tools(url, dest_dir)
if __name__ == "__main__":
ret = main()
sys.exit(ret)
|
[
"os.makedirs",
"os.path.exists",
"os.system",
"urllib.request.urlopen",
"sys.exit"
] |
[((600, 614), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (609, 614), False, 'import os, sys\n'), ((1177, 1190), 'sys.exit', 'sys.exit', (['ret'], {}), '(ret)\n', (1185, 1190), False, 'import os, sys\n'), ((238, 250), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (245, 250), False, 'from urllib.request import urlopen\n'), ((1030, 1054), 'os.path.exists', 'os.path.exists', (['dest_dir'], {}), '(dest_dir)\n', (1044, 1054), False, 'import os, sys\n'), ((1064, 1085), 'os.makedirs', 'os.makedirs', (['dest_dir'], {}), '(dest_dir)\n', (1075, 1085), False, 'import os, sys\n')]
|
from setuptools import setup
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=False)
setup(
name = "lrs",
version = "0.0.0",
author = "ADL",
packages=['lrs'],
install_requires=[str(ir.req) for ir in install_reqs],
)
|
[
"pip.req.parse_requirements"
] |
[((84, 137), 'pip.req.parse_requirements', 'parse_requirements', (['"""requirements.txt"""'], {'session': '(False)'}), "('requirements.txt', session=False)\n", (102, 137), False, 'from pip.req import parse_requirements\n')]
|
from profiles.models import *
from django.contrib import admin
admin.site.register(FriendGroup)
admin.site.register(UserProfile)
|
[
"django.contrib.admin.site.register"
] |
[((64, 96), 'django.contrib.admin.site.register', 'admin.site.register', (['FriendGroup'], {}), '(FriendGroup)\n', (83, 96), False, 'from django.contrib import admin\n'), ((97, 129), 'django.contrib.admin.site.register', 'admin.site.register', (['UserProfile'], {}), '(UserProfile)\n', (116, 129), False, 'from django.contrib import admin\n')]
|
from typing import List, Tuple, Type
import numpy as np
from continuum.datasets.base import _ContinuumDataset
from continuum.datasets.pytorch import (CIFAR10, CIFAR100, KMNIST, MNIST, FashionMNIST)
class Fellowship(_ContinuumDataset):
def __init__(
self,
dataset_list: List[Type[_ContinuumDataset]],
data_path: str = "",
download: bool = True,
):
super().__init__(data_path, download)
self.datasets = [dataset(data_path, download) for dataset in dataset_list]
def init(self, train: bool) -> Tuple[np.ndarray, np.ndarray, None]:
x, y = [], []
class_counter = 0
for dataset in self.datasets:
data = dataset.init(train)
x.append(data[0])
y.append(data[1] + class_counter)
class_counter += len(np.unique(data[1]))
x = np.concatenate(x)
y = np.concatenate(y)
return x, y, None
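    # `class_counter` shifts each dataset's labels past those already used, so
    # in e.g. MNISTFellowship the FashionMNIST classes become 10..19 and the
    # KMNIST classes 20..29 instead of colliding with MNIST's 0..9.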
class MNISTFellowship(Fellowship):
def __init__(self, data_path: str = "", download: bool = True) -> None:
super().__init__([MNIST, FashionMNIST, KMNIST], data_path, download)
class CIFARFellowship(Fellowship):
def __init__(self, data_path: str = "", download: bool = True) -> None:
super().__init__([CIFAR10, CIFAR100], data_path, download)
|
[
"numpy.unique",
"numpy.concatenate"
] |
[((865, 882), 'numpy.concatenate', 'np.concatenate', (['x'], {}), '(x)\n', (879, 882), True, 'import numpy as np\n'), ((895, 912), 'numpy.concatenate', 'np.concatenate', (['y'], {}), '(y)\n', (909, 912), True, 'import numpy as np\n'), ((832, 850), 'numpy.unique', 'np.unique', (['data[1]'], {}), '(data[1])\n', (841, 850), True, 'import numpy as np\n')]
|
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
from math import log
from random import randint #, shuffle, sample
from functools import reduce
from operator import __or__
import numpy as np
import pytest
from mmgroup.bitfunctions import bit_mat_mul, bit_mat_inverse
from mmgroup.clifford12 import bitmatrix64_t
from mmgroup.clifford12 import bitmatrix64_echelon_h
from mmgroup.clifford12 import bitmatrix64_echelon_l
from mmgroup.clifford12 import bitmatrix64_cap_h
from mmgroup.clifford12 import bitmatrix64_mul
from mmgroup.clifford12 import bitmatrix64_inv
from mmgroup.clifford12 import bitmatrix64_error_pool
#####################################################################
# Test function bitmatrix64_t()
#####################################################################
def rand_bit_matrix(rows, cols):
m = (1 << cols) - 1
return [randint(0, m) for i in range(rows)]
def as_bit_array(m, cols):
a = np.zeros( (len(m), cols), dtype = np.uint8)
for i in range(len(m)):
for j in range(cols):
a[i,j] = (int(m[i]) >> j) & 1
return a
def create_bitmatrices():
"""yield a bit matrix as a list of integers """
for rows in range(8):
for cols in range(8):
m = rand_bit_matrix(rows, cols)
yield m, cols
@pytest.mark.qstate
def test_bitmatrix_t(verbose = 0):
"""Test the transposition of a bit matrix"""
for ntest, (m, cols) in enumerate(create_bitmatrices()):
t = bitmatrix64_t(m, cols)
m1 = as_bit_array(m, cols)
#print(m1, "shp", m1.shape)
t1 = as_bit_array(t, len(m))
if verbose:
print("Test %d: " % ntest)
print("Transpose %s bit matrix, m =" % str(m1.shape))
print(m1, "\nTransposed: %s\n%s" % (str(t1.shape), t1))
assert m1.T.shape == t1.shape, (m1.shape, t1.shape)
assert (m1.T == t1).all()
#####################################################################
# Test functions bitmatrix64_echelon_h(), bitmatrix64_echelon_l()
#####################################################################
def create_echelon_matrices():
"""yield a bit matrix as a list of integers """
for rows in range(8):
for _ in range(8):
m = rand_bit_matrix(rows, 64)
i, j = randint(0,63), randint(0,63)
yield m, min(i,j), max(i,j)
def ref_echelon(m, j0 = 64, n = 64, high = True):
m = [int(x) for x in m]
row = 0
if high:
assert n <= j0
range_ = range(j0-1, j0-n-1, -1)
mask = ((1 << n) - 1) << (j0 - n)
else:
range_ = range(j0, j0+n)
mask = ((1 << n) - 1) << (j0)
for col in range_:
col_mask = 1 << col
for i in range(len(m)-1, row-1, -1):
if m[i] & col_mask:
for i1 in range(i-1, -1, -1):
if m[i1] & col_mask:
m[i1] ^= m[i]
m[i], m[row] = m[row], m[i]
row += 1
break
return row, m
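# ref_echelon is a reference implementation of Gaussian elimination over
# GF(2): it sweeps the selected bit columns (from the highest down when
# high=True, upwards otherwise) and returns the number of pivot rows found
# together with the transformed matrix.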
@pytest.mark.qstate
def test_bitmatrix_echelon(verbose = 0):
for ntest, (m, imin, imax) in enumerate(create_echelon_matrices()):
m1h = np.array(m, dtype = np.uint64, copy = True)
j0h, nh = imax + 1, imax - imin
lenh = bitmatrix64_echelon_h(m1h, len(m1h), j0h, nh)
m1h = list(m1h)
lenh_ref, m1h_ref = ref_echelon(m, j0h, nh)
ok_h = m1h == m1h_ref and lenh == lenh_ref
m1l = np.array(m, dtype = np.uint64, copy = True)
j0l, nl = imin, imax - imin
lenl = bitmatrix64_echelon_l(m1l, len(m1l), j0l, nl)
m1l = list(m1l)
lenl_ref, m1l_ref = ref_echelon(m, j0l, nl, False)
ok_l = m1l == m1l_ref and lenl == lenl_ref
ok = ok_h and ok_l
if verbose or not ok:
print("Test ", ntest)
print("m =", [hex(x) for x in m], j0h, nh)
print("echelon h", [hex(x) for x in m1h], lenh)
print("expected", [hex(x) for x in m1h_ref], lenh_ref)
if not ok_h:
err = "Error in function bitmatrix64_echelon_h"
raise ValueError(err)
print("m =", [hex(x) for x in m], j0l, nl)
print("echelon l", [hex(x) for x in m1l], lenl)
print("expected", [hex(x) for x in m1l_ref], lenl_ref)
if not ok_l:
err = "Error in function bitmatrix64_echelon_l"
raise ValueError(err)
#####################################################################
# Test functions bitmatrix64_cap_h()
#####################################################################
def create_cap_matrices():
"""yield a bit matrix as a list of integers """
test_matrices = [
([3,7,11], [3,1], 0,4)
]
for t in test_matrices:
yield t
for rows1 in range(5):
for rows2 in range(5):
m1 = rand_bit_matrix(rows1, 5)
m2 = rand_bit_matrix(rows2, 5)
i, j = randint(0,6), randint(0,6)
yield m1, m2, min(i,j), max(i,j)
for rows1 in range(1,65,7):
for rows2 in range(1,65,7):
m1 = rand_bit_matrix(rows1, 64)
m2 = rand_bit_matrix(rows2, 64)
i, j = randint(0,63), randint(0,63)
yield m1, m2, min(i,j), max(i,j)
def basis_to_set(b):
if len(b) == 0:
return set([0])
s0 = basis_to_set(b[1:])
b0 = b[0]
return s0 | set((x ^ b0 for x in s0))
def as_set(m, mask):
m = [int(x) & int(mask) for x in m]
return basis_to_set(m)
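# basis_to_set enumerates all XOR combinations of the given rows, so
# as_set(m, mask) yields the full GF(2) span of the masked rows; it is only
# used for small matrices (see the len(m1) < 5 check below).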
@pytest.mark.qstate
def test_bitmatrix_cap(verbose = 0):
for ntest, (m1, m2, imin, imax) in enumerate(create_cap_matrices()):
if verbose:
            print("Test", ntest+1, "imin =", imin, "imax =", imax)
print("m1 =", [hex(x) for x in m1])
print("m2 =", [hex(x) for x in m2])
m1h = np.array(m1, dtype = np.uint64, copy = True)
m2h = np.array(m2, dtype = np.uint64, copy = True)
j0h, nh = imax + 1, imax - imin + 1
l1h, l2h = bitmatrix64_cap_h(m1h, m2h, j0h, nh)
        m1h, m2h = list(m1h), list(m2h)
if verbose:
print("Non intersecting parts of m1 and m2")
print("out1 =", [hex(x) for x in m1h[:l1h]])
print("out2 =", [hex(x) for x in m2h[:l2h]])
mask = (1 << (imax + 1)) - (1 << imin)
print("Intersecting parts (mask = %s):" % hex(mask))
print("out1 =", [hex(x) for x in m1h[l1h:]])
print("out2 =", [hex(x) for x in m2h[l2h:]])
if verbose > 1:
print("Intermediate results")
pool = np.zeros(20, dtype = np.uint64)
bitmatrix64_error_pool(pool, 20)
for i in range(20): print(i, hex(pool[i]))
assert len(m1) == len(m1h) and len(m2) == len(m2h)
assert ref_echelon(m1) == ref_echelon(m1h)
assert ref_echelon(m2) == ref_echelon(m2h)
mask = (1 << (imax + 1)) - (1 << imin)
l1, l2 = len(m1), len(m2)
while l1 and int(m1h[l1-1]) & mask == 0:
l1 -= 1
while l2 and int(m2h[l2-1]) & mask == 0:
l2 -= 1
assert l1-l1h == l2-l2h, (l1, l1h, l2, l2h)
if len(m1) < 5 and len(m2) < 5:
set1 = as_set(m1h, mask)
set2 = as_set(m2h, mask)
cap = set1 & set2
set_cap1 = as_set(m1h[l1h:], mask)
set_cap2 = as_set(m2h[l2h:], mask)
            assert cap == set_cap1, (set1, set2, cap, set_cap1)
            assert cap == set_cap2, (set1, set2, cap, set_cap2)
if verbose:
            print("Intersection tested successfully")
#####################################################################
# Test functions bitmatrix64_mul() and bitmatrix64_inv()
#####################################################################
# This tests also function bitmatrix64_mask_rows()
# and bitmatrix64_add_diag().
def create_mul_inv_matrices():
    """Yield a bit matrix m1, a square bit matrix m2, and the inverse of m2 (or None if none was found)"""
for i in [1, 3, 5, 7, 17, 24, 31, 32]:
m2i = None
for _ in range(1000):
m2 = rand_bit_matrix(i, i)
try:
m2i = bit_mat_inverse(m2)
break
except ZeroDivisionError:
pass
for j in [1,2,7,32, 63, 64]:
m1 = rand_bit_matrix(j, i)
yield m1, m2, m2i
m2i = None
@pytest.mark.qstate
def test_bitmatrix_mul_inv(verbose = 0):
for ntest, (m1, m2, m2i) in enumerate(create_mul_inv_matrices()):
#print(m1, m2, m2i)
m1a = np.array(m1, dtype = np.uint64, copy = True)
m2a = np.array(m2, dtype = np.uint64, copy = True)
m3 = bit_mat_mul(m1, m2)
m3a = bitmatrix64_mul(m1a, m2a)
assert list(m3a) == m3
if m2i is not None:
m2ia = bitmatrix64_inv(m2)
assert list(m2ia) == m2i
|
[
"mmgroup.clifford12.bitmatrix64_error_pool",
"random.randint",
"mmgroup.clifford12.bitmatrix64_cap_h",
"mmgroup.bitfunctions.bit_mat_inverse",
"mmgroup.clifford12.bitmatrix64_mul",
"mmgroup.clifford12.bitmatrix64_t",
"numpy.zeros",
"mmgroup.clifford12.bitmatrix64_inv",
"numpy.array",
"mmgroup.bitfunctions.bit_mat_mul"
] |
[((929, 942), 'random.randint', 'randint', (['(0)', 'm'], {}), '(0, m)\n', (936, 942), False, 'from random import randint\n'), ((1575, 1597), 'mmgroup.clifford12.bitmatrix64_t', 'bitmatrix64_t', (['m', 'cols'], {}), '(m, cols)\n', (1588, 1597), False, 'from mmgroup.clifford12 import bitmatrix64_t\n'), ((3304, 3343), 'numpy.array', 'np.array', (['m'], {'dtype': 'np.uint64', 'copy': '(True)'}), '(m, dtype=np.uint64, copy=True)\n', (3312, 3343), True, 'import numpy as np\n'), ((3598, 3637), 'numpy.array', 'np.array', (['m'], {'dtype': 'np.uint64', 'copy': '(True)'}), '(m, dtype=np.uint64, copy=True)\n', (3606, 3637), True, 'import numpy as np\n'), ((6050, 6090), 'numpy.array', 'np.array', (['m1'], {'dtype': 'np.uint64', 'copy': '(True)'}), '(m1, dtype=np.uint64, copy=True)\n', (6058, 6090), True, 'import numpy as np\n'), ((6110, 6150), 'numpy.array', 'np.array', (['m2'], {'dtype': 'np.uint64', 'copy': '(True)'}), '(m2, dtype=np.uint64, copy=True)\n', (6118, 6150), True, 'import numpy as np\n'), ((6221, 6257), 'mmgroup.clifford12.bitmatrix64_cap_h', 'bitmatrix64_cap_h', (['m1h', 'm2h', 'j0h', 'nh'], {}), '(m1h, m2h, j0h, nh)\n', (6238, 6257), False, 'from mmgroup.clifford12 import bitmatrix64_cap_h\n'), ((8837, 8877), 'numpy.array', 'np.array', (['m1'], {'dtype': 'np.uint64', 'copy': '(True)'}), '(m1, dtype=np.uint64, copy=True)\n', (8845, 8877), True, 'import numpy as np\n'), ((8897, 8937), 'numpy.array', 'np.array', (['m2'], {'dtype': 'np.uint64', 'copy': '(True)'}), '(m2, dtype=np.uint64, copy=True)\n', (8905, 8937), True, 'import numpy as np\n'), ((8957, 8976), 'mmgroup.bitfunctions.bit_mat_mul', 'bit_mat_mul', (['m1', 'm2'], {}), '(m1, m2)\n', (8968, 8976), False, 'from mmgroup.bitfunctions import bit_mat_mul, bit_mat_inverse\n'), ((8992, 9017), 'mmgroup.clifford12.bitmatrix64_mul', 'bitmatrix64_mul', (['m1a', 'm2a'], {}), '(m1a, m2a)\n', (9007, 9017), False, 'from mmgroup.clifford12 import bitmatrix64_mul\n'), ((6822, 6851), 'numpy.zeros', 'np.zeros', (['(20)'], {'dtype': 'np.uint64'}), '(20, dtype=np.uint64)\n', (6830, 6851), True, 'import numpy as np\n'), ((6867, 6899), 'mmgroup.clifford12.bitmatrix64_error_pool', 'bitmatrix64_error_pool', (['pool', '(20)'], {}), '(pool, 20)\n', (6889, 6899), False, 'from mmgroup.clifford12 import bitmatrix64_error_pool\n'), ((9098, 9117), 'mmgroup.clifford12.bitmatrix64_inv', 'bitmatrix64_inv', (['m2'], {}), '(m2)\n', (9113, 9117), False, 'from mmgroup.clifford12 import bitmatrix64_inv\n'), ((2401, 2415), 'random.randint', 'randint', (['(0)', '(63)'], {}), '(0, 63)\n', (2408, 2415), False, 'from random import randint\n'), ((2416, 2430), 'random.randint', 'randint', (['(0)', '(63)'], {}), '(0, 63)\n', (2423, 2430), False, 'from random import randint\n'), ((5142, 5155), 'random.randint', 'randint', (['(0)', '(6)'], {}), '(0, 6)\n', (5149, 5155), False, 'from random import randint\n'), ((5156, 5169), 'random.randint', 'randint', (['(0)', '(6)'], {}), '(0, 6)\n', (5163, 5169), False, 'from random import randint\n'), ((5389, 5403), 'random.randint', 'randint', (['(0)', '(63)'], {}), '(0, 63)\n', (5396, 5403), False, 'from random import randint\n'), ((5404, 5418), 'random.randint', 'randint', (['(0)', '(63)'], {}), '(0, 63)\n', (5411, 5418), False, 'from random import randint\n'), ((8411, 8430), 'mmgroup.bitfunctions.bit_mat_inverse', 'bit_mat_inverse', (['m2'], {}), '(m2)\n', (8426, 8430), False, 'from mmgroup.bitfunctions import bit_mat_mul, bit_mat_inverse\n')]
|
from py2neo import Graph
from py2neo.packages.httpstream import http
class Neo4J_Mathcat:
__conn = ''
def __init__(self, url):
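        # raise the httpstream socket timeout so large batched transactions do not time out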
http.socket_timeout = 9999
self.__conn = Graph(url)
def __escape(self, string):
return string.replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'")
def __encodeUnicode(self, lst):
newlst = []
for ele in lst:
newlst.append(ele.encode('utf-8'))
return newlst
def __joinForArray(self, lst):
        # intended for use inside a Cypher array literal, e.g. gpid:["%s"]
return self.__escape('","'.join(self.__encodeUnicode(lst)))
def __joinForFulltext(self, lst):
return self.__escape(' ### '.join(self.__encodeUnicode(lst)))
def __create_node_statement(self, mid, doc, mathml):
return 'CREATE (:Math {mid:"%s", doc:"%s", mathml:"%s"})' % (mid, doc, self.__escape(mathml))
    def create_nodes(self, nodes):
        '''
        nodes is {doc: {mid: mathml}} --> math expressions stored as Math
        nodes with attributes (mid, doc, mathml)
        '''
tx = self.__conn.cypher.begin()
batches = 10000
cursize = 0
for doc, maths in nodes.iteritems():
for mid, mathml in maths.iteritems():
tx.append(self.__create_node_statement(mid, doc, mathml))
cursize += 1
if cursize == batches:
tx.process()
cursize = 0
tx.commit()
def __create_relationship_statement(self, midsource, midtarget, doc, heuristics):
heuristics_text = ', '.join(['%s:"%s"' % (heur, is_used) for heur, is_used in heuristics.iteritems()])
# return 'start source=node:node_auto_index(mid="%s", doc="%s"), target=node:node_auto_index(mid = "%s", doc="%s") CREATE (source)-[:R {%s}]->(target)' % (midsource, doc, midtarget, doc, heuristics_text)
return 'MATCH (source:Math), (target:Math) WHERE source.mid = "%s" AND source.doc = "%s" AND target.mid = "%s" AND target.doc = "%s" CREATE (source)-[:R {%s}]->(target)' % (midsource, doc, midtarget, doc, heuristics_text)
    def create_relationships(self, relationships):
        '''
        relationships is {doc: {(source_mid, target_mid): heuristics}}
        '''
tx = self.__conn.cypher.begin()
batches = 10000
cursize = 0
for doc, relations in relationships.iteritems():
for relation, heuristics in relations.iteritems():
midsource = relation[0]
midtarget = relation[1]
tx.append(self.__create_relationship_statement(midsource, midtarget, doc, heuristics))
cursize += 1
if cursize == batches:
tx.process()
cursize = 0
tx.commit()
|
[
"py2neo.Graph"
] |
[((194, 204), 'py2neo.Graph', 'Graph', (['url'], {}), '(url)\n', (199, 204), False, 'from py2neo import Graph\n')]
|
from sklearn.decomposition import FactorAnalysis
from sklearn.decomposition import FastICA
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.decomposition import TruncatedSVD
from sklearn.decomposition import NMF
from sklearn.manifold import Isomap
from sklearn.manifold import MDS
from sklearn.manifold import LocallyLinearEmbedding
from sklearn.manifold import SpectralEmbedding
from sklearn.manifold import TSNE
from umap import UMAP
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from ml.analysis.pca import PCA
import keras
import numpy as np
import pandas as pd
class Autoencoder:
def __init__(self, n_components, n_layers = 1, **kwargs):
self.n_components = n_components
self.n_layers = n_layers
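        # note: n_layers is stored but not used; fit() builds a single hidden layer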
self.kwargs = kwargs
def fit(self, X, y = None):
        input_ = keras.layers.Input(shape=(X.shape[1],))
encoded = keras.layers.Dense(self.n_components, activation='relu')(input_)
decoded = keras.layers.Dense(X.shape[1], activation='relu')(encoded)
        self.autoencoder = keras.Model(input_, decoded)
        self.encoder = keras.Model(input_, encoded)
        self.autoencoder.compile(loss = keras.losses.MeanSquaredError())
        self.autoencoder.fit(X, X, epochs = 100, batch_size = 64, shuffle=True)
def transform(self, X, y = None):
return self.encoder.predict(X)
def fit_transform(self, X, y = None):
self.fit(X)
return self.encoder.predict(X)
class DimensionalityReducer:
    def __init__(self, reducer, **kwargs):
        """
        Constructor
        Parameters
        ----------
        reducer : str
            name of the dimensionality reduction algorithm to be applied
            (one of: 'factor_analysis', 'pca', 'ica', 'isomap',
            'locally_linear_embedding', 'spectral_embedding', 'tsne', 'mds',
            'umap', 'latent_dirichlet', 'truncated_svd', 'nmf',
            'linear_discriminant', 'autoencoder')
        **kwargs :
            optional and positional arguments of the chosen algorithm (reducer)
        Returns
        -------
        DimensionalityReducer
        Examples
        ---------
        PCA:         r = DimensionalityReducer('pca', n_components=2) #Instantiating
                     r.fit(X) #fitting (y is optional for the unsupervised reducers)
                     X = r.transform(X) #transforming
        autoencoder: r = DimensionalityReducer('autoencoder', n_components=2) #Instantiating
                     r.fit(X) #fitting
                     X = r.transform(X) #transforming
        to better understand the optional arguments of each algorithm see the
        scikit-learn, umap-learn and keras documentation of the chosen reducer
        """
self.reducer = reducer
self.reducers = {'factor_analysis': FactorAnalysis,
'pca': PCA,
'ica': FastICA,
'isomap': Isomap,
'locally_linear_embedding': LocallyLinearEmbedding,
'spectral_embedding': SpectralEmbedding,
'tsne': TSNE,
'mds':MDS,
'umap':UMAP,
'latent_dirichlet':LatentDirichletAllocation,
'truncated_svd':TruncatedSVD,
'nmf':NMF,
'linear_discriminant':LinearDiscriminantAnalysis,
'autoencoder':Autoencoder}
self.kwargs = kwargs
self.fitted = False
self.reduction = self.reducers[self.reducer](**self.kwargs)
    def fit(self, X: pd.DataFrame, y = None):
        """
        Fit the chosen reducer.
        Parameters
        ----------
        X : pd.DataFrame
            features to be reduced
        y : pd.DataFrame
            target values (required by supervised reducers such as
            'linear_discriminant')
        Returns
        -------
        None
        """
self.columns = X.columns
self.reduction.fit(X,y)
self.fitted = True
    def transform(self, df: pd.DataFrame, y = None):
        """
        Reduce dimensionality based on fit
        Parameters
        ----------
        pd.DataFrame
            dataframe with features to be reduced
        Returns
        -------
        df : pd.DataFrame
            dataframe with the reduced components only
        """
if not self.fitted:
raise Exception("Not yet trained.")
return self.reduction.transform(df)
    def fit_transform(self, df: pd.DataFrame, y = None):
        """
        Fit the reducer and reduce dimensionality in a single step
        Parameters
        ----------
        pd.DataFrame
            dataframe with features to be reduced
        Returns
        -------
        df : pd.DataFrame
            dataframe with the reduced components only
        """
return self.reduction.fit_transform(df, y)
    def inverse_transform(self, df: pd.DataFrame):
        """
        Map reduced components back to the original feature space via the
        reducer's inverse_transform (not every reducer implements it)
        Parameters
        ----------
        df : pd.DataFrame
            dataframe with reduced components to be mapped back
        Returns
        -------
        pd.DataFrame
        """
if not self.fitted:
raise Exception("Not yet trained.")
return self.reduction.inverse_transform(df)
|
[
"keras.layers.Input",
"keras.layers.Dense",
"keras.Model",
"keras.losses.MeanSquaredError"
] |
[((853, 889), 'keras.layers.Input', 'keras.layers.Input', ([], {'shape': 'X.shape[1]'}), '(shape=X.shape[1])\n', (871, 889), False, 'import keras\n'), ((1080, 1108), 'keras.Model', 'keras.Model', (['input_', 'decoded'], {}), '(input_, decoded)\n', (1091, 1108), False, 'import keras\n'), ((1131, 1159), 'keras.Model', 'keras.Model', (['input_', 'encoded'], {}), '(input_, encoded)\n', (1142, 1159), False, 'import keras\n'), ((910, 966), 'keras.layers.Dense', 'keras.layers.Dense', (['self.n_components'], {'activation': '"""relu"""'}), "(self.n_components, activation='relu')\n", (928, 966), False, 'import keras\n'), ((993, 1042), 'keras.layers.Dense', 'keras.layers.Dense', (['X.shape[1]'], {'activation': '"""relu"""'}), "(X.shape[1], activation='relu')\n", (1011, 1042), False, 'import keras\n'), ((1200, 1231), 'keras.losses.MeanSquaredError', 'keras.losses.MeanSquaredError', ([], {}), '()\n', (1229, 1231), False, 'import keras\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import LeaveOneOut
from sklearn.metrics import accuracy_score
def main():
    # load the iris dataset
dataset = datasets.load_iris()
    # extract the feature data and the label data
features = dataset.data
targets = dataset.target
    # list to hold the predicted labels
predicted_labels = []
    # check generalization performance with the leave-one-out (LOO) method
loo = LeaveOneOut()
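    # LeaveOneOut yields as many splits as there are samples, each holding out exactly one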
for train, test in loo.split(features):
        # data used for training
train_data = features[train]
target_data = targets[train]
        # train the model
clf = DecisionTreeClassifier()
clf.fit(train_data, target_data)
        # check that the held-out test sample is classified correctly
predicted_label = clf.predict(features[test])
predicted_labels.append(predicted_label)
    # print the accuracy (generalization performance) on the test data
score = accuracy_score(targets, predicted_labels)
print(score)
if __name__ == '__main__':
main()
|
[
"sklearn.datasets.load_iris",
"sklearn.metrics.accuracy_score",
"sklearn.tree.DecisionTreeClassifier",
"sklearn.model_selection.LeaveOneOut"
] |
[((265, 285), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {}), '()\n', (283, 285), False, 'from sklearn import datasets\n'), ((450, 463), 'sklearn.model_selection.LeaveOneOut', 'LeaveOneOut', ([], {}), '()\n', (461, 463), False, 'from sklearn.model_selection import LeaveOneOut\n'), ((880, 921), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['targets', 'predicted_labels'], {}), '(targets, predicted_labels)\n', (894, 921), False, 'from sklearn.metrics import accuracy_score\n'), ((636, 660), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (658, 660), False, 'from sklearn.tree import DecisionTreeClassifier\n')]
|
import json
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.utils import timezone
from apps.core.models import Service, Statistic
class Command(BaseCommand):
help = 'Get statistics of users'
def build_template(self, services):
self.stats = {}
for service in services:
self.stats[service] = {
'account': {
'all': 0,
'email': 0,
'fb': 0,
'tw': 0,
'kaist': 0,
'test': 0,
},
'gender': {
'male': 0,
'female': 0,
'hide': 0,
'etc': 0,
},
'birth_year': {},
'kaist': {
'start_year': {},
'birth_year': {},
'gender': {
'male': 0,
'female': 0,
},
'department': {},
'employee': 0,
'professor': 0,
},
}
def add_basic_stat(self, service, user):
stat = self.stats[service]
stat['account']['all'] += 1
if user.profile.email_authed:
stat['account']['email'] += 1
if user.profile.facebook_id:
stat['account']['fb'] += 1
if user.profile.twitter_id:
stat['account']['tw'] += 1
if user.profile.kaist_id:
stat['account']['kaist'] += 1
if user.profile.test_enabled:
stat['account']['test'] += 1
if user.profile.gender == '*M':
stat['gender']['male'] += 1
elif user.profile.gender == '*F':
stat['gender']['female'] += 1
elif user.profile.gender == '*H':
stat['gender']['hide'] += 1
else:
stat['gender']['etc'] += 1
if user.profile.birthday:
year = user.profile.birthday.year
if year in stat['birth_year']:
stat['birth_year'][year] += 1
else:
stat['birth_year'][year] = 1
def add_kaist_stat(self, service, kaist_info):
kaist_stat = self.stats[service]['kaist']
if 'ku_std_no' in kaist_info and \
len(kaist_info['ku_std_no']) == 8:
start_year = kaist_info['ku_std_no'][:4]
if start_year in kaist_stat['start_year']:
kaist_stat['start_year'][start_year] += 1
else:
kaist_stat['start_year'][start_year] = 1
if 'ku_born_date' in kaist_info:
birth_year = kaist_info['ku_born_date'][:4]
if birth_year in kaist_stat['birth_year']:
kaist_stat['birth_year'][birth_year] += 1
else:
kaist_stat['birth_year'][birth_year] = 1
if 'ku_sex' in kaist_info:
gender = 'male' if kaist_info['ku_sex'] == 'M' else 'female'
kaist_stat['gender'][gender] += 1
if 'ku_kaist_org_id' in kaist_info:
department = kaist_info['ku_kaist_org_id']
if department in kaist_stat['department']:
kaist_stat['department'][department] += 1
else:
kaist_stat['department'][department] = 1
if 'employeeType' in kaist_info:
p_type = kaist_info['employeeType']
if 'E' in p_type:
kaist_stat['employee'] += 1
elif 'P' in p_type:
kaist_stat['professor'] += 1
def handle(self, *args, **options):
        # archive statistics older than two months to a file, then delete them
two_month = (timezone.now() - timedelta(days=60)) \
.replace(hour=0, minute=0, second=0, microsecond=0)
old_stats = Statistic.objects.filter(time__lt=two_month)
with open(settings.STAT_FILE, 'a') as f:
for old_stat in old_stats:
f.write(old_stat.pretty() + '\n')
old_stats.delete()
# collect statistics of all services and all users
services = list(map(lambda x: x.name,
Service.objects.all())) + ['all']
users = User.objects.exclude(profile__expire_time__isnull=False) \
.exclude(profile__test_only=True)
self.build_template(services)
for user in users:
services = list(map(lambda x: x.service.name,
filter(lambda x: not x.unregister_time,
user.services.all()))) + ['all']
for service in services:
self.add_basic_stat(service, user)
if user.profile.kaist_info:
kaist_info = json.loads(user.profile.kaist_info)
self.add_kaist_stat(service, kaist_info)
Statistic(time=timezone.now(), data=json.dumps(self.stats)).save()
|
[
"json.loads",
"django.utils.timezone.now",
"apps.core.models.Statistic.objects.filter",
"json.dumps",
"datetime.timedelta",
"django.contrib.auth.models.User.objects.exclude",
"apps.core.models.Service.objects.all"
] |
[((3938, 3982), 'apps.core.models.Statistic.objects.filter', 'Statistic.objects.filter', ([], {'time__lt': 'two_month'}), '(time__lt=two_month)\n', (3962, 3982), False, 'from apps.core.models import Service, Statistic\n'), ((4332, 4388), 'django.contrib.auth.models.User.objects.exclude', 'User.objects.exclude', ([], {'profile__expire_time__isnull': '(False)'}), '(profile__expire_time__isnull=False)\n', (4352, 4388), False, 'from django.contrib.auth.models import User\n'), ((3815, 3829), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3827, 3829), False, 'from django.utils import timezone\n'), ((3832, 3850), 'datetime.timedelta', 'timedelta', ([], {'days': '(60)'}), '(days=60)\n', (3841, 3850), False, 'from datetime import timedelta\n'), ((4282, 4303), 'apps.core.models.Service.objects.all', 'Service.objects.all', ([], {}), '()\n', (4301, 4303), False, 'from apps.core.models import Service, Statistic\n'), ((4886, 4921), 'json.loads', 'json.loads', (['user.profile.kaist_info'], {}), '(user.profile.kaist_info)\n', (4896, 4921), False, 'import json\n'), ((5007, 5021), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (5019, 5021), False, 'from django.utils import timezone\n'), ((5028, 5050), 'json.dumps', 'json.dumps', (['self.stats'], {}), '(self.stats)\n', (5038, 5050), False, 'import json\n')]
|
# IMPORTATION STANDARD
import requests
import logging
from typing import Optional
# IMPORTATION THIRD PARTY
# IMPORTATION INTERNAL
import degiro_connector.core.constants.urls as urls
from degiro_connector.quotecast.models.quotecast_pb2 import (
Quotecast,
)
from degiro_connector.core.abstracts.abstract_action import AbstractAction
class ActionSubscribe(AbstractAction):
@staticmethod
def quotecast_request_to_api(request: Quotecast.Request) -> str:
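        """Serialize the request's (un)subscriptions into the raw "controlData" payload of a_req()/a_rel() calls."""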
payload = '{"controlData":"'
for vwd_id in request.subscriptions:
for metric_name in request.subscriptions[vwd_id]:
payload += "a_req(" + vwd_id + "." + metric_name + ");"
for vwd_id in request.unsubscriptions:
for metric_name in request.unsubscriptions[vwd_id]:
payload += "a_rel(" + vwd_id + "." + metric_name + ");"
payload += '"}'
return payload
@classmethod
def subscribe(
cls,
request: Quotecast.Request,
session_id: str,
session: requests.Session = None,
logger: logging.Logger = None,
) -> Optional[bool]:
"""Adds/removes metric from the data-stream.
Args:
            request (Quotecast.Request):
List of subscriptions & unsubscriptions to do.
Example :
request = Quotecast.Request()
request.subscriptions['360015751'].extend([
'LastPrice',
'LastVolume',
])
request.subscriptions['AAPL.BATS,E'].extend([
'LastPrice',
'LastVolume',
])
request.unsubscriptions['360015751'].extend([
'LastPrice',
'LastVolume',
])
session_id (str):
API's session id.
session (requests.Session, optional):
This object will be generated if None.
Defaults to None.
logger (logging.Logger, optional):
This object will be generated if None.
Defaults to None.
Raises:
BrokenPipeError:
A new "session_id" is required.
Returns:
bool:
Whether or not the subscription succeeded.
"""
if logger is None:
logger = cls.build_logger()
if session is None:
session = cls.build_session()
url = urls.QUOTECAST
url = f"{url}/{session_id}"
data = cls.quotecast_request_to_api(request=request)
logger.info("subscribe:data %s", data[:100])
session_request = requests.Request(method="POST", url=url, data=data)
prepped = session.prepare_request(request=session_request)
response = False
try:
raw_response = session.send(request=prepped, verify=False)
if raw_response.text == '[{"m":"sr"}]':
raise BrokenPipeError('A new "session_id" is required.')
else:
response = True
except Exception as e:
logger.fatal(e)
return None
return response
def call(self, request: Quotecast.Request) -> Optional[bool]:
session_id = self.connection_storage.session_id
session = self.session_storage.session
logger = self.logger
return self.subscribe(
request=request,
session_id=session_id,
session=session,
logger=logger,
)
|
[
"requests.Request"
] |
[((2754, 2805), 'requests.Request', 'requests.Request', ([], {'method': '"""POST"""', 'url': 'url', 'data': 'data'}), "(method='POST', url=url, data=data)\n", (2770, 2805), False, 'import requests\n')]
|
import os
import time
from flask import Blueprint, render_template, redirect, url_for, flash, request, abort
from flask_login import current_user, login_required
from flask_ckeditor import upload_success, upload_fail
from approval_system.extensions import db, archives, student_permission
from approval_system.forms import ApplyForm, MyApplyForm, FileApplyForm, CommentForm, ReApplyForm
from approval_system.models import Apply, Comment, Notice
from approval_system.utils import flash_errors, upload_file, file_path
user = Blueprint('user', __name__)
@user.route('/')
def index():
page = int(request.args.get('page', 1))
per_page = int(request.args.get('per_page', 8))
paginate = Notice.query.order_by(Notice.id.desc()).paginate(page, per_page, error_out=False)
notice = paginate.items
# from approval_system.models import User
# user1 = User(number='201508090009', name='Student1', dept_id=2, role_id=1, phone='10010001000')
# user1.set_password('<PASSWORD>')
# user2 = User(number='201508090079', name='Student2', dept_id=2, role_id=1, phone='10010001000')
# user2.set_password('<PASSWORD>')
# user3 = User(number='20150001', name='Teacher1', dept_id=2, role_id=2, phone='10010001000')
# user3.set_password('<PASSWORD>')
# user4 = User(number='20150002', name='Teacher2', dept_id=2, role_id=2, phone='10010001000')
# user4.set_password('<PASSWORD>')
# user5 = User(number='20150099', name='College1', dept_id=2, role_id=3, phone='10010001000')
# user5.set_password('<PASSWORD>')
# user6 = User(number='00000001', name='School1', dept_id=1, role_id=4, phone='10010001000')
# user6.set_password('<PASSWORD>')
# db.session.add(user1)
# db.session.add(user2)
# db.session.add(user3)
# db.session.add(user4)
# db.session.add(user5)
# db.session.add(user6)
# db.session.commit()
    # print('test data inserted OK')
return render_template('user/index.html', paginate=paginate, notice=notice)
@user.route('/notice/<int:id>/', methods=['GET', 'POST'])
def notice_id(id):
notice = Notice.query.get_or_404(id)
return render_template('user/notice_id.html', notice=notice)
@user.route('/all_apply/', methods=['GET'])
def all_apply():
page = int(request.args.get('page', 1))
per_page = int(request.args.get('per_page', 8))
paginate = Apply.query.order_by(Apply.id.desc()).paginate(page, per_page, error_out=False)
apply = paginate.items
return render_template('user/all_apply.html', paginate=paginate, apply=apply)
@user.route('/my_apply/', methods=['GET', 'POST'])
@login_required
@student_permission.require(http_exception=403)
def my_apply():
my_apply = Apply.query.filter(Apply.u_id == current_user.id).all()
return render_template('user/my_apply.html', my_apply=my_apply)
@user.route('/my_apply/<int:id>/', methods=['GET', 'POST'])
@login_required
@student_permission.require(http_exception=403)
def my_apply_id(id):
apply = Apply.query.get_or_404(id)
comments = Comment.query.filter_by(apply_id=id).all()
form = MyApplyForm()
file_form = FileApplyForm()
comment_form = CommentForm()
if apply.status_id % 2 == 0:
reapply_form = ReApplyForm()
else:
reapply_form = None
if form.submit1.data and form.validate_on_submit():
apply.name = form.name.data
apply.info = form.info.data
db.session.commit()
flash('更改项目信息成功', 'success')
return redirect(url_for('user.my_apply_id', id=id))
if file_form.submit2.data and file_form.validate_on_submit():
last_time = time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime())
upload_file(last_time, id=id)
apply.last_time = last_time
db.session.commit()
flash('上传项目文件成功', 'success')
return redirect(url_for('user.my_apply_id', id=id))
if comment_form.submit3.data and comment_form.validate_on_submit():
body = comment_form.body.data
new_comment = Comment(body=body, author_id=current_user.id, apply_id=id)
db.session.add(new_comment)
db.session.commit()
return redirect(url_for('user.my_apply_id', id=id))
if reapply_form and reapply_form.submit0.data and reapply_form.validate_on_submit():
apply.status_id = 1
apply.t_id = reapply_form.t_id.data
apply.s_id, apply.c_id = None, None
apply.last_time = time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime())
for i in comments:
db.session.delete(i)
db.session.commit()
flash('项目已重新申请', 'success')
return redirect(url_for('user.my_apply'))
flash_errors(file_form)
form.name.data = apply.name
form.info.data = apply.info
files_list = os.listdir(file_path(apply.inner_path))
return render_template('user/my_apply_id.html', form=form, file_form=file_form, comment_form=comment_form,
reapply_form=reapply_form, apply=apply, comments=comments, files_list=files_list)
@user.route('/apply/', methods=['GET', 'POST'])
@login_required
@student_permission.require(http_exception=403)
def apply():
form = ApplyForm()
if form.validate_on_submit():
name = form.name.data
info = form.info.data
t_id = form.t_id.data
last_time = time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime())
apply = Apply(name=name, info=info, status_id=1, u_id=current_user.id, t_id=t_id, last_time=last_time)
db.session.add(apply)
db.session.commit()
apply.inner_path = current_user.number+'/'+str(apply.id)
db.session.commit()
upload_file(last_time, id=apply.id)
flash('提交项目申请成功', 'success')
return redirect(url_for('.my_apply'))
flash_errors(form)
return render_template('user/apply.html', form=form)
@login_required
@user.route('/upload', methods=['POST'])
def upload_image():
f = request.files.get('upload')
inner_path = current_user.number
f.save(file_path(inner_path)+'/'+f.filename)
url = archives.url(inner_path+'/'+f.filename)
return upload_success(url, f.filename)
@login_required
@user.route('/my_apply/open/', methods=['GET'])
def open_file():
inner_path = request.args.get('inner_path')
filename = request.args.get('filename')
file_url = archives.url(inner_path+'/'+filename)
return redirect(file_url)
@login_required
@student_permission.require(http_exception=403)
@user.route('/my_apply/delete/', methods=['GET'])
def delete_file():
inner_path = request.args.get('inner_path')
filename = request.args.get('filename')
file = archives.path(filename, folder=inner_path)
os.remove(file)
return redirect(request.referrer)
@user.route('/delete/comment/<int:comment_id>', methods=['GET'])
@login_required
def delete_comment(comment_id):
comment = Comment.query.get_or_404(comment_id)
if current_user != comment.author:
abort(403)
db.session.delete(comment)
db.session.commit()
return redirect(request.referrer)
|
[
"approval_system.utils.upload_file",
"approval_system.models.Comment.query.filter_by",
"os.remove",
"flask.flash",
"flask.request.files.get",
"approval_system.forms.FileApplyForm",
"approval_system.extensions.student_permission.require",
"approval_system.utils.flash_errors",
"flask_ckeditor.upload_success",
"approval_system.models.Comment",
"approval_system.utils.file_path",
"flask.url_for",
"approval_system.models.Apply.query.get_or_404",
"approval_system.models.Notice.query.get_or_404",
"approval_system.models.Comment.query.get_or_404",
"approval_system.models.Apply",
"flask.request.args.get",
"flask.redirect",
"flask.abort",
"approval_system.extensions.db.session.commit",
"approval_system.forms.ReApplyForm",
"approval_system.extensions.db.session.add",
"approval_system.models.Notice.id.desc",
"approval_system.models.Apply.id.desc",
"flask.render_template",
"time.localtime",
"flask.Blueprint",
"approval_system.extensions.db.session.delete",
"approval_system.forms.CommentForm",
"approval_system.extensions.archives.url",
"approval_system.models.Apply.query.filter",
"approval_system.extensions.archives.path",
"approval_system.forms.MyApplyForm",
"approval_system.forms.ApplyForm"
] |
[((526, 553), 'flask.Blueprint', 'Blueprint', (['"""user"""', '__name__'], {}), "('user', __name__)\n", (535, 553), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2598, 2644), 'approval_system.extensions.student_permission.require', 'student_permission.require', ([], {'http_exception': '(403)'}), '(http_exception=403)\n', (2624, 2644), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((2879, 2925), 'approval_system.extensions.student_permission.require', 'student_permission.require', ([], {'http_exception': '(403)'}), '(http_exception=403)\n', (2905, 2925), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((5042, 5088), 'approval_system.extensions.student_permission.require', 'student_permission.require', ([], {'http_exception': '(403)'}), '(http_exception=403)\n', (5068, 5088), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((6362, 6408), 'approval_system.extensions.student_permission.require', 'student_permission.require', ([], {'http_exception': '(403)'}), '(http_exception=403)\n', (6388, 6408), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((1911, 1979), 'flask.render_template', 'render_template', (['"""user/index.html"""'], {'paginate': 'paginate', 'notice': 'notice'}), "('user/index.html', paginate=paginate, notice=notice)\n", (1926, 1979), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2072, 2099), 'approval_system.models.Notice.query.get_or_404', 'Notice.query.get_or_404', (['id'], {}), '(id)\n', (2095, 2099), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((2111, 2164), 'flask.render_template', 'render_template', (['"""user/notice_id.html"""'], {'notice': 'notice'}), "('user/notice_id.html', notice=notice)\n", (2126, 2164), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2457, 2527), 'flask.render_template', 'render_template', (['"""user/all_apply.html"""'], {'paginate': 'paginate', 'apply': 'apply'}), "('user/all_apply.html', paginate=paginate, apply=apply)\n", (2472, 2527), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2743, 2799), 'flask.render_template', 'render_template', (['"""user/my_apply.html"""'], {'my_apply': 'my_apply'}), "('user/my_apply.html', my_apply=my_apply)\n", (2758, 2799), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2959, 2985), 'approval_system.models.Apply.query.get_or_404', 'Apply.query.get_or_404', (['id'], {}), '(id)\n', (2981, 2985), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((3055, 3068), 'approval_system.forms.MyApplyForm', 'MyApplyForm', ([], {}), '()\n', (3066, 3068), False, 'from approval_system.forms import ApplyForm, MyApplyForm, FileApplyForm, CommentForm, ReApplyForm\n'), ((3085, 3100), 'approval_system.forms.FileApplyForm', 'FileApplyForm', ([], {}), '()\n', (3098, 3100), False, 'from approval_system.forms import ApplyForm, MyApplyForm, FileApplyForm, CommentForm, ReApplyForm\n'), ((3120, 3133), 'approval_system.forms.CommentForm', 'CommentForm', ([], {}), '()\n', (3131, 3133), False, 'from approval_system.forms import ApplyForm, MyApplyForm, FileApplyForm, CommentForm, ReApplyForm\n'), ((4610, 4633), 'approval_system.utils.flash_errors', 'flash_errors', (['file_form'], 
{}), '(file_form)\n', (4622, 4633), False, 'from approval_system.utils import flash_errors, upload_file, file_path\n'), ((4766, 4955), 'flask.render_template', 'render_template', (['"""user/my_apply_id.html"""'], {'form': 'form', 'file_form': 'file_form', 'comment_form': 'comment_form', 'reapply_form': 'reapply_form', 'apply': 'apply', 'comments': 'comments', 'files_list': 'files_list'}), "('user/my_apply_id.html', form=form, file_form=file_form,\n comment_form=comment_form, reapply_form=reapply_form, apply=apply,\n comments=comments, files_list=files_list)\n", (4781, 4955), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((5113, 5124), 'approval_system.forms.ApplyForm', 'ApplyForm', ([], {}), '()\n', (5122, 5124), False, 'from approval_system.forms import ApplyForm, MyApplyForm, FileApplyForm, CommentForm, ReApplyForm\n'), ((5715, 5733), 'approval_system.utils.flash_errors', 'flash_errors', (['form'], {}), '(form)\n', (5727, 5733), False, 'from approval_system.utils import flash_errors, upload_file, file_path\n'), ((5745, 5790), 'flask.render_template', 'render_template', (['"""user/apply.html"""'], {'form': 'form'}), "('user/apply.html', form=form)\n", (5760, 5790), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((5878, 5905), 'flask.request.files.get', 'request.files.get', (['"""upload"""'], {}), "('upload')\n", (5895, 5905), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6002, 6045), 'approval_system.extensions.archives.url', 'archives.url', (["(inner_path + '/' + f.filename)"], {}), "(inner_path + '/' + f.filename)\n", (6014, 6045), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((6053, 6084), 'flask_ckeditor.upload_success', 'upload_success', (['url', 'f.filename'], {}), '(url, f.filename)\n', (6067, 6084), False, 'from flask_ckeditor import upload_success, upload_fail\n'), ((6185, 6215), 'flask.request.args.get', 'request.args.get', (['"""inner_path"""'], {}), "('inner_path')\n", (6201, 6215), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6231, 6259), 'flask.request.args.get', 'request.args.get', (['"""filename"""'], {}), "('filename')\n", (6247, 6259), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6275, 6316), 'approval_system.extensions.archives.url', 'archives.url', (["(inner_path + '/' + filename)"], {}), "(inner_path + '/' + filename)\n", (6287, 6316), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((6324, 6342), 'flask.redirect', 'redirect', (['file_url'], {}), '(file_url)\n', (6332, 6342), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6495, 6525), 'flask.request.args.get', 'request.args.get', (['"""inner_path"""'], {}), "('inner_path')\n", (6511, 6525), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6541, 6569), 'flask.request.args.get', 'request.args.get', (['"""filename"""'], {}), "('filename')\n", (6557, 6569), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6581, 6623), 'approval_system.extensions.archives.path', 'archives.path', (['filename'], {'folder': 'inner_path'}), '(filename, folder=inner_path)\n', (6594, 6623), False, 'from approval_system.extensions import db, 
archives, student_permission\n'), ((6628, 6643), 'os.remove', 'os.remove', (['file'], {}), '(file)\n', (6637, 6643), False, 'import os\n'), ((6655, 6681), 'flask.redirect', 'redirect', (['request.referrer'], {}), '(request.referrer)\n', (6663, 6681), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6811, 6847), 'approval_system.models.Comment.query.get_or_404', 'Comment.query.get_or_404', (['comment_id'], {}), '(comment_id)\n', (6835, 6847), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((6910, 6936), 'approval_system.extensions.db.session.delete', 'db.session.delete', (['comment'], {}), '(comment)\n', (6927, 6936), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((6941, 6960), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6958, 6960), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((6972, 6998), 'flask.redirect', 'redirect', (['request.referrer'], {}), '(request.referrer)\n', (6980, 6998), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((601, 628), 'flask.request.args.get', 'request.args.get', (['"""page"""', '(1)'], {}), "('page', 1)\n", (617, 628), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((649, 680), 'flask.request.args.get', 'request.args.get', (['"""per_page"""', '(8)'], {}), "('per_page', 8)\n", (665, 680), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2243, 2270), 'flask.request.args.get', 'request.args.get', (['"""page"""', '(1)'], {}), "('page', 1)\n", (2259, 2270), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2291, 2322), 'flask.request.args.get', 'request.args.get', (['"""per_page"""', '(8)'], {}), "('per_page', 8)\n", (2307, 2322), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((3190, 3203), 'approval_system.forms.ReApplyForm', 'ReApplyForm', ([], {}), '()\n', (3201, 3203), False, 'from approval_system.forms import ApplyForm, MyApplyForm, FileApplyForm, CommentForm, ReApplyForm\n'), ((3378, 3397), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3395, 3397), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((3406, 3434), 'flask.flash', 'flash', (['"""更改项目信息成功"""', '"""success"""'], {}), "('更改项目信息成功', 'success')\n", (3411, 3434), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((3642, 3671), 'approval_system.utils.upload_file', 'upload_file', (['last_time'], {'id': 'id'}), '(last_time, id=id)\n', (3653, 3671), False, 'from approval_system.utils import flash_errors, upload_file, file_path\n'), ((3716, 3735), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3733, 3735), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((3744, 3772), 'flask.flash', 'flash', (['"""上传项目文件成功"""', '"""success"""'], {}), "('上传项目文件成功', 'success')\n", (3749, 3772), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((3965, 4023), 'approval_system.models.Comment', 'Comment', ([], {'body': 'body', 'author_id': 'current_user.id', 'apply_id': 'id'}), '(body=body, author_id=current_user.id, 
apply_id=id)\n', (3972, 4023), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((4032, 4059), 'approval_system.extensions.db.session.add', 'db.session.add', (['new_comment'], {}), '(new_comment)\n', (4046, 4059), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((4068, 4087), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4085, 4087), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((4500, 4519), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4517, 4519), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((4528, 4555), 'flask.flash', 'flash', (['"""项目已重新申请"""', '"""success"""'], {}), "('项目已重新申请', 'success')\n", (4533, 4555), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((4726, 4753), 'approval_system.utils.file_path', 'file_path', (['apply.inner_path'], {}), '(apply.inner_path)\n', (4735, 4753), False, 'from approval_system.utils import flash_errors, upload_file, file_path\n'), ((5338, 5436), 'approval_system.models.Apply', 'Apply', ([], {'name': 'name', 'info': 'info', 'status_id': '(1)', 'u_id': 'current_user.id', 't_id': 't_id', 'last_time': 'last_time'}), '(name=name, info=info, status_id=1, u_id=current_user.id, t_id=t_id,\n last_time=last_time)\n', (5343, 5436), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((5441, 5462), 'approval_system.extensions.db.session.add', 'db.session.add', (['apply'], {}), '(apply)\n', (5455, 5462), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((5471, 5490), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5488, 5490), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((5564, 5583), 'approval_system.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5581, 5583), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((5592, 5627), 'approval_system.utils.upload_file', 'upload_file', (['last_time'], {'id': 'apply.id'}), '(last_time, id=apply.id)\n', (5603, 5627), False, 'from approval_system.utils import flash_errors, upload_file, file_path\n'), ((5636, 5664), 'flask.flash', 'flash', (['"""提交项目申请成功"""', '"""success"""'], {}), "('提交项目申请成功', 'success')\n", (5641, 5664), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((6895, 6905), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (6900, 6905), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((2676, 2725), 'approval_system.models.Apply.query.filter', 'Apply.query.filter', (['(Apply.u_id == current_user.id)'], {}), '(Apply.u_id == current_user.id)\n', (2694, 2725), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((3001, 3037), 'approval_system.models.Comment.query.filter_by', 'Comment.query.filter_by', ([], {'apply_id': 'id'}), '(apply_id=id)\n', (3024, 3037), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((3459, 3493), 'flask.url_for', 'url_for', (['"""user.my_apply_id"""'], {'id': 'id'}), "('user.my_apply_id', id=id)\n", (3466, 3493), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((3616, 3632), 'time.localtime', 'time.localtime', ([], {}), '()\n', (3630, 
3632), False, 'import time\n'), ((3797, 3831), 'flask.url_for', 'url_for', (['"""user.my_apply_id"""'], {'id': 'id'}), "('user.my_apply_id', id=id)\n", (3804, 3831), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((4112, 4146), 'flask.url_for', 'url_for', (['"""user.my_apply_id"""'], {'id': 'id'}), "('user.my_apply_id', id=id)\n", (4119, 4146), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((4414, 4430), 'time.localtime', 'time.localtime', ([], {}), '()\n', (4428, 4430), False, 'import time\n'), ((4471, 4491), 'approval_system.extensions.db.session.delete', 'db.session.delete', (['i'], {}), '(i)\n', (4488, 4491), False, 'from approval_system.extensions import db, archives, student_permission\n'), ((4580, 4604), 'flask.url_for', 'url_for', (['"""user.my_apply"""'], {}), "('user.my_apply')\n", (4587, 4604), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((5304, 5320), 'time.localtime', 'time.localtime', ([], {}), '()\n', (5318, 5320), False, 'import time\n'), ((5689, 5709), 'flask.url_for', 'url_for', (['""".my_apply"""'], {}), "('.my_apply')\n", (5696, 5709), False, 'from flask import Blueprint, render_template, redirect, url_for, flash, request, abort\n'), ((719, 735), 'approval_system.models.Notice.id.desc', 'Notice.id.desc', ([], {}), '()\n', (733, 735), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((2360, 2375), 'approval_system.models.Apply.id.desc', 'Apply.id.desc', ([], {}), '()\n', (2373, 2375), False, 'from approval_system.models import Apply, Comment, Notice\n'), ((5954, 5975), 'approval_system.utils.file_path', 'file_path', (['inner_path'], {}), '(inner_path)\n', (5963, 5975), False, 'from approval_system.utils import flash_errors, upload_file, file_path\n')]
|
from math import sqrt
def total(arr):
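    """Return the sum of the elements of arr whose index is a prime number."""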
sum=0
for i in range(2,len(arr)):
if isprime(i):
sum+=arr[i]
return sum
def isprime(num):
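    """Trial division up to sqrt(num); callers here only pass num >= 2."""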
for i in range(2,int(sqrt(num))+1):
if num%i==0:
return False
return True
|
[
"math.sqrt"
] |
[((185, 194), 'math.sqrt', 'sqrt', (['num'], {}), '(num)\n', (189, 194), False, 'from math import sqrt\n')]
|
import os
from collections.abc import MutableMapping as DictMixin
import re
from unicodedata import normalize
from urllib.parse import unquote as _urlunquote
from functools import partial
from ..common_helpers import touni, HeaderDict, HeaderProperty, cached_property
from .. import errors
urlunquote = partial(_urlunquote, encoding='latin1')
# ------------------[ cachable property with a custom storage ] -------------------
def cache_in(attr, key=None, read_only=False):
# attr = 'environ[ PATH_INFO ]'
re_attr_key = re.compile(r'^(.+?)\[\s*([^\[\]]+?)\s*\]$')
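    # the pattern splits a spec like 'environ[ PATH_INFO ]' into the storage attribute and the key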
if not key:
attr_key = re_attr_key.match(attr)
if attr_key:
attr, key = attr_key.groups()
def wrapper(getter):
if not key:
def fget(self):
try:
return getattr(self, attr)
except AttributeError:
try:
v = getter(self)
except AttributeError as err:
raise errors.PropertyGetterError(
f'AttributeError in getter of cache_in property `{getter.__name__}`, '
f'storage={attr}: {str(err)}'
)
setattr(self, attr, v)
return getattr(self, attr)
def fset(self, value):
if read_only:
raise AttributeError("Read-Only property.")
setattr(self, attr, value)
def fdel(self):
if read_only:
raise AttributeError("Read-Only property.")
delattr(self, attr)
else:
def fget(self):
storage = getattr(self, attr)
if key not in storage:
try:
storage[key] = getter(self)
except AttributeError as err:
raise errors.PropertyGetterError(
f'AttributeError in getter of cache_in property `{getter.__name__}`, '
f'storage={attr}[{key}]: {str(err)}'
)
return storage[key]
def fset(self, value):
if read_only:
raise AttributeError("Read-Only property.")
getattr(self, attr)[key] = value
def fdel(self):
if read_only:
raise AttributeError("Read-Only property.")
del getattr(self, attr)[key]
return property(fget, fset, fdel, 'cache_in')
return wrapper
def parse_qsl(qs, *, append: callable = None, setitem: callable = None):
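    """Parse a query string qs. Each key/value pair is delivered through
    setitem (dict-style, with repeated keys promoted to lists), through
    append (as (key, value) tuples), or collected into a list that is
    returned when neither callback is given."""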
container = None
if setitem:
_seen = dict()
_lists = dict()
def add(k, v):
vlist = _lists.get(k)
if vlist:
vlist.append(v)
elif k in _seen:
tmp = _lists[k] = [_seen[k], v]
setitem(k, tmp)
else:
setitem(k, _seen.setdefault(k, v))
elif append:
add = lambda k, v: append((k, v))
else:
container = []
_append = container.append
add = lambda k, v: _append((k, v))
L = len(qs)
i = 0
while i < L:
key = None
idx = 0; c = None
for idx, c in enumerate(qs[i:]):
if c == '=' or c == '&':
break
else:
idx += 1
j = i + idx
key = qs[i:j]
i = j + 1 # skip '=' or '&'
if not key:
continue
key = urlunquote(key.replace('+', ' '))
if c == '&':
value = ''
else:
idx = 0; c = None
for idx, c in enumerate(qs[i:]):
if c == '&':
break
else:
idx += 1
j = i + idx
value = urlunquote(qs[i:j].replace('+', ' '))
i = j + 1 # skip '&'
add(key, value)
return container
class FormsDict(dict):
''' This :class:`dict` subclass is used to store request form data.
Additionally to the normal dict-like item access methods (which return
unmodified data as native strings), this container also supports
attribute-like access to its values. Attributes are automatically de-
or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
attributes default to an empty string. '''
#: Encoding used for attribute values.
input_encoding = 'utf8'
#: If true (default), unicode strings are first encoded with `latin1`
#: and then decoded to match :attr:`input_encoding`.
recode_unicode = True
def _fix(self, s, encoding=None):
if isinstance(s, str) and self.recode_unicode: # Python 3 WSGI
return s.encode('latin1').decode(encoding or self.input_encoding)
else:
return s
def decode(self, encoding=None):
''' Returns a copy with all keys and values de- or recoded to match
:attr:`input_encoding`. Some libraries (e.g. WTForms) want a
unicode dictionary. '''
copy = FormsDict()
enc = copy.input_encoding = encoding or self.input_encoding
copy.recode_unicode = False
for key, value in self.items():
copy[self._fix(key, enc)] = self._fix(value, enc)
return copy
def getunicode(self, name, default=None, encoding=None):
''' Return the value as a unicode string, or the default. '''
try:
return self._fix(self[name], encoding)
except (UnicodeError, KeyError):
return default
def __getattr__(self, name, default=str()):
# Without this guard, pickle generates a cryptic TypeError:
if name.startswith('__') and name.endswith('__'):
return super().__getattr__(name)
return self.getunicode(name, default=default)
class WSGIHeaderDict(DictMixin):
''' This dict-like class wraps a WSGI environ dict and provides convenient
access to HTTP_* fields. Keys and values are native strings
(2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
environment contains non-native string values, these are de- or encoded
using a lossless 'latin1' character set.
'''
#: List of keys that do not have a ``HTTP_`` prefix.
cgikeys = {'CONTENT_TYPE', 'CONTENT_LENGTH'}
__slots__ = ('environ',)
def __init__(self, environ):
self.environ = environ
def _ekey(self, key):
''' Translate header field name to CGI/WSGI environ key. '''
key = key.replace('-', '_').upper()
if key in self.cgikeys:
return key
return 'HTTP_' + key
def raw(self, key, default=None):
''' Return the header value as is (may be bytes or unicode). '''
return self.environ.get(self._ekey(key), default)
def __getitem__(self, key):
return touni(self.environ[self._ekey(key)], 'latin1')
def __setitem__(self, key, value):
raise TypeError("%s is read-only." % self.__class__)
def __delitem__(self, key):
raise TypeError("%s is read-only." % self.__class__)
def __iter__(self):
for key in self.environ:
if key.startswith('HTTP_'):
yield key[5:].replace('_', '-').title()
elif key in self.cgikeys:
yield key.replace('_', '-').title()
def keys(self): return [x for x in self]
def __len__(self): return len(self.keys())
def __contains__(self, key): return self._ekey(key) in self.environ
class FileUpload:
__slots__ = ('file', 'name', 'raw_filename', 'headers', '__dict__')
content_type = HeaderProperty('Content-Type')
content_length = HeaderProperty('Content-Length', reader=int, default=-1)
def __init__(self, fileobj, name, filename, headers=None):
''' Wrapper for file uploads. '''
#: Open file(-like) object (BytesIO buffer or temporary file)
self.file = fileobj
#: Name of the upload form field
self.name = name
#: Raw filename as sent by the client (may contain unsafe characters)
self.raw_filename = filename
#: A :class:`HeaderDict` with additional headers (e.g. content-type)
self.headers = HeaderDict(headers) if headers else HeaderDict()
def get_header(self, name, default=None):
""" Return the value of a header within the multipart part. """
return self.headers.get(name, default)
@cached_property
def filename(self):
''' Name of the file on the client file system, but normalized to ensure
file system compatibility. An empty filename is returned as 'empty'.
Only ASCII letters, digits, dashes, underscores and dots are
allowed in the final filename. Accents are removed, if possible.
            Whitespace is replaced by a single dash. Leading or trailing dots
or dashes are removed. The filename is limited to 255 characters.
'''
fname = self.raw_filename
if not isinstance(fname, str):
fname = fname.decode('utf8', 'ignore')
fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
fname = os.path.basename(fname.replace('\\', os.path.sep))
fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
return fname[:255] or 'empty'
def _copy_file(self, fp, chunk_size=2**16):
read, write, offset = self.file.read, fp.write, self.file.tell()
while True:
buf = read(chunk_size)
if not buf:
break
write(buf)
self.file.seek(offset)
def save(self, destination, overwrite=False, chunk_size=2**16):
''' Save file to disk or copy its content to an open file(-like) object.
If *destination* is a directory, :attr:`filename` is added to the
path. Existing files are not overwritten by default (IOError).
:param destination: File path, directory or file(-like) object.
:param overwrite: If True, replace existing files. (default: False)
:param chunk_size: Bytes to read at a time. (default: 64kb)
'''
if isinstance(destination, str): # Except file-likes here
if os.path.isdir(destination):
destination = os.path.join(destination, self.filename)
if not overwrite and os.path.exists(destination):
raise IOError('File exists.')
with open(destination, 'wb') as fp:
self._copy_file(fp, chunk_size)
else:
self._copy_file(destination, chunk_size)
|
[
"functools.partial",
"unicodedata.normalize",
"os.path.join",
"os.path.isdir",
"os.path.exists",
"re.sub",
"re.compile"
] |
[((306, 345), 'functools.partial', 'partial', (['_urlunquote'], {'encoding': '"""latin1"""'}), "(_urlunquote, encoding='latin1')\n", (313, 345), False, 'from functools import partial\n'), ((533, 581), 're.compile', 're.compile', (['"""^(.+?)\\\\[\\\\s*([^\\\\[\\\\]]+?)\\\\s*\\\\]$"""'], {}), "('^(.+?)\\\\[\\\\s*([^\\\\[\\\\]]+?)\\\\s*\\\\]$')\n", (543, 581), False, 'import re\n'), ((10420, 10446), 'os.path.isdir', 'os.path.isdir', (['destination'], {}), '(destination)\n', (10433, 10446), False, 'import os\n'), ((9373, 9412), 're.sub', 're.sub', (['"""[^a-zA-Z0-9-_.\\\\s]"""', '""""""', 'fname'], {}), "('[^a-zA-Z0-9-_.\\\\s]', '', fname)\n", (9379, 9412), False, 'import re\n'), ((9437, 9466), 're.sub', 're.sub', (['"""[-\\\\s]+"""', '"""-"""', 'fname'], {}), "('[-\\\\s]+', '-', fname)\n", (9443, 9466), False, 'import re\n'), ((10478, 10518), 'os.path.join', 'os.path.join', (['destination', 'self.filename'], {}), '(destination, self.filename)\n', (10490, 10518), False, 'import os\n'), ((10552, 10579), 'os.path.exists', 'os.path.exists', (['destination'], {}), '(destination)\n', (10566, 10579), False, 'import os\n'), ((9223, 9247), 'unicodedata.normalize', 'normalize', (['"""NFKD"""', 'fname'], {}), "('NFKD', fname)\n", (9232, 9247), False, 'from unicodedata import normalize\n')]
|
#!/usr/bin/env python3
import csv
import numpy as np
import matplotlib.pyplot as plt
import accuBar as accuBar
import groupBar2 as groupBar2
def getLatencyFromCSV(a):
print(a)
with open(a, 'r') as f:
reader = csv.reader(f)
#reader = [each for each in csv.DictReader(f, delimiter=',')]
result = list(reader)
rows=len(result)
print('rows=',rows)
firstRow = result[0]
#print(firstRow)
index=0
#define what may attract our interest
idxCpu=0
idxName=0
idxLatency=0
xt=[]
yt=[]
for i in firstRow:
#print(i)
if(i=='cpu'):
idxCpu=index
if(i=='name'):
idxName=index
if(i=='latency'):
idxLatency=index
index=index+1
#read the valid stages
vdataEntries=0
latencyList=[]
R1=0
R2=0
for k in range(1,rows):
if(result[k][idxName]=='load+reamp '):
R1=(int(result[k][idxLatency]))
if(result[k][idxName]=='write'):
R2=(int(result[k][idxLatency]))
return R1,R2
def main():
fileNames=[
'lz4_pipe2_LL',
'lz4_pipe2_LB',
'lz4_pipe2_BL',
'lz4_pipe2_BB',
]
paraTail='_pe.csv'
seqTail='_se.csv'
r1=[]
r2=[]
rp1=[]
rp2=[]
for i in range(len(fileNames)):
        R1,R2=getLatencyFromCSV(fileNames[i]+paraTail)
        R3,R4=getLatencyFromCSV(fileNames[i]+seqTail)
d1=R1-R3
d2=R2-R4
r1.append(d1)
r2.append(d2)
rp1.append(d1)
rp2.append(d2)
print(R1-R3)
print(R2-R4)
print([rp1,rp2])
print(rp2[1])
accuBar.DrawFigure(['LL','LB','BL','BB'],([r1,r2]),['load+remap','write'],'','additional cycles','lz4_pipe2_2stage_increased_latency',True,'')
accuBar.DrawPercentageFigure(['LL','LB','BL','BB'],accuBar.normalize([r1,r2]),['load+remap','write'],'','ac percentage','lz4_pipe2_2stage_increased_latency_per',True,'')
# groupBar2.DrawFigure(['LL','LB','BL','BB'],[rp1,rp2],['load+remap','write'],0,0,'x','y','hahaha',1)
if __name__ == "__main__":
main()
|
[
"accuBar.normalize",
"accuBar.DrawFigure",
"csv.reader"
] |
[((1773, 1933), 'accuBar.DrawFigure', 'accuBar.DrawFigure', (["['LL', 'LB', 'BL', 'BB']", '[r1, r2]', "['load+remap', 'write']", '""""""', '"""additional cycles"""', '"""lz4_pipe2_2stage_increased_latency"""', '(True)', '""""""'], {}), "(['LL', 'LB', 'BL', 'BB'], [r1, r2], ['load+remap',\n 'write'], '', 'additional cycles', 'lz4_pipe2_2stage_increased_latency',\n True, '')\n", (1791, 1933), True, 'import accuBar as accuBar\n'), ((225, 238), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (235, 238), False, 'import csv\n'), ((1971, 1998), 'accuBar.normalize', 'accuBar.normalize', (['[r1, r2]'], {}), '([r1, r2])\n', (1988, 1998), True, 'import accuBar as accuBar\n')]
|
# Copyright (c) 2018 <NAME>
# This code is available under the "Apache License 2.0"
# Please see the file COPYING in this distribution for license terms.
import tensorflow as tf
import os
import json
from layer import layer
from optimizer import optimizer
from dataset import train_dataset
def read_json(name):
with open('/models/{}/model.json'.format(name)) as f:
return json.loads(f.read())
def load_weights(name, model):
path = '/models/{}/weights.h5'.format(name)
if os.path.exists(path):
images, _ = next(iter(train_dataset().batch(10)))
model(images)
model.load_weights(path)
def create_model(model_json, train):
model = tf.keras.Sequential()
for l in model_json['layers']:
model.add(layer(l, train))
load_weights(model_json['name'], model)
return model, optimizer(model_json)
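# --- Illustrative usage sketch (added; not from the original source). ---
# A hypothetical caller, assuming '/models/<name>/model.json' exists:
#
#     model_json = read_json('my_model')    # 'my_model' is a placeholder name
#     model, opt = create_model(model_json, train=True)
#     model.compile(optimizer=opt, loss='sparse_categorical_crossentropy')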
|
[
"layer.layer",
"optimizer.optimizer",
"dataset.train_dataset",
"os.path.exists",
"tensorflow.keras.Sequential"
] |
[((497, 517), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (511, 517), False, 'import os\n'), ((683, 704), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (702, 704), True, 'import tensorflow as tf\n'), ((837, 858), 'optimizer.optimizer', 'optimizer', (['model_json'], {}), '(model_json)\n', (846, 858), False, 'from optimizer import optimizer\n'), ((758, 773), 'layer.layer', 'layer', (['l', 'train'], {}), '(l, train)\n', (763, 773), False, 'from layer import layer\n'), ((549, 564), 'dataset.train_dataset', 'train_dataset', ([], {}), '()\n', (562, 564), False, 'from dataset import train_dataset\n')]
|
#!/usr/bin/env python2
# Source:
# https://github.com/zcutlip/bowcaster/blob/master/src/bowcaster/encoders/mips.py
#
# Copyright (c) 2013 <NAME> <<EMAIL>>,
# 2013 Tactical Network Solutions, LLC
#
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from pwnlib import asm
from pwnlib import shellcraft
from pwnlib.context import context
from pwnlib.encoders.encoder import Encoder
from pwnlib.util.fiddling import xor_key
decoders = {
'little': ''.join([
"SIZ2SIZ1\x0e\x24", # li t6,-5
"\x27\x70\xc0\x01", # nor t6,t6,zero
"\xa3\xff\x0b\x24", # li t3,-93
"\x26\x40\xce\x01", # xor t0,t6,t6
"\xff\xff\x08\x21", # addi t0,t0,-1
"\xff\xff\x10\x05", # bltzal t0,14 <next>
"\x82\x82\x08\x28", # slti t0,zero,-32126
"\xe2\xff\xfd\x23", # addi sp,ra,-30
"\x27\x58\x60\x01", # nor t3,t3,zero
"\x21\xc8\xeb\x03", # addu t9,ra,t3
"\x82\x82\x17\x28", # slti s7,zero,-32126
"\xfc\xff\x31\x8f", # lw s1,-4(t9)
"\xfb\xff\x0c\x24", # li t4,-5
"\x27\x60\x80\x01", # nor t4,t4,zero
"\xfd\xff\x8f\x21", # addi t7,t4,-3
"\xfc\xff\x28\x8f", # lw t0,-4(t9)
"\x21\xb8\xef\x02", # addu s7,s7,t7
"\x26\x18\x11\x01", # xor v1,t0,s1
"\x2b\xf0\xee\x02", # sltu s8,s7,t6
"\xfc\xff\x23\xaf", # sw v1,-4(t9)
"\xfa\xff\x1e\x14", # bne zero,s8,3c <loop>
"\x21\xc8\x2c\x03", # addu t9,t9,t4
"\xfd\xff\x86\x21", # addi a2,t4,-3
"\xf8\xff\xa6\xaf", # sw a2,-8(sp)
"\x26\x28\xce\x01", # xor a1,t6,t6
"\xfc\xff\xa5\xaf", # sw a1,-4(sp)
"\xf8\xff\xa4\x27", # addiu a0,sp,-8
"\x46\x10\x02\x24", # li v0,4166
"\x0c\x54\x4a\x01" # syscall 0x52950
]),
'big': ''.join([
"\x24\x0eSIZ1SIZ2", # li t6,-5
"\x01\xc0\x70\x27", # nor t6,t6,zero
"\x24\x0b\xff\xa3", # li t3,-93
"\x01\xce\x40\x26", # xor t0,t6,t6
"\x21\x08\xff\xff", # addi t0,t0,-1
"\x05\x10\xff\xff", # bltzal t0,14 <next>
"\x28\x08\x82\x82", # slti t0,zero,-32126
"\x23\xfd\xff\xe2", # addi sp,ra,-30
"\x01\x60\x58\x27", # nor t3,t3,zero
"\x03\xeb\xc8\x21", # addu t9,ra,t3
"\x28\x17\x82\x82", # slti s7,zero,-32126
"\x8f\x31\xff\xfc", # lw s1,-4(t9)
"\x24\x0c\xff\xfb", # li t4,-5
"\x01\x80\x60\x27", # nor t4,t4,zero
"\x21\x8f\xff\xfd", # addi t7,t4,-3
"\x8f\x28\xff\xfc", # lw t0,-4(t9)
"\x02\xef\xb8\x21", # addu s7,s7,t7
"\x01\x11\x18\x26", # xor v1,t0,s1
"\x02\xee\xf0\x2b", # sltu s8,s7,t6
"\xaf\x23\xff\xfc", # sw v1,-4(t9)
"\x14\x1e\xff\xfa", # bne zero,s8,3c <loop>
"\x03\x2c\xc8\x21", # addu t9,t9,t4
"\x21\x86\xff\xfd", # addi a2,t4,-3
"\xaf\xa6\xff\xf8", # sw a2,-8(sp)
"\x01\xce\x28\x26", # xor a1,t6,t6
"\xaf\xa5\xff\xfc", # sw a1,-4(sp)
"\x27\xa4\xff\xf8", # addiu a0,sp,-8
"\x24\x02\x10\x46", # li v0,4166
"\x01\x4a\x54\x0c" # syscall 0x52950
])
}
class MipsXorEncoder(Encoder):
r"""Generates an XOR decoder for MIPS.
>>> context.clear(arch='mips')
>>> shellcode = asm(shellcraft.sh())
>>> avoid = '/bin/sh\x00'
>>> encoded = pwnlib.encoders.mips.xor.encode(shellcode, avoid)
>>> assert not any(c in encoded for c in avoid)
>>> p = run_shellcode(encoded)
>>> p.sendline('echo hello; exit')
>>> p.recvline()
'hello\n'
"""
arch = 'mips'
blacklist = cannot_avoid = set(''.join(v for v in decoders.values()))
def __call__(self, raw_bytes, avoid, pcreg=''):
assert 0 == len(raw_bytes) % context.bytes, "Payload is not aligned"
size = (len(raw_bytes) // 4) + 1
assert size < 0x10000, "Payload is too long"
size = size ^ 0xffff
sizelo = size & 0xff
sizehi = size >> 8
decoder = str(decoders[context.endian])
decoder = decoder.replace('SIZ1', chr(sizehi))
decoder = decoder.replace('SIZ2', chr(sizelo))
key, data = xor_key(raw_bytes, avoid=avoid)
return decoder + key + data
encode = MipsXorEncoder()
|
[
"pwnlib.util.fiddling.xor_key"
] |
[((5192, 5223), 'pwnlib.util.fiddling.xor_key', 'xor_key', (['raw_bytes'], {'avoid': 'avoid'}), '(raw_bytes, avoid=avoid)\n', (5199, 5223), False, 'from pwnlib.util.fiddling import xor_key\n')]
|
import time, requests, os
from bs4 import BeautifulSoup
from storages import Storage
from .base_archiver import Archiver, ArchiveResult
class WaybackArchiver(Archiver):
name = "wayback"
def __init__(self, storage: Storage):
super(WaybackArchiver, self).__init__(storage)
self.seen_urls = {}
def download(self, url, check_if_exists=False):
if check_if_exists and url in self.seen_urls:
return self.seen_urls[url]
ia_headers = {
"Accept": "application/json",
"Authorization": "LOW " + os.getenv('INTERNET_ARCHIVE_S3_KEY') + ":" + os.getenv('INTERNET_ARCHIVE_S3_SECRET')
}
r = requests.post(
'https://web.archive.org/save/', headers=ia_headers, data={'url': url})
if r.status_code != 200:
return ArchiveResult(status="Internet archive failed")
if 'job_id' not in r.json() and 'message' in r.json():
return ArchiveResult(status=f"Internet archive failed: {r.json()['message']}")
job_id = r.json()['job_id']
status_r = requests.get('https://web.archive.org/save/status/' + job_id, headers=ia_headers)
retries = 0
# wait 90-120 seconds for the archive job to finish
while (status_r.status_code != 200 or status_r.json()['status'] == 'pending') and retries < 30:
time.sleep(3)
try:
status_r = requests.get(
'https://web.archive.org/save/status/' + job_id, headers=ia_headers)
except:
time.sleep(1)
retries += 1
if status_r.status_code != 200:
return ArchiveResult(status="Internet archive failed")
status_json = status_r.json()
if status_json['status'] != 'success':
return ArchiveResult(status='Internet Archive failed: ' + str(status_json))
archive_url = 'https://web.archive.org/web/' + \
status_json['timestamp'] + '/' + status_json['original_url']
try:
r = requests.get(archive_url)
parsed = BeautifulSoup(r.content, 'html.parser')
title = parsed.find_all('title')[0].text
if title == 'Wayback Machine':
title = 'Could not get title'
except:
title = "Could not get title"
result = ArchiveResult(status='Internet Archive fallback', cdn_url=archive_url, title=title)
self.seen_urls[url] = result
return result
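# --- Illustrative usage sketch (added; not from the original source). ---
# Hypothetical call site; `storage` stands in for a concrete Storage backend:
#
#     archiver = WaybackArchiver(storage)
#     result = archiver.download('https://example.com', check_if_exists=True)
#     print(result.status, result.cdn_url)  # repeated URLs are served from cache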
|
[
"time.sleep",
"requests.get",
"bs4.BeautifulSoup",
"requests.post",
"os.getenv"
] |
[((677, 767), 'requests.post', 'requests.post', (['"""https://web.archive.org/save/"""'], {'headers': 'ia_headers', 'data': "{'url': url}"}), "('https://web.archive.org/save/', headers=ia_headers, data={\n 'url': url})\n", (690, 767), False, 'import time, requests, os\n'), ((1089, 1175), 'requests.get', 'requests.get', (["('https://web.archive.org/save/status/' + job_id)"], {'headers': 'ia_headers'}), "('https://web.archive.org/save/status/' + job_id, headers=\n ia_headers)\n", (1101, 1175), False, 'import time, requests, os\n'), ((1369, 1382), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1379, 1382), False, 'import time, requests, os\n'), ((2051, 2076), 'requests.get', 'requests.get', (['archive_url'], {}), '(archive_url)\n', (2063, 2076), False, 'import time, requests, os\n'), ((2099, 2138), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.content', '"""html.parser"""'], {}), "(r.content, 'html.parser')\n", (2112, 2138), False, 'from bs4 import BeautifulSoup\n'), ((614, 653), 'os.getenv', 'os.getenv', (['"""INTERNET_ARCHIVE_S3_SECRET"""'], {}), "('INTERNET_ARCHIVE_S3_SECRET')\n", (623, 653), False, 'import time, requests, os\n'), ((1428, 1514), 'requests.get', 'requests.get', (["('https://web.archive.org/save/status/' + job_id)"], {'headers': 'ia_headers'}), "('https://web.archive.org/save/status/' + job_id, headers=\n ia_headers)\n", (1440, 1514), False, 'import time, requests, os\n'), ((1567, 1580), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1577, 1580), False, 'import time, requests, os\n'), ((569, 605), 'os.getenv', 'os.getenv', (['"""INTERNET_ARCHIVE_S3_KEY"""'], {}), "('INTERNET_ARCHIVE_S3_KEY')\n", (578, 605), False, 'import time, requests, os\n')]
|
##
# .protocol.version
##
"""
PQ version class used by startup messages.
"""
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""
Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
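# Illustrative round-trip (added; not from the original source):
# a Version packs to four network-order bytes and parses back unchanged.
assert V3_0.bytes() == b'\x00\x03\x00\x00'
assert Version.parse(V3_0.bytes()) == V3_0
assert int(V3_0) == (3 << 16) | 0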
|
[
"struct.Struct"
] |
[((120, 133), 'struct.Struct', 'Struct', (['"""!HH"""'], {}), "('!HH')\n", (126, 133), False, 'from struct import Struct\n')]
|
import _sk_fail; _sk_fail._("pyclbr")
|
[
"_sk_fail._"
] |
[((17, 37), '_sk_fail._', '_sk_fail._', (['"""pyclbr"""'], {}), "('pyclbr')\n", (27, 37), False, 'import _sk_fail\n')]
|
"""
Compare screenshots taken via the webdriver.
"""
# Copyright (c) 2013 contributors; see AUTHORS.
# Licensed under the Apache License, Version 2.0
# https://www.apache.org/licenses/LICENSE-2.0
import math
import operator
from functools import reduce  # builtin on Python 2; explicit import needed on Python 3
try:
# Pillow
from PIL import Image
from PIL import ImageChops
except ImportError: # pragma: no cover
# PIL
try:
import Image # pylint: disable=F0401
import ImageChops # pylint: disable=F0401
except ImportError:
raise ImportError('Could not import Pillow or PIL')
from gossamer import util, exc
def allowance(browser):
"""
Our image diffs below give some false alarms of diffs... The values
below are rmsdiff values for 100%-same ImageChops histograms (i.e.,
ImageChops' bounding box method showed no difference), and for
trivial rendering differences, e.g., a pixel within a button gradient
differing, or rendered text differing very slightly, we add a slight
allowance.
"""
margin = 1.001
margins = {
'default': 572.4334022399462*margin,
'chrome': 572.4334022399462*margin,
'firefox': 957.864291014*margin
}
try:
return margins[browser]
except KeyError:
return margins['default']
def images_identical(path1, path2, margin=None):
"""
Hacky test of images being identical. PIL can show incorrect diffs.
"""
util.log.debug('images_identical: %s, %s', path1, path2)
im1 = Image.open(path1)
im2 = Image.open(path2)
if ImageChops.difference(im1, im2).getbbox() is None:
util.log.debug('images_identical: bounding box ok')
identical = True
else:
rmsdiff = _rmsdiff_2011(im1, im2)
margin = margin or 0
if rmsdiff <= margin:
util.log.debug('images_identical: rmsdiff %s ok' % rmsdiff)
identical = True
else:
util.log.debug('images_identical: rmsdiff %s failed' % rmsdiff)
identical = False
return identical
def image_diff(path1, path2, outpath, diffcolor):
"""
Generate a diff image on a screenshot which has failed
:func:`.images_identical`.
"""
im1 = Image.open(path1)
im2 = Image.open(path2)
rmsdiff = _rmsdiff_2011(im1, im2)
pix1 = im1.load()
pix2 = im2.load()
if im1.mode != im2.mode:
raise exc.TestError(
'Different pixel modes between %r and %r' % \
(path1, path2)
)
if im1.size != im2.size:
raise exc.TestError(
'Different dimensions between %r (%r) and %r (%r)' % \
(path1, im1.size, path2, im2.size)
)
mode = im1.mode
if mode == '1':
value = 255
elif mode == 'L':
value = 255
elif mode == 'RGB':
value = diffcolor
elif mode == 'RGBA':
value = diffcolor + (255,)
elif mode == 'P':
raise NotImplementedError('Need to look up nearest palette color')
else:
raise NotImplementedError('Unexpected PNG mode')
width, height = im1.size
for y in range(height):
for x in range(width):
if pix1[x, y] != pix2[x, y]:
pix2[x, y] = value
im2.save(outpath)
return (rmsdiff, width, height)
def _rmsdiff_2011(im1, im2):
"Calculate the root-mean-square difference between two images"
h = ImageChops.difference(im1, im2).histogram()
rms = math.sqrt(
reduce(
operator.add,
map(lambda h, i: h*(i**2), h, range(len(h))) # pylint: disable=W0110,W0141
) / (float(im1.size[0]) * im1.size[1])
)
return rms
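# --- Illustrative usage sketch (added; not from the original source). ---
# Hypothetical comparison of two screenshots; the paths are placeholders:
#
#     margin = allowance('firefox')
#     if not images_identical('a.png', 'b.png', margin=margin):
#         rms, w, h = image_diff('a.png', 'b.png', 'diff.png', (255, 0, 0))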
|
[
"gossamer.exc.TestError",
"Image.open",
"ImageChops.difference",
"gossamer.util.log.debug"
] |
[((1394, 1450), 'gossamer.util.log.debug', 'util.log.debug', (['"""images_identical: %s, %s"""', 'path1', 'path2'], {}), "('images_identical: %s, %s', path1, path2)\n", (1408, 1450), False, 'from gossamer import util, exc\n'), ((1462, 1479), 'Image.open', 'Image.open', (['path1'], {}), '(path1)\n', (1472, 1479), False, 'import Image\n'), ((1490, 1507), 'Image.open', 'Image.open', (['path2'], {}), '(path2)\n', (1500, 1507), False, 'import Image\n'), ((2172, 2189), 'Image.open', 'Image.open', (['path1'], {}), '(path1)\n', (2182, 2189), False, 'import Image\n'), ((2200, 2217), 'Image.open', 'Image.open', (['path2'], {}), '(path2)\n', (2210, 2217), False, 'import Image\n'), ((1574, 1625), 'gossamer.util.log.debug', 'util.log.debug', (['"""images_identical: bounding box ok"""'], {}), "('images_identical: bounding box ok')\n", (1588, 1625), False, 'from gossamer import util, exc\n'), ((2345, 2418), 'gossamer.exc.TestError', 'exc.TestError', (["('Different pixel modes between %r and %r' % (path1, path2))"], {}), "('Different pixel modes between %r and %r' % (path1, path2))\n", (2358, 2418), False, 'from gossamer import util, exc\n'), ((2498, 2604), 'gossamer.exc.TestError', 'exc.TestError', (["('Different dimensions between %r (%r) and %r (%r)' % (path1, im1.size,\n path2, im2.size))"], {}), "('Different dimensions between %r (%r) and %r (%r)' % (path1,\n im1.size, path2, im2.size))\n", (2511, 2604), False, 'from gossamer import util, exc\n'), ((1774, 1833), 'gossamer.util.log.debug', 'util.log.debug', (["('images_identical: rmsdiff %s ok' % rmsdiff)"], {}), "('images_identical: rmsdiff %s ok' % rmsdiff)\n", (1788, 1833), False, 'from gossamer import util, exc\n'), ((1889, 1952), 'gossamer.util.log.debug', 'util.log.debug', (["('images_identical: rmsdiff %s failed' % rmsdiff)"], {}), "('images_identical: rmsdiff %s failed' % rmsdiff)\n", (1903, 1952), False, 'from gossamer import util, exc\n'), ((3344, 3375), 'ImageChops.difference', 'ImageChops.difference', (['im1', 'im2'], {}), '(im1, im2)\n', (3365, 3375), False, 'import ImageChops\n'), ((1515, 1546), 'ImageChops.difference', 'ImageChops.difference', (['im1', 'im2'], {}), '(im1, im2)\n', (1536, 1546), False, 'import ImageChops\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
class implementations for real-time 3D feature extraction
"""
from abc import abstractmethod
import pandas as pd
import os
import glob
import numpy as np
from tomo_encoders import Patches
from tomo_encoders import DataFile
import cupy as cp
import h5py
import abc
import time
from tomo_encoders.misc.voxel_processing import normalize_volume_gpu
# Parameters for weighted cross-entropy and focal loss - alpha is higher than 0.5 to emphasize loss in "ones" or metal pixels.
def _msg_exec_time(func, t_exec):
print("TIME: %s: %.2f seconds"%(func.__name__, t_exec))
return
def read_data_pair(ds_X, ds_Y, s_crops, normalize_sampling_factor):
print("loading data...")
# X = ds_X.read_data(slice_3D = s_crops).astype(np.float32)
# Y = ds_Y.read_data(slice_3D = s_crops).astype(np.uint8)
X = ds_X.read_full().astype(np.float32)
Y = ds_Y.read_full().astype(np.uint8)
X = X[s_crops].copy()
Y = Y[s_crops].copy()
# normalize volume, check if shape is compatible.
X = normalize_volume_gpu(X, normalize_sampling_factor = normalize_sampling_factor, chunk_size = 1).astype(np.float16)
print("done")
print("Shape X %s, shape Y %s"%(str(X.shape), str(Y.shape)))
return X, Y
def load_dataset_pairs(datasets, normalize_sampling_factor = 4, TIMEIT = False):
'''
load datasets using DataFile objects for X and Y. Multiple dataset pairs can be loaded.
'''
    t0 = time.time()
n_vols = len(datasets)
Xs = [0]*n_vols
Ys = [0]*n_vols
ii = 0
for filename, dataset in datasets.items():
ds_X = DataFile(dataset['fpath_X'], autodetect_format = True, \
data_tag = dataset['data_tag_X'], VERBOSITY = 0)
ds_Y = DataFile(dataset['fpath_Y'], autodetect_format = True, \
data_tag = dataset['data_tag_Y'], VERBOSITY = 0)
Xs[ii], Ys[ii] = read_data_pair(ds_X, ds_Y, dataset['s_crops'], normalize_sampling_factor)
ii += 1
del ii
if TIMEIT:
t_exec = float(time.time() - t0)
_msg_exec_time(load_dataset_pairs, t_exec)
print("DATASET SHAPES: \n")
for ip in range(len(Xs)):
print("dataset %i: "%(ip+1), " -- ", Xs[ip].shape)
return Xs, Ys
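# --- Illustrative usage sketch (added; not from the original source). ---
# A hypothetical `datasets` mapping of the shape load_dataset_pairs expects;
# all paths, tags and crops below are placeholders:
#
#     datasets = {"scan1": {"fpath_X": "/data/scan1.h5",   "data_tag_X": "recon",
#                           "fpath_Y": "/data/scan1_Y.h5", "data_tag_Y": "labels",
#                           "s_crops": np.s_[:, :, :]}}
#     Xs, Ys = load_dataset_pairs(datasets, normalize_sampling_factor=4)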
if __name__ == "__main__":
print('just a bunch of functions')
|
[
"tomo_encoders.misc.voxel_processing.normalize_volume_gpu",
"tomo_encoders.DataFile",
"time.time"
] |
[((1510, 1521), 'time.time', 'time.time', ([], {}), '()\n', (1519, 1521), False, 'import time\n'), ((1664, 1766), 'tomo_encoders.DataFile', 'DataFile', (["dataset['fpath_X']"], {'autodetect_format': '(True)', 'data_tag': "dataset['data_tag_X']", 'VERBOSITY': '(0)'}), "(dataset['fpath_X'], autodetect_format=True, data_tag=dataset[\n 'data_tag_X'], VERBOSITY=0)\n", (1672, 1766), False, 'from tomo_encoders import DataFile\n'), ((1810, 1912), 'tomo_encoders.DataFile', 'DataFile', (["dataset['fpath_Y']"], {'autodetect_format': '(True)', 'data_tag': "dataset['data_tag_Y']", 'VERBOSITY': '(0)'}), "(dataset['fpath_Y'], autodetect_format=True, data_tag=dataset[\n 'data_tag_Y'], VERBOSITY=0)\n", (1818, 1912), False, 'from tomo_encoders import DataFile\n'), ((1071, 1165), 'tomo_encoders.misc.voxel_processing.normalize_volume_gpu', 'normalize_volume_gpu', (['X'], {'normalize_sampling_factor': 'normalize_sampling_factor', 'chunk_size': '(1)'}), '(X, normalize_sampling_factor=normalize_sampling_factor,\n chunk_size=1)\n', (1091, 1165), False, 'from tomo_encoders.misc.voxel_processing import normalize_volume_gpu\n'), ((2105, 2116), 'time.time', 'time.time', ([], {}), '()\n', (2114, 2116), False, 'import time\n')]
|
# testing adafruit atecc module
import board
import adafruit_atecc
import busio
import time
_WAKE_CLK_FREQ = 100000 # slower clock speed
i2c = busio.I2C(board.SCL, board.SDA, frequency=_WAKE_CLK_FREQ)
atecc = adafruit_atecc.ATECCx08A(i2c)
print("Found ATECC!")
r = bytearray(1)
while True:
print("Waking up chip")
atecc.wakeup()
atecc.idle()
v = atecc.version()
print("Found version 0x%04x" % v)
print("Locked?", atecc.locked())
time.sleep(1)
print("---------------------")
|
[
"busio.I2C",
"adafruit_atecc.ATECCx08A",
"time.sleep"
] |
[((147, 204), 'busio.I2C', 'busio.I2C', (['board.SCL', 'board.SDA'], {'frequency': '_WAKE_CLK_FREQ'}), '(board.SCL, board.SDA, frequency=_WAKE_CLK_FREQ)\n', (156, 204), False, 'import busio\n'), ((214, 243), 'adafruit_atecc.ATECCx08A', 'adafruit_atecc.ATECCx08A', (['i2c'], {}), '(i2c)\n', (238, 243), False, 'import adafruit_atecc\n'), ((462, 475), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (472, 475), False, 'import time\n')]
|
import numpy as np
import colorlover as cl
from multiagent.scenario import BaseScenario
from mdac.utils.entities import Drone, TargetLandmark, SupplyEntity
from mdac.utils.worlds import DroneWorld
class Scenario(BaseScenario):
def make_world(self):
n_lidar_per_agent = 256
world = DroneWorld(n_lidar_per_agent=n_lidar_per_agent,
mem_frames=1, dt=0.08)
num_agents = 5
num_targets = num_agents
world.collaborative = False
world.agents = [Drone(uid=i) for i in range(num_agents)]
world.landmarks = [TargetLandmark() for i in range(num_targets)]
for i, agent in enumerate(world.agents):
agent.name = 'agent %d' % i
agent.collide = True
agent.silent = True
agent.lidar_range = 4.0
agent.target = world.landmarks[i]
agent.construct_range = 0.1
for i, landmark in enumerate(world.landmarks):
landmark.collide = False
landmark.movable = False
if isinstance(landmark, TargetLandmark):
landmark.name = 'landmark %d' % i
landmark.size = 0.05
if isinstance(landmark, SupplyEntity):
landmark.name = 'supply %d' % i
landmark.size = 1.5
# make initial conditions
self.reset_world(world)
return world
def generate_random_pose(self, agent):
pos = np.random.uniform(-7, +7, 2)
dis = np.linalg.norm(pos)
while (dis > 7):
pos = np.random.uniform(-7, +7, 2)
dis = np.linalg.norm(pos)
agent.state.p_pos = pos
def generate_random_goal(self, agent):
goal_pos = np.random.uniform(-7, +7, 2)
dis_origin = np.linalg.norm(goal_pos)
dis_goal = np.linalg.norm(agent.state.p_pos - goal_pos)
while (dis_origin > 7 or dis_goal > 8 or dis_goal < 6):
goal_pos = np.random.uniform(-7, +7, 2)
dis_origin = np.linalg.norm(goal_pos)
dis_goal = np.linalg.norm(agent.state.p_pos - goal_pos)
agent.target.state.p_pos = goal_pos
def reset_world(self, world):
colors = np.array(cl.to_numeric(cl.scales['5']['div']['RdYlBu']))/255
for i, agent in enumerate(world.agents):
agent.size = np.random.uniform(0.2, 0.3)
agent.pseudo_collision_range = agent.size + 0.1
agent.color = colors[i%5]
agent.target.color = colors[i%5]
self.generate_random_pose(agent)
self.generate_random_goal(agent)
agent.state.p_vel = np.zeros(world.dim_p)
agent.previous_state.p_pos = np.copy(agent.state.p_pos)
agent.previous_state.p_vel = np.copy(agent.state.p_vel)
agent.state.c = np.zeros(world.dim_c)
agent.terminate = False
for agent in world.agents:
agent.agents_lidar = world.lidar.get_ray_lidar(agent)
agent.lidar_memory = [agent.agents_lidar, agent.agents_lidar]
def is_collision(self, agent1, agent2):
dist = np.linalg.norm(agent1.state.p_pos - agent2.state.p_pos)
dist_min = agent1.size + agent2.size
return True if dist < dist_min else False
def collision_reward(self, agent, entity):
if agent is entity:
return 0
if agent.pseudo_collision_range is not None:
p_range = agent.pseudo_collision_range
else:
p_range = agent.size
d = np.linalg.norm(agent.state.p_pos-entity.state.p_pos)
s = agent.size + entity.size
if d > p_range + entity.size:
return 0
if d <= s:
return -15
return ((d - s) / (p_range - agent.size) - 1) * 15
def reward(self, agent, world):
prev_d = np.linalg.norm(agent.previous_state.p_pos - agent.target.state.p_pos)
d = np.linalg.norm(agent.state.p_pos - agent.target.state.p_pos)
reward_g = (prev_d - d) * 2.5
reward_c = 0
if agent.collide:
for a in world.agents:
if a is agent: continue
if self.is_collision(agent, a):
print(agent.name, 'collided')
reward_c -= 15
agent.terminate = True
else:
reward_c += self.collision_reward(agent, a)
if d < agent.construct_range and (np.abs(agent.state.p_vel) < 0.2).all():
print(agent.name, 'reached target')
reward_g += 15
# agent.target.state.p_pos = np.random.uniform(-6, +6, world.dim_p)
self.generate_random_goal(agent)
agent.terminate = True
return reward_c + reward_g
def observation(self, agent, world):
out = [np.concatenate(agent.lidar_memory + [agent.agents_lidar]),
agent.state.p_vel,
agent.target.state.p_pos - agent.state.p_pos,
]
return np.concatenate(out)
def done(self, agent, world):
return agent.terminate
|
[
"numpy.random.uniform",
"numpy.abs",
"numpy.copy",
"numpy.zeros",
"mdac.utils.entities.TargetLandmark",
"colorlover.to_numeric",
"mdac.utils.entities.Drone",
"numpy.linalg.norm",
"mdac.utils.worlds.DroneWorld",
"numpy.concatenate"
] |
[((303, 373), 'mdac.utils.worlds.DroneWorld', 'DroneWorld', ([], {'n_lidar_per_agent': 'n_lidar_per_agent', 'mem_frames': '(1)', 'dt': '(0.08)'}), '(n_lidar_per_agent=n_lidar_per_agent, mem_frames=1, dt=0.08)\n', (313, 373), False, 'from mdac.utils.worlds import DroneWorld\n'), ((1458, 1486), 'numpy.random.uniform', 'np.random.uniform', (['(-7)', '(+7)', '(2)'], {}), '(-7, +7, 2)\n', (1475, 1486), True, 'import numpy as np\n'), ((1501, 1520), 'numpy.linalg.norm', 'np.linalg.norm', (['pos'], {}), '(pos)\n', (1515, 1520), True, 'import numpy as np\n'), ((1726, 1754), 'numpy.random.uniform', 'np.random.uniform', (['(-7)', '(+7)', '(2)'], {}), '(-7, +7, 2)\n', (1743, 1754), True, 'import numpy as np\n'), ((1776, 1800), 'numpy.linalg.norm', 'np.linalg.norm', (['goal_pos'], {}), '(goal_pos)\n', (1790, 1800), True, 'import numpy as np\n'), ((1820, 1864), 'numpy.linalg.norm', 'np.linalg.norm', (['(agent.state.p_pos - goal_pos)'], {}), '(agent.state.p_pos - goal_pos)\n', (1834, 1864), True, 'import numpy as np\n'), ((3103, 3158), 'numpy.linalg.norm', 'np.linalg.norm', (['(agent1.state.p_pos - agent2.state.p_pos)'], {}), '(agent1.state.p_pos - agent2.state.p_pos)\n', (3117, 3158), True, 'import numpy as np\n'), ((3514, 3568), 'numpy.linalg.norm', 'np.linalg.norm', (['(agent.state.p_pos - entity.state.p_pos)'], {}), '(agent.state.p_pos - entity.state.p_pos)\n', (3528, 3568), True, 'import numpy as np\n'), ((3819, 3888), 'numpy.linalg.norm', 'np.linalg.norm', (['(agent.previous_state.p_pos - agent.target.state.p_pos)'], {}), '(agent.previous_state.p_pos - agent.target.state.p_pos)\n', (3833, 3888), True, 'import numpy as np\n'), ((3901, 3961), 'numpy.linalg.norm', 'np.linalg.norm', (['(agent.state.p_pos - agent.target.state.p_pos)'], {}), '(agent.state.p_pos - agent.target.state.p_pos)\n', (3915, 3961), True, 'import numpy as np\n'), ((4983, 5002), 'numpy.concatenate', 'np.concatenate', (['out'], {}), '(out)\n', (4997, 5002), True, 'import numpy as np\n'), ((518, 530), 'mdac.utils.entities.Drone', 'Drone', ([], {'uid': 'i'}), '(uid=i)\n', (523, 530), False, 'from mdac.utils.entities import Drone, TargetLandmark, SupplyEntity\n'), ((586, 602), 'mdac.utils.entities.TargetLandmark', 'TargetLandmark', ([], {}), '()\n', (600, 602), False, 'from mdac.utils.entities import Drone, TargetLandmark, SupplyEntity\n'), ((1564, 1592), 'numpy.random.uniform', 'np.random.uniform', (['(-7)', '(+7)', '(2)'], {}), '(-7, +7, 2)\n', (1581, 1592), True, 'import numpy as np\n'), ((1611, 1630), 'numpy.linalg.norm', 'np.linalg.norm', (['pos'], {}), '(pos)\n', (1625, 1630), True, 'import numpy as np\n'), ((1952, 1980), 'numpy.random.uniform', 'np.random.uniform', (['(-7)', '(+7)', '(2)'], {}), '(-7, +7, 2)\n', (1969, 1980), True, 'import numpy as np\n'), ((2006, 2030), 'numpy.linalg.norm', 'np.linalg.norm', (['goal_pos'], {}), '(goal_pos)\n', (2020, 2030), True, 'import numpy as np\n'), ((2054, 2098), 'numpy.linalg.norm', 'np.linalg.norm', (['(agent.state.p_pos - goal_pos)'], {}), '(agent.state.p_pos - goal_pos)\n', (2068, 2098), True, 'import numpy as np\n'), ((2331, 2358), 'numpy.random.uniform', 'np.random.uniform', (['(0.2)', '(0.3)'], {}), '(0.2, 0.3)\n', (2348, 2358), True, 'import numpy as np\n'), ((2624, 2645), 'numpy.zeros', 'np.zeros', (['world.dim_p'], {}), '(world.dim_p)\n', (2632, 2645), True, 'import numpy as np\n'), ((2687, 2713), 'numpy.copy', 'np.copy', (['agent.state.p_pos'], {}), '(agent.state.p_pos)\n', (2694, 2713), True, 'import numpy as np\n'), ((2755, 2781), 'numpy.copy', 'np.copy', 
(['agent.state.p_vel'], {}), '(agent.state.p_vel)\n', (2762, 2781), True, 'import numpy as np\n'), ((2810, 2831), 'numpy.zeros', 'np.zeros', (['world.dim_c'], {}), '(world.dim_c)\n', (2818, 2831), True, 'import numpy as np\n'), ((4797, 4854), 'numpy.concatenate', 'np.concatenate', (['(agent.lidar_memory + [agent.agents_lidar])'], {}), '(agent.lidar_memory + [agent.agents_lidar])\n', (4811, 4854), True, 'import numpy as np\n'), ((2205, 2251), 'colorlover.to_numeric', 'cl.to_numeric', (["cl.scales['5']['div']['RdYlBu']"], {}), "(cl.scales['5']['div']['RdYlBu'])\n", (2218, 2251), True, 'import colorlover as cl\n'), ((4428, 4453), 'numpy.abs', 'np.abs', (['agent.state.p_vel'], {}), '(agent.state.p_vel)\n', (4434, 4453), True, 'import numpy as np\n')]
|
import os
print("Enter your folder name to add to documents: ")
newDir = input()
print("Enter the number of chapters + 1 (16 chapters would be 17): ")
ch_Num = input()
ch_Num = int(ch_Num)
dirs = range(1, ch_Num)
parent_dir = f"/Documents/{newDir}"
for dir in dirs:
    new_dir = 'Chapter' + str(dir)
path = os.path.join(parent_dir, new_dir)
os.makedirs(path)
print(f"Directory {new_dir} created successfully")
|
[
"os.path.join",
"os.makedirs"
] |
[((313, 346), 'os.path.join', 'os.path.join', (['parent_dir', 'new_dir'], {}), '(parent_dir, new_dir)\n', (325, 346), False, 'import os\n'), ((351, 368), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (362, 368), False, 'import os\n')]
|
# Required for Python to search this directory for module files
# Keep this file free of any code or import statements that could
# cause either an error to occur or a log message to be logged.
# This ensures that calling code can import initialization code from
# webkitpy before any errors or log messages due to code in this file.
# Initialization code can include things like version-checking code and
# logging configuration code.
#
# We do not execute any version-checking code or logging configuration
# code in this file so that callers can opt-in as they want. This also
# allows different callers to choose different initialization code,
# as necessary.
import os
import imp
from webkitpy.benchmark_runner.utils import load_subclasses
from webkitpy.benchmark_runner.http_server_driver.http_server_driver_factory import HTTPServerDriverFactory
def http_server_driver_loader(http_server_driver_class):
for platform in http_server_driver_class.platforms:
HTTPServerDriverFactory.add(platform, http_server_driver_class)
load_subclasses(
dirname=os.path.dirname(os.path.abspath(__file__)),
base_class_name='HTTPServerDriver',
base_class_file='http_server_driver.py',
loader=http_server_driver_loader)
|
[
"webkitpy.benchmark_runner.http_server_driver.http_server_driver_factory.HTTPServerDriverFactory.add",
"os.path.abspath"
] |
[((979, 1042), 'webkitpy.benchmark_runner.http_server_driver.http_server_driver_factory.HTTPServerDriverFactory.add', 'HTTPServerDriverFactory.add', (['platform', 'http_server_driver_class'], {}), '(platform, http_server_driver_class)\n', (1006, 1042), False, 'from webkitpy.benchmark_runner.http_server_driver.http_server_driver_factory import HTTPServerDriverFactory\n'), ((1090, 1115), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1105, 1115), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.base import BaseEstimator, TransformerMixin
from karura.core.insight import Insight
from karura.core.dataframe_extension import FType
class DatetimeToCategoricalInsight(Insight):
def __init__(self):
super().__init__()
self.index.as_preprocessing()
self.automatic = True
self._categorized = []
def adopt(self, dfe, interpreted=None):
targets = self.get_insight_targets(dfe)
self._categorized = targets
for t in targets:
month = dfe.df[t].dt.month.rename(t + "_month").astype("category")
day = dfe.df[t].dt.day.rename(t + "_day").astype("category")
dfe.df = pd.concat([dfe.df, month, day], axis=1)
dfe.to_categorical((month.name, day.name))
dfe.df.drop(targets, axis=1, inplace=True) # drop original column
dfe.sync()
return True
def get_insight_targets(self, dfe):
return dfe.get_columns(FType.datetime, include_target=False)
def get_transformer(self, dfe):
return DatetimeToCategoricalTransformer(dfe, self)
class DatetimeToCategoricalTransformer(BaseEstimator, TransformerMixin):
def __init__(self, dfe, datetime_to_categorical_insight):
self.model_features = dfe.get_columns(include_target=False)
self.targets = datetime_to_categorical_insight._categorized
def fit(self, X, y=None):
return self # do nothing
def transform(self, X, y=None, copy=None):
for t in self.targets:
month_name = t + "_month"
day_name = t + "_day"
if month_name in self.model_features:
month = X[t].dt.month.rename(month_name).astype("category")
X = pd.concat([X, month], axis=1)
if day_name in self.model_features:
day = X[t].dt.day.rename(day_name).astype("category")
X = pd.concat([X, day], axis=1)
X.drop(t, axis=1, inplace=True)
return X
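# --- Illustrative sketch (added; not from the original source). ---
# The insight expands each datetime column into categorical month/day columns;
# the same effect in plain pandas, using a hypothetical 'ordered' column:
#
#     df = pd.DataFrame({"ordered": pd.to_datetime(["2020-01-02", "2020-03-04"])})
#     df["ordered_month"] = df["ordered"].dt.month.astype("category")
#     df["ordered_day"] = df["ordered"].dt.day.astype("category")
#     df = df.drop("ordered", axis=1)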
|
[
"pandas.concat"
] |
[((765, 804), 'pandas.concat', 'pd.concat', (['[dfe.df, month, day]'], {'axis': '(1)'}), '([dfe.df, month, day], axis=1)\n', (774, 804), True, 'import pandas as pd\n'), ((1829, 1858), 'pandas.concat', 'pd.concat', (['[X, month]'], {'axis': '(1)'}), '([X, month], axis=1)\n', (1838, 1858), True, 'import pandas as pd\n'), ((1997, 2024), 'pandas.concat', 'pd.concat', (['[X, day]'], {'axis': '(1)'}), '([X, day], axis=1)\n', (2006, 2024), True, 'import pandas as pd\n')]
|
#!/usr/bin/env python
"""Release the package."""
import datetime
import argparse
import os
import subprocess
def parse_arguments():
"""Return the command line arguments."""
bump = subprocess.check_output(("bump2version", "--list", "--dry-run", "--allow-dirty", "part"), text=True)
current_version = [line for line in bump.split("\n") if line.startswith("current_version")][0].split("=")[1]
parser = argparse.ArgumentParser(description=f'Release access-modifiers. Current version is {current_version}.')
parser.add_argument('version', choices=('patch', 'minor', 'major'))
return parser.parse_args()
def main():
"""Create the release."""
os.environ["RELEASE_DATE"] = datetime.date.today().isoformat() # Used by bump2version to update CHANGELOG.md
subprocess.run(("bump2version", parse_arguments().version), check=True)
subprocess.run(("git", "push", "--follow-tags"), check=True)
if __name__ == "__main__":
main()
|
[
"subprocess.run",
"subprocess.check_output",
"datetime.date.today",
"argparse.ArgumentParser"
] |
[((192, 296), 'subprocess.check_output', 'subprocess.check_output', (["('bump2version', '--list', '--dry-run', '--allow-dirty', 'part')"], {'text': '(True)'}), "(('bump2version', '--list', '--dry-run',\n '--allow-dirty', 'part'), text=True)\n", (215, 296), False, 'import subprocess\n'), ((419, 527), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'f"""Release access-modifiers. Current version is {current_version}."""'}), "(description=\n f'Release access-modifiers. Current version is {current_version}.')\n", (442, 527), False, 'import argparse\n'), ((864, 924), 'subprocess.run', 'subprocess.run', (["('git', 'push', '--follow-tags')"], {'check': '(True)'}), "(('git', 'push', '--follow-tags'), check=True)\n", (878, 924), False, 'import subprocess\n'), ((703, 724), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (722, 724), False, 'import datetime\n')]
|
import z3
from typing import List, NamedTuple, Dict, Optional
CloneExpressionOutput = NamedTuple("CloneExpressionOutput", [
("clones", List[z3.BoolRef]), ("var_map", Dict[z3.ExprRef, List[z3.ExprRef]])
])
def serialize_expression(expression: z3.ExprRef) -> str:
s = z3.Solver()
s.add(expression)
return s.sexpr()
def deserialize_expression(serialized_expression: str, ctx: Optional[z3.Context] = None) -> z3.ExprRef:
return z3.And(z3.parse_smt2_string(serialized_expression, ctx=ctx))
def get_variables(expression: z3.ExprRef) -> List[z3.ExprRef]:
"""
Returns all variables that are contained in the expression.
:param expression: Expression from which variables are extracted
"""
class AstRefKey:
def __init__(self, n):
self.n = n
def __hash__(self):
return self.n.hash()
def __eq__(self, other):
return self.n.eq(other.n)
def __repr__(self):
return str(self.n)
def askey(n):
assert isinstance(n, z3.AstRef)
return AstRefKey(n)
variables = set()
def collect(f):
if z3.is_const(f):
if f.decl().kind() == z3.Z3_OP_UNINTERPRETED and not askey(f) in variables:
variables.add(askey(f))
else:
for c in f.children():
collect(c)
collect(expression)
return [elem.n for elem in variables]
def recreate_variable(key: str, variable: z3.ExprRef) -> z3.ExprRef:
"""
Recreates the variable but renames it with a key that is used
to make it distinct.
:param key:
:param variable:
"""
return z3.Const(f"{key}_{variable}", variable.sort())
def clone_expression(
expression: z3.ExprRef,
q: int,
required_variables: Optional[List[z3.ExprRef]] = None,
) -> CloneExpressionOutput:
"""
Clones expression by generating q instances of the expression where each
variable is substituted by a unique newly generated variable for each variable in each clone.
The output will list each clone and a dictionary where each entry corresponds to
a mapping from variable in the original formula to the substituted cloned variables for each clone
listed in the same order as the clone list.
:param expression: Expression to be cloned
:param q: Amount of clones created
:param required_variables: Variables that should be cloned and put into the var_map even if they are not
contained in the expression.
"""
variables = set(get_variables(expression)).union(set(required_variables or []))
var_map = {
x: [recreate_variable(f"clone{{{i}}}", x) for i in range(q)] for x in variables
}
clones = [z3.substitute(expression, [(x, var_map[x][i]) for x in variables]) for i in range(q)]
return CloneExpressionOutput(
clones=clones,
var_map=var_map,
)
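# Illustrative example (added; not from the original source): cloning a
# one-variable formula twice yields two clones over fresh, distinct variables.
if __name__ == "__main__":
    x = z3.Int("x")
    out = clone_expression(x > 0, q=2)
    print(out.clones)  # [clone{0}_x > 0, clone{1}_x > 0]
    for var, copies in out.var_map.items():
        print(var, "->", copies)  # x -> [clone{0}_x, clone{1}_x]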
|
[
"z3.parse_smt2_string",
"typing.NamedTuple",
"z3.substitute",
"z3.Solver",
"z3.is_const"
] |
[((87, 208), 'typing.NamedTuple', 'NamedTuple', (['"""CloneExpressionOutput"""', "[('clones', List[z3.BoolRef]), ('var_map', Dict[z3.ExprRef, List[z3.ExprRef]])]"], {}), "('CloneExpressionOutput', [('clones', List[z3.BoolRef]), (\n 'var_map', Dict[z3.ExprRef, List[z3.ExprRef]])])\n", (97, 208), False, 'from typing import List, NamedTuple, Dict, Optional\n'), ((277, 288), 'z3.Solver', 'z3.Solver', ([], {}), '()\n', (286, 288), False, 'import z3\n'), ((456, 508), 'z3.parse_smt2_string', 'z3.parse_smt2_string', (['serialized_expression'], {'ctx': 'ctx'}), '(serialized_expression, ctx=ctx)\n', (476, 508), False, 'import z3\n'), ((1136, 1150), 'z3.is_const', 'z3.is_const', (['f'], {}), '(f)\n', (1147, 1150), False, 'import z3\n'), ((2714, 2780), 'z3.substitute', 'z3.substitute', (['expression', '[(x, var_map[x][i]) for x in variables]'], {}), '(expression, [(x, var_map[x][i]) for x in variables])\n', (2727, 2780), False, 'import z3\n')]
|
##############################################################################
# Copyright by The HDF Group. #
# All rights reserved. #
# #
# This file is part of HSDS (HDF5 Scalable Data Service), Libraries and #
# Utilities. The full HSDS copyright notice, including #
# terms governing use, modification, and redistribution, is contained in #
# the file COPYING, which can be found at the root of the source code #
# distribution tree. If you do not have access to this file, you may #
# request a copy from <EMAIL>. #
##############################################################################
#
# data node of hsds cluster
#
import time
from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError
from aiohttp.web import json_response
from util.idUtil import isValidUuid, validateUuid
from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj
import hsds_logger as log
async def GET_Datatype(request):
"""HTTP GET method to return JSON for /groups/
"""
log.request(request)
app = request.app
params = request.rel_url.query
ctype_id = get_obj_id(request)
if not isValidUuid(ctype_id, obj_class="type"):
log.error(f"Unexpected type_id: {ctype_id}")
raise HTTPInternalServerError()
if "bucket" in params:
bucket = params["bucket"]
else:
bucket = None
ctype_json = await get_metadata_obj(app, ctype_id, bucket=bucket)
resp_json = { }
resp_json["id"] = ctype_json["id"]
resp_json["root"] = ctype_json["root"]
resp_json["created"] = ctype_json["created"]
resp_json["lastModified"] = ctype_json["lastModified"]
resp_json["type"] = ctype_json["type"]
resp_json["attributeCount"] = len(ctype_json["attributes"])
if "include_attrs" in params and params["include_attrs"]:
resp_json["attributes"] = ctype_json["attributes"]
resp = json_response(resp_json)
log.response(request, resp=resp)
return resp
async def POST_Datatype(request):
""" Handler for POST /datatypes"""
log.info("Post_Datatype")
log.request(request)
app = request.app
params = request.rel_url.query
if not request.has_body:
msg = "POST_Datatype with no body"
log.error(msg)
raise HTTPBadRequest(reason=msg)
body = await request.json()
if "bucket" in params:
bucket = params["bucket"]
elif "bucket" in body:
bucket = params["bucket"]
else:
bucket = None
ctype_id = get_obj_id(request, body=body)
if not isValidUuid(ctype_id, obj_class="datatype"):
log.error( "Unexpected type_id: {}".format(ctype_id))
raise HTTPInternalServerError()
# verify the id doesn't already exist
obj_found = await check_metadata_obj(app, ctype_id, bucket=bucket)
if obj_found:
log.error( "Post with existing type_id: {}".format(ctype_id))
raise HTTPInternalServerError()
root_id = None
if "root" not in body:
msg = "POST_Datatype with no root"
log.error(msg)
raise HTTPInternalServerError()
root_id = body["root"]
try:
validateUuid(root_id, "group")
except ValueError:
msg = "Invalid root_id: " + root_id
log.error(msg)
raise HTTPInternalServerError()
if "type" not in body:
msg = "POST_Datatype with no type"
log.error(msg)
raise HTTPInternalServerError()
type_json = body["type"]
# ok - all set, create committed type obj
now = time.time()
log.info("POST_datatype, typejson: {}". format(type_json))
ctype_json = {"id": ctype_id, "root": root_id, "created": now,
"lastModified": now, "type": type_json, "attributes": {} }
await save_metadata_obj(app, ctype_id, ctype_json, bucket=bucket, notify=True, flush=True)
resp_json = {}
resp_json["id"] = ctype_id
resp_json["root"] = root_id
resp_json["created"] = ctype_json["created"]
resp_json["lastModified"] = ctype_json["lastModified"]
resp_json["attributeCount"] = 0
resp = json_response(resp_json, status=201)
log.response(request, resp=resp)
return resp
async def DELETE_Datatype(request):
"""HTTP DELETE method for datatype
"""
log.request(request)
app = request.app
params = request.rel_url.query
ctype_id = get_obj_id(request)
log.info(f"DELETE ctype: {ctype_id}")
if "bucket" in params:
bucket = params["bucket"]
else:
bucket = None
# verify the id exist
obj_found = await check_metadata_obj(app, ctype_id)
if not obj_found:
log.warn(f"Delete on non-existent obj: {ctype_id}")
raise HTTPNotFound
log.info("deleting ctype: {}".format(ctype_id))
notify=True
if "Notify" in params and not params["Notify"]:
log.info("notify value: {}".format(params["Notify"]))
notify=False
log.info("notify: {}".format(notify))
await delete_metadata_obj(app, ctype_id, bucket=bucket, notify=notify)
resp_json = { }
resp = json_response(resp_json)
log.response(request, resp=resp)
return resp
|
[
"datanode_lib.check_metadata_obj",
"hsds_logger.info",
"hsds_logger.request",
"datanode_lib.get_obj_id",
"util.idUtil.isValidUuid",
"aiohttp.web.json_response",
"time.time",
"aiohttp.web_exceptions.HTTPInternalServerError",
"aiohttp.web_exceptions.HTTPBadRequest",
"datanode_lib.save_metadata_obj",
"hsds_logger.error",
"util.idUtil.validateUuid",
"hsds_logger.warn",
"datanode_lib.delete_metadata_obj",
"datanode_lib.get_metadata_obj",
"hsds_logger.response"
] |
[((1327, 1347), 'hsds_logger.request', 'log.request', (['request'], {}), '(request)\n', (1338, 1347), True, 'import hsds_logger as log\n'), ((1420, 1439), 'datanode_lib.get_obj_id', 'get_obj_id', (['request'], {}), '(request)\n', (1430, 1439), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((2214, 2238), 'aiohttp.web.json_response', 'json_response', (['resp_json'], {}), '(resp_json)\n', (2227, 2238), False, 'from aiohttp.web import json_response\n'), ((2243, 2275), 'hsds_logger.response', 'log.response', (['request'], {'resp': 'resp'}), '(request, resp=resp)\n', (2255, 2275), True, 'import hsds_logger as log\n'), ((2370, 2395), 'hsds_logger.info', 'log.info', (['"""Post_Datatype"""'], {}), "('Post_Datatype')\n", (2378, 2395), True, 'import hsds_logger as log\n'), ((2400, 2420), 'hsds_logger.request', 'log.request', (['request'], {}), '(request)\n', (2411, 2420), True, 'import hsds_logger as log\n'), ((2822, 2852), 'datanode_lib.get_obj_id', 'get_obj_id', (['request'], {'body': 'body'}), '(request, body=body)\n', (2832, 2852), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((3846, 3857), 'time.time', 'time.time', ([], {}), '()\n', (3855, 3857), False, 'import time\n'), ((4403, 4439), 'aiohttp.web.json_response', 'json_response', (['resp_json'], {'status': '(201)'}), '(resp_json, status=201)\n', (4416, 4439), False, 'from aiohttp.web import json_response\n'), ((4445, 4477), 'hsds_logger.response', 'log.response', (['request'], {'resp': 'resp'}), '(request, resp=resp)\n', (4457, 4477), True, 'import hsds_logger as log\n'), ((4583, 4603), 'hsds_logger.request', 'log.request', (['request'], {}), '(request)\n', (4594, 4603), True, 'import hsds_logger as log\n'), ((4681, 4700), 'datanode_lib.get_obj_id', 'get_obj_id', (['request'], {}), '(request)\n', (4691, 4700), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((4705, 4742), 'hsds_logger.info', 'log.info', (['f"""DELETE ctype: {ctype_id}"""'], {}), "(f'DELETE ctype: {ctype_id}')\n", (4713, 4742), True, 'import hsds_logger as log\n'), ((5400, 5424), 'aiohttp.web.json_response', 'json_response', (['resp_json'], {}), '(resp_json)\n', (5413, 5424), False, 'from aiohttp.web import json_response\n'), ((5429, 5461), 'hsds_logger.response', 'log.response', (['request'], {'resp': 'resp'}), '(request, resp=resp)\n', (5441, 5461), True, 'import hsds_logger as log\n'), ((1458, 1497), 'util.idUtil.isValidUuid', 'isValidUuid', (['ctype_id'], {'obj_class': '"""type"""'}), "(ctype_id, obj_class='type')\n", (1469, 1497), False, 'from util.idUtil import isValidUuid, validateUuid\n'), ((1507, 1551), 'hsds_logger.error', 'log.error', (['f"""Unexpected type_id: {ctype_id}"""'], {}), "(f'Unexpected type_id: {ctype_id}')\n", (1516, 1551), True, 'import hsds_logger as log\n'), ((1566, 1591), 'aiohttp.web_exceptions.HTTPInternalServerError', 'HTTPInternalServerError', ([], {}), '()\n', (1589, 1591), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n'), ((1710, 1756), 'datanode_lib.get_metadata_obj', 'get_metadata_obj', (['app', 'ctype_id'], {'bucket': 'bucket'}), '(app, ctype_id, bucket=bucket)\n', (1726, 1756), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((2559, 2573), 'hsds_logger.error', 'log.error', 
(['msg'], {}), '(msg)\n', (2568, 2573), True, 'import hsds_logger as log\n'), ((2588, 2614), 'aiohttp.web_exceptions.HTTPBadRequest', 'HTTPBadRequest', ([], {'reason': 'msg'}), '(reason=msg)\n', (2602, 2614), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n'), ((2864, 2907), 'util.idUtil.isValidUuid', 'isValidUuid', (['ctype_id'], {'obj_class': '"""datatype"""'}), "(ctype_id, obj_class='datatype')\n", (2875, 2907), False, 'from util.idUtil import isValidUuid, validateUuid\n'), ((2985, 3010), 'aiohttp.web_exceptions.HTTPInternalServerError', 'HTTPInternalServerError', ([], {}), '()\n', (3008, 3010), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n'), ((3076, 3124), 'datanode_lib.check_metadata_obj', 'check_metadata_obj', (['app', 'ctype_id'], {'bucket': 'bucket'}), '(app, ctype_id, bucket=bucket)\n', (3094, 3124), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((3227, 3252), 'aiohttp.web_exceptions.HTTPInternalServerError', 'HTTPInternalServerError', ([], {}), '()\n', (3250, 3252), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n'), ((3356, 3370), 'hsds_logger.error', 'log.error', (['msg'], {}), '(msg)\n', (3365, 3370), True, 'import hsds_logger as log\n'), ((3385, 3410), 'aiohttp.web_exceptions.HTTPInternalServerError', 'HTTPInternalServerError', ([], {}), '()\n', (3408, 3410), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n'), ((3455, 3485), 'util.idUtil.validateUuid', 'validateUuid', (['root_id', '"""group"""'], {}), "(root_id, 'group')\n", (3467, 3485), False, 'from util.idUtil import isValidUuid, validateUuid\n'), ((3700, 3714), 'hsds_logger.error', 'log.error', (['msg'], {}), '(msg)\n', (3709, 3714), True, 'import hsds_logger as log\n'), ((3729, 3754), 'aiohttp.web_exceptions.HTTPInternalServerError', 'HTTPInternalServerError', ([], {}), '()\n', (3752, 3754), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n'), ((4078, 4166), 'datanode_lib.save_metadata_obj', 'save_metadata_obj', (['app', 'ctype_id', 'ctype_json'], {'bucket': 'bucket', 'notify': '(True)', 'flush': '(True)'}), '(app, ctype_id, ctype_json, bucket=bucket, notify=True,\n flush=True)\n', (4095, 4166), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((4887, 4920), 'datanode_lib.check_metadata_obj', 'check_metadata_obj', (['app', 'ctype_id'], {}), '(app, ctype_id)\n', (4905, 4920), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((4951, 5002), 'hsds_logger.warn', 'log.warn', (['f"""Delete on non-existent obj: {ctype_id}"""'], {}), "(f'Delete on non-existent obj: {ctype_id}')\n", (4959, 5002), True, 'import hsds_logger as log\n'), ((5300, 5364), 'datanode_lib.delete_metadata_obj', 'delete_metadata_obj', (['app', 'ctype_id'], {'bucket': 'bucket', 'notify': 'notify'}), '(app, ctype_id, bucket=bucket, notify=notify)\n', (5319, 5364), False, 'from datanode_lib import get_obj_id, get_metadata_obj, save_metadata_obj, delete_metadata_obj, check_metadata_obj\n'), ((3561, 3575), 'hsds_logger.error', 'log.error', (['msg'], {}), '(msg)\n', (3570, 3575), True, 'import hsds_logger as log\n'), ((3590, 3615), 'aiohttp.web_exceptions.HTTPInternalServerError', 
'HTTPInternalServerError', ([], {}), '()\n', (3613, 3615), False, 'from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound, HTTPInternalServerError\n')]
|
import os
import unittest
class TestCython(unittest.TestCase):
pass
def add_test(name, runner, target):
setattr(TestCython, "test_" + name, lambda s: runner(s, target))
try:
import Cython
import glob
import sys
targets = glob.glob(os.path.join(os.path.dirname(__file__), "cython", "setup_*.py"))
sys.path.append(os.path.join(os.path.dirname(__file__), "cython"))
for target in targets:
def runner(self, target):
cwd = os.getcwd()
try:
os.chdir(os.path.dirname(target))
exec(open(os.path.basename(target)).read())
except:
raise
finally:
os.chdir(cwd)
name, _ = os.path.splitext(os.path.basename(target))
add_test(name, runner, target)
except ImportError:
pass
|
[
"os.getcwd",
"os.path.dirname",
"os.chdir",
"os.path.basename"
] |
[((270, 295), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (285, 295), False, 'import os\n'), ((355, 380), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (370, 380), False, 'import os\n'), ((473, 484), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (482, 484), False, 'import os\n'), ((740, 764), 'os.path.basename', 'os.path.basename', (['target'], {}), '(target)\n', (756, 764), False, 'import os\n'), ((691, 704), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (699, 704), False, 'import os\n'), ((527, 550), 'os.path.dirname', 'os.path.dirname', (['target'], {}), '(target)\n', (542, 550), False, 'import os\n'), ((578, 602), 'os.path.basename', 'os.path.basename', (['target'], {}), '(target)\n', (594, 602), False, 'import os\n')]
|
#!/usr/bin/env python2
import sys
import os
import random
import msgpack # requires msgpack-python
samples = [
-1,
15441,
99999999999,
"43110",
192168,
['aaaaaaa', 'bbbbbbb', os.urandom(30), 12345678, 11111111111, {123: "4567890"}],
"[sjfdkjsdalfksjdlfsafl]",
"",
0,
1,
["this", 123, "is", {"not": "gonna"}, [["work", "!"]]],
{"cmd": "getFile", "req_id": 1, "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "inner_path": "content.json", "location": 0}},
{"cmd": "getFile", "req_id": 12345, "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "inner_path": "../sites.json", "location": -1230}},
{"cmd": "ping", "req_id": 34323683, "params": {}},
{"cmd": "ping", "req_id": False, "params": False},
{"cmd": "pex", "req_id": 3123683, "params": ["sdfasfdsf", "vvvvvvv", 12388712937812]},
{"cmd": "pex", "req_id": "fake", "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "peers": "AAAABBBBCCCCDDDD", "need": 50}},
{"cmd": "pex", "req_id": "fake", "params:": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "peers": 0xABCDABCD, "need": -100}},
{"cmd": "pex", "req_id": 50, "params:": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "peers": os.urandom(32), "need": "x"}},
{"cmd": "pex", "req_id": 5, "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "peers": ["aaaa", "bbbb", "cccc", "dddd"], "need": [100]}},
{"cmd": "getFile", "req_id": os.urandom(64), "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "inner_path": os.urandom(64), "location": 0}},
{"cmd": "getFile", "req_id": 25519, "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "inner_path": ["./", "./", "./..", "sites.json"], "location": -1}},
{"cmd": "", "req_id": None, "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "inner_path": os.urandom(64), "location": 0xAAAA}},
{"cmd": [[[[[["getWhat?"]]]]]], "req_id": -300, "params": {"site": "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D", "inner_path": ["./", "./", "./../", "/sites.json"], "location": 65537}},
{"cmd": "update", "req_id": 123, "params": {"site": os.urandom(32)}},
{os.urandom(8): "update", os.urandom(8): 123, "params": {"site": os.urandom(32), "inner_path": "././././././content.json"}},
]
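# Pick one sample at random and emit its msgpack encoding on stdout (binary).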
item = random.SystemRandom().choice(samples)
sys.stdout.write(msgpack.packb(item))
|
[
"os.urandom",
"random.SystemRandom",
"msgpack.packb"
] |
[((2322, 2341), 'msgpack.packb', 'msgpack.packb', (['item'], {}), '(item)\n', (2335, 2341), False, 'import msgpack\n'), ((201, 215), 'os.urandom', 'os.urandom', (['(30)'], {}), '(30)\n', (211, 215), False, 'import os\n'), ((1445, 1459), 'os.urandom', 'os.urandom', (['(64)'], {}), '(64)\n', (1455, 1459), False, 'import os\n'), ((2132, 2145), 'os.urandom', 'os.urandom', (['(8)'], {}), '(8)\n', (2142, 2145), False, 'import os\n'), ((2157, 2170), 'os.urandom', 'os.urandom', (['(8)'], {}), '(8)\n', (2167, 2170), False, 'import os\n'), ((2267, 2288), 'random.SystemRandom', 'random.SystemRandom', ([], {}), '()\n', (2286, 2288), False, 'import random\n'), ((1232, 1246), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (1242, 1246), False, 'import os\n'), ((1532, 1546), 'os.urandom', 'os.urandom', (['(64)'], {}), '(64)\n', (1542, 1546), False, 'import os\n'), ((1833, 1847), 'os.urandom', 'os.urandom', (['(64)'], {}), '(64)\n', (1843, 1847), False, 'import os\n'), ((2109, 2123), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (2119, 2123), False, 'import os\n'), ((2196, 2210), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (2206, 2210), False, 'import os\n')]
|
from ze_mailer.app.core.messages import Info
import re
class UtilitiesMixin:
"""A mixin used to extend classes with various definitions on
repetitive tasks on names such as normalizing them etc.
"""
@staticmethod
def check_name_structure(name):
"""Check the structure of a name. Sometimes, we might
get names like '<NAME>' or 'eugenie' and we
        have to be able to distinguish between them
"""
is_normal = re.match(r'^(?:\w+\s?)+$', name)
is_single = re.match(r'^(?:\w+)$', name)
if is_normal:
return {'regex': is_normal, 'match': 'normal'}
else:
return {'regex': is_single, 'match': 'single'}
    @classmethod
    def split_name(cls, name):
        """Split a full name into an array of its parts.
Result
------
`<NAME>` becomes `[Eugénie, Bouchard]`.
"""
# We have to assert through a regex
# that we are getting a classic pattern:
# 'eugenie bouchard' as opposed to 'eugenie'
        check = cls.check_name_structure(name)
if check['match'] == 'single':
# If we do not have a match,
# it means that the name is a single
# element and need to return as is
return name
return name.split(' ')
@classmethod
def split_multiple_names(cls, names:list):
"""Split multiple names into arrays
"""
for name in names:
yield cls.split_name(name)
@staticmethod
def normalize_name(name):
"""A helper function that normalizes a name to lowercase
and strips any whitespaces
Example
-------
"<NAME> " becomes "eugenie bouchard"
"""
return name.lower().strip()
@classmethod
def normalize_names(cls, names:list):
for index, name in enumerate(names):
# TODO: Cases where the array contains
# two names - Build something
# [<NAME>, ...]
names[index][0] = cls.normalize_name(name[0])
return names
@classmethod
def flatten_name(cls, name):
"""Replace all accents from a name and
normalize it.
Example
------
"<NAME>" or "<NAME>\\s?"
becomes `eugenie bouchard`.
NOTE - This method will also normalize the name
"""
new_name=''
accents = {
'é': 'e',
'è': 'e',
'ê': 'e',
'ë': 'e',
'ï': 'i',
'î': 'i',
'ü': 'u',
'ù': 'u',
'à': 'a',
}
for letter in name:
for key, value in accents.items():
if letter == key:
letter = value
new_name += letter
return cls.normalize_name(new_name)
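    # Note: for arbitrary accents, unicodedata.normalize('NFKD', name) followed
    # by an ASCII encode with errors='ignore' is a more general alternative to
    # this hand-written lookup table.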
@classmethod
def reverse(cls, name):
"""Reverse an array with names.
Example
-------
[Eugenie, Bouchard] to [Bouchard, Eugenie]
"""
return list(reversed(cls.split_name(name)))
def decompose(self, name, change_position=False):
"""Structures composed names into two unique names
Example
-------
"<NAME>" becomes "Eugenie" "<NAME>"
[Eugenie, Pauline Bouchard] or [Eugenie Pauline, Bouchard]
Parameters
----------
change_position - changes the direction in which the composed name
should appear. The default position is on the left.
"""
# [Eugenie, Pauline, Bouchard]
splitted_name = self.split_name(name)
        # A composed name must split into exactly three parts
if len(splitted_name) != 3:
print(Info('Cannot perform operation. Your name seems to be a '
'non composed name: %s') % name)
return None
# Pop middle name
middle_name = splitted_name.pop(1)
# Create composed name by joining parts
if change_position:
# .. Eugenie and Pauline
composed_name = ' '.join([splitted_name[0], middle_name])
splitted_name[0] = composed_name
else:
# .. <NAME>
composed_name = ' '.join([middle_name, splitted_name[1]])
# ..
splitted_name[1] = composed_name
# [Eugenie, <NAME>] or
# [<NAME>, Bouchard]
return splitted_name
|
[
"re.match",
"ze_mailer.app.core.messages.Info"
] |
[((454, 487), 're.match', 're.match', (['"""^(?:\\\\w+\\\\s?)+$"""', 'name'], {}), "('^(?:\\\\w+\\\\s?)+$', name)\n", (462, 487), False, 'import re\n'), ((507, 535), 're.match', 're.match', (['"""^(?:\\\\w+)$"""', 'name'], {}), "('^(?:\\\\w+)$', name)\n", (515, 535), False, 'import re\n'), ((3764, 3843), 'ze_mailer.app.core.messages.Info', 'Info', (['"""Cannot perform operation. Your name seems to be a non composed name: %s"""'], {}), "('Cannot perform operation. Your name seems to be a non composed name: %s')\n", (3768, 3843), False, 'from ze_mailer.app.core.messages import Info\n')]
|
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from gppylib.commands.base import Command
from tinctest import logger
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase
from mpp.gpdb.tests.storage.walrepl import lib as walrepl
from mpp.gpdb.tests.storage.walrepl.lib.walcomm import *
from mpp.gpdb.tests.storage.walrepl.lib import PgControlData
import subprocess
class simple(MPPTestCase):
def setUp(self):
Command('gpinitstandby -ra', 'gpinitstandby -ra').run()
def tearDown(self):
pass
def test_identify_system(self):
"""
Check if the system identifier matches.
@tags sanity
"""
with WalClient("replication=true") as client:
(sysid, tli, xpos) = client.identify_system()
datadir = os.environ['MASTER_DATA_DIRECTORY']
controldata = PgControlData(datadir)
assert sysid == controldata.get('Database system identifier')
def test_streaming(self):
"""
Run sendtest, let database emit WAL.
        sendtest should receive new WAL records. At the end, we kill
        the walsender process, otherwise the test doesn't finish.
@tags sanity
"""
PSQL.run_sql_command('DROP TABLE IF EXISTS foo')
with WalClient("replication=true") as client:
(sysid, tli, xpos) = client.identify_system()
xpos_ptr = XLogRecPtr.from_string(xpos)
client.start_replication(xpos_ptr)
# Can't use PSQL here as it is blocked due to Sync Rep
subprocess.Popen(['psql', '-c', 'CREATE TABLE foo(a int, b int)'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while True:
msg = client.receive(1000)
if isinstance(msg, WalMessageData):
header = msg.header
# sync replication needs a reply otherwise backend blocks
client.reply(header.walEnd, header.walEnd, header.walEnd)
# success, should get some 'w' message
break
elif isinstance(msg, WalMessageNoData):
# could be timeout
client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
else:
raise StandardError(msg.errmsg)
def test_async(self):
"""
Run sendtest in async mode.
@tags sanity
"""
PSQL.run_sql_command('DROP TABLE IF EXISTS foo')
with WalClient("replication=true") as client:
self.assertEqual(client.status(), CONNECTION_OK)
(sysid, tli, xpos) = client.identify_system()
xpos_ptr = XLogRecPtr.from_string(xpos)
client.start_replication(xpos_ptr, sync=False)
# wouldn't block since it's async connection.
PSQL.run_sql_command('CREATE TABLE foo(a int, b int)')
# it may take time to get complete message
for i in walrepl.polling(10, 0.5):
msg = client.receive(1000)
if isinstance(msg, WalMessageData):
break
self.assertIsInstance(msg, WalMessageData)
def test_invalid_command_new(self):
"""
Check syntax error.
@tags sanity
@product_version gpdb: [4.3.6.2-4.3.9.0]
"""
with WalClient("replication=true") as client:
# connection established?
self.assertEqual(client.status(), CONNECTION_OK)
# send bad message
res = client.execute("IDENTIFY_SYSTEM FOO")
# check if server responded syntax error
self.assertEqual(res.status(), PGRES_FATAL_ERROR)
self.assertIn("syntax error", client.error_message())
self.assertEqual(client.status(), CONNECTION_OK)
with WalClient("replication=true") as client:
self.assertEquals(client.status(), CONNECTION_OK)
# send bad message
res = client.execute("START_REPLICATION")
# check if server responded syntax error
self.assertEqual(res.status(), PGRES_FATAL_ERROR)
self.assertIn("syntax error", client.error_message())
self.assertEqual(client.status(), CONNECTION_OK)
|
[
"subprocess.Popen",
"mpp.lib.PSQL.PSQL.run_sql_command",
"mpp.gpdb.tests.storage.walrepl.lib.polling",
"mpp.gpdb.tests.storage.walrepl.lib.PgControlData",
"gppylib.commands.base.Command"
] |
[((1853, 1901), 'mpp.lib.PSQL.PSQL.run_sql_command', 'PSQL.run_sql_command', (['"""DROP TABLE IF EXISTS foo"""'], {}), "('DROP TABLE IF EXISTS foo')\n", (1873, 1901), False, 'from mpp.lib.PSQL import PSQL\n'), ((3089, 3137), 'mpp.lib.PSQL.PSQL.run_sql_command', 'PSQL.run_sql_command', (['"""DROP TABLE IF EXISTS foo"""'], {}), "('DROP TABLE IF EXISTS foo')\n", (3109, 3137), False, 'from mpp.lib.PSQL import PSQL\n'), ((1487, 1509), 'mpp.gpdb.tests.storage.walrepl.lib.PgControlData', 'PgControlData', (['datadir'], {}), '(datadir)\n', (1500, 1509), False, 'from mpp.gpdb.tests.storage.walrepl.lib import PgControlData\n'), ((2194, 2313), 'subprocess.Popen', 'subprocess.Popen', (["['psql', '-c', 'CREATE TABLE foo(a int, b int)']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['psql', '-c', 'CREATE TABLE foo(a int, b int)'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE)\n", (2210, 2313), False, 'import subprocess\n'), ((3494, 3548), 'mpp.lib.PSQL.PSQL.run_sql_command', 'PSQL.run_sql_command', (['"""CREATE TABLE foo(a int, b int)"""'], {}), "('CREATE TABLE foo(a int, b int)')\n", (3514, 3548), False, 'from mpp.lib.PSQL import PSQL\n'), ((3626, 3650), 'mpp.gpdb.tests.storage.walrepl.lib.polling', 'walrepl.polling', (['(10)', '(0.5)'], {}), '(10, 0.5)\n', (3641, 3650), True, 'from mpp.gpdb.tests.storage.walrepl import lib as walrepl\n'), ((1064, 1113), 'gppylib.commands.base.Command', 'Command', (['"""gpinitstandby -ra"""', '"""gpinitstandby -ra"""'], {}), "('gpinitstandby -ra', 'gpinitstandby -ra')\n", (1071, 1113), False, 'from gppylib.commands.base import Command\n')]
|
from django import template
from django.utils.safestring import mark_safe
from datetime import datetime
import re
from basic.blog.models import Post
import urllib2
register = template.Library()
@register.filter
def order_by(queryset, args):
'''
Orders a queryset
Arguments:
queryset
duh
args
a string of attributes by which to order, separated by commas
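    Example: {{ posts|order_by:"title,-publish" }} (field names are illustrative)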
'''
args = [x.strip() for x in args.split(',')]
return queryset.order_by(*args)
@register.filter
def sizeToReadable(value):
'''
    Takes a number of bytes and reformats it to a readable KiB/MiB/GiB/TiB string. Returns '??' in a pinch.
Arguments:
value
Something that can be converted to float.
'''
try:
value = float(value)
except Exception: # we expect a number, after all.
#or something that can be turned into a number.
return "??"
count = 0
while value > 1024:
value = value/1024
count += 1
if count == 1:
appender = "KiB"
elif count == 2:
appender = "MiB"
elif count == 3:
appender = "GiB"
elif count == 4:
appender = "TiB"
else:
appender = "B"
niceNum = "%.1f" % value # 1 decimal place for table formatting reasons
return niceNum + " " + appender
@register.filter
def dateToReadable(value):
'''
converts that ugly fucking date to month/day format; expects a datetime object as an arg
'''
try:
return value.strftime("%m/%d")
except Exception:
return "??"
@register.filter
def highlight(object,words, autoescape=None):
'''bolds the search query in the files found in the search'''
try:
value = object.filename
except AttributeError:
value = object.fullname
except:
return object
if len(value) > 105:
value = value[:35] + "..." + value[-35:]
# REGULAR EXPRESSION MATCHING FOR HIGHLIGHTING SHIT
for word in words:
regexps = re.compile("("+word+")",flags=re.IGNORECASE) # generate a regexp WITH wrapping parens
explosion = re.split(regexps,value) # the parens make this keep the match in the split list
expl2 = []
for piece in explosion: # oh gods please work
if re.match(regexps,piece):
piece = "<strong>"+piece+"</strong>" # this doesn't change the piece inside of explosion...
expl2 += [piece] # so fuck it. we'll just populate a new list!
value = ''.join(expl2)
return mark_safe(value)
highlight.is_safe = True
@register.filter
def makeLink(object, directServe=False):
'''
Intelligently returns the link to a file or path, either smb or http, based on the parameter directServe. If True, creates a direct link. If false, creates an smb link ref.
'''
# if isinstance(object, File)
try:
if object.path.hid.servesDirect == True and directServe != False:
return "http://%s:%d%s/%s" % (object.path.hid,object.path.hid.directPort, urllib2.quote(str(object.path)),urllib2.quote(str(object)))
else:
return "smb://%s%s" % (object.path.hid, object.path)
except AttributeError: # no object.path -> object is a path
try:
if object.hid.servesDirect == True and directServe != False:
#do nothing
return "smb://%s%s" % (object.hid, object)
else:
return "smb://%s%s" % (object.hid, object)
except AttributeError:
try:
# no HID - shouldn't exist anymore anyway.
object.delete()
except:
pass
return "??"
except:
return "??"
@register.filter
def sanitize(file):
''' Returns a UnicodeEncodeError safe string. Fuck these template errors.'''
try:
string = file.filename.encode('ascii','replace')
except:
string = "??"
return string
@register.filter
def host(object):
'''
Returns the :model:`browseNet.Host` of a :model:`browseNet.Path` or :model:`search.File`, or '??' if it can't for any reason.
'''
try:
return object.hid
except AttributeError:
try:
return object.path.hid
except:
return '??'
except:
return '??'
@register.filter
def size(object):
'''
returns the size of the :model:`browseNet.Path` or :model:`search.File`
'''
try:
return object.filesize
except AttributeError:
return object.pathsize
except:
return '??'
@register.filter
def status(object):
''' prints good/bad/unclear image for file'''
try:
object.pathsize
return ''
except AttributeError:
img = "<img class='statusind' style=\"float:right;margin-right:15px;\" src='/media/images/%(img)s' alt='%(alt)s'/>"
if object.goodfile == 1:
value = img % {'img':'goodfile.gif','alt':'File is good!'}
elif object.goodfile == 0:
value = img % {'img':'badfile.gif','alt':'File is bad.'}
else:
value = img % {'img':'goodfile.gif','alt':'File is contested.'}
return mark_safe(value)
except:
return ''
status.is_safe=True
################################################################
# Handler for {% logo %} tag
#
from django.core.urlresolvers import reverse
import random
class LogoNode(template.Node):
mChoices = ['Mmath'] * 15 # make it more likely to see the old M's
mChoices += ['Moldenglish',
'Mvivaldi',
'Mcurlz',
'Mmagneto',
'M_andrew_ho',
'Mcompass',
'Mdoctor',]
arrows = ['Arrowmath',] * 10 # also the old arrow
styling = "<a class='logolink' href=\"%s\">\
<div id='modlogo' style=\"\">%s</div>\
</a>"
extras = {
'movies':styling % (reverse('advancedsearch.views.movieSplash'),'ovies'),
'music': styling % (reverse('advancedsearch.views.musicSplash'),'usic'),
'shows': styling % (reverse('advancedsearch.views.showSplash'),'TV'),
'None':"",
}
def __init__(self, module):
self.left = random.choice(self.mChoices)
self.right = random.choice(self.mChoices)
self.arrow = random.choice(self.arrows)
# halloween!
if (datetime.now().day in [29,30,31] and datetime.now().month==10):
self.left = 'mPunkin'
self.right = 'mHalCat'
self.arrow = 'Arrowmath'
# jessi peck's birthday
if (datetime.now().day == 25 and datetime.now().month == 10):
self.left = 'M_jessi_peck'
if module not in self.extras:
raise ValueError("logo tag could not recognize module: %r" % module)
else:
self.extra = self.extras[module]
def render(self, context):
try:
return "\
<a href=\"%(index)s\"><span>\
<img id='leftlogo' src='/media/images/%(left)s.png'/>\
<img id='arrowlogo' src='/media/images/%(arrow)s.png'/>\
<img id='rightlogo' src='/media/images/%(right)s.png'/></span></a>%(extra)s" % {
'left':self.left,
'right':self.right,
'arrow':self.arrow,
'extra':self.extra,
'index':reverse('search.views.index')
}
except:
return '<span style="font-size:6em;">Logo Unavailable</span>'
@register.tag(name="logo")
def do_logo(parser,token):
try:
tag_name, module = token.split_contents()
except ValueError:
        raise template.TemplateSyntaxError("%r tag requires an argument" % token.contents.split()[0])
return LogoNode(module)
#
################################################################
@register.tag(name="extra_styles")
def do_extra_styles(parser,token):
return ExtraStyles()
class ExtraStyles(template.Node):
def __init__(self):
# halloween!
self.stylesheet = []
if (datetime.now().day in [29,30,31] and datetime.now().month == 10):
self.stylesheet += ['halloween']
def render(self, context):
if self.stylesheet == []:
return ''
else:
return '\n'.join(["<link rel=\"stylesheet\" type=\"text/css\" href=\"/media/styles/{}.css\" />".format(x) for x in self.stylesheet])
from datetime import date, timedelta
class NewNewsNode(template.Node):
string = "<div id=\"newNews\"><p>+{:d}</p></div>"
def __init__(self, *args, **kwargs):
self.number = Post.objects.filter(publish__gt=date.today()-timedelta(days=4)).count()
def render(self, context):
try:
if self.number > 0:
return self.string.format(self.number)
else:
return ""
except:
return ""
@register.tag(name="latestnews")
def do_newNews(parser,token):
try:
tag_name = token.split_contents()
except:
return ""
return NewNewsNode()
|
[
"django.template.Library",
"re.split",
"django.core.urlresolvers.reverse",
"random.choice",
"django.utils.safestring.mark_safe",
"re.match",
"datetime.date.today",
"datetime.timedelta",
"datetime.datetime.now",
"re.compile"
] |
[((179, 197), 'django.template.Library', 'template.Library', ([], {}), '()\n', (195, 197), False, 'from django import template\n'), ((2630, 2646), 'django.utils.safestring.mark_safe', 'mark_safe', (['value'], {}), '(value)\n', (2639, 2646), False, 'from django.utils.safestring import mark_safe\n'), ((2064, 2113), 're.compile', 're.compile', (["('(' + word + ')')"], {'flags': 're.IGNORECASE'}), "('(' + word + ')', flags=re.IGNORECASE)\n", (2074, 2113), False, 'import re\n'), ((2170, 2194), 're.split', 're.split', (['regexps', 'value'], {}), '(regexps, value)\n', (2178, 2194), False, 'import re\n'), ((6379, 6407), 'random.choice', 'random.choice', (['self.mChoices'], {}), '(self.mChoices)\n', (6392, 6407), False, 'import random\n'), ((6429, 6457), 'random.choice', 'random.choice', (['self.mChoices'], {}), '(self.mChoices)\n', (6442, 6457), False, 'import random\n'), ((6479, 6505), 'random.choice', 'random.choice', (['self.arrows'], {}), '(self.arrows)\n', (6492, 6505), False, 'import random\n'), ((2357, 2381), 're.match', 're.match', (['regexps', 'piece'], {}), '(regexps, piece)\n', (2365, 2381), False, 'import re\n'), ((5297, 5313), 'django.utils.safestring.mark_safe', 'mark_safe', (['value'], {}), '(value)\n', (5306, 5313), False, 'from django.utils.safestring import mark_safe\n'), ((6070, 6113), 'django.core.urlresolvers.reverse', 'reverse', (['"""advancedsearch.views.movieSplash"""'], {}), "('advancedsearch.views.movieSplash')\n", (6077, 6113), False, 'from django.core.urlresolvers import reverse\n'), ((6152, 6195), 'django.core.urlresolvers.reverse', 'reverse', (['"""advancedsearch.views.musicSplash"""'], {}), "('advancedsearch.views.musicSplash')\n", (6159, 6195), False, 'from django.core.urlresolvers import reverse\n'), ((6233, 6275), 'django.core.urlresolvers.reverse', 'reverse', (['"""advancedsearch.views.showSplash"""'], {}), "('advancedsearch.views.showSplash')\n", (6240, 6275), False, 'from django.core.urlresolvers import reverse\n'), ((6540, 6554), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6552, 6554), False, 'from datetime import datetime\n'), ((6577, 6591), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6589, 6591), False, 'from datetime import datetime\n'), ((6763, 6777), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6775, 6777), False, 'from datetime import datetime\n'), ((6792, 6806), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6804, 6806), False, 'from datetime import datetime\n'), ((7933, 7962), 'django.core.urlresolvers.reverse', 'reverse', (['"""search.views.index"""'], {}), "('search.views.index')\n", (7940, 7962), False, 'from django.core.urlresolvers import reverse\n'), ((8707, 8721), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8719, 8721), False, 'from datetime import datetime\n'), ((8744, 8758), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8756, 8758), False, 'from datetime import datetime\n'), ((9297, 9309), 'datetime.date.today', 'date.today', ([], {}), '()\n', (9307, 9309), False, 'from datetime import date, timedelta\n'), ((9310, 9327), 'datetime.timedelta', 'timedelta', ([], {'days': '(4)'}), '(days=4)\n', (9319, 9327), False, 'from datetime import date, timedelta\n')]
|
import re
import cattr
from django.utils import timezone
from django.utils.http import parse_http_date
from httpsig import HeaderVerifier
from rest_framework.permissions import BasePermission
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import exceptions, authentication
from sqlalchemy import select, and_
from variants.helpers import get_engine
from variants.models import Case, SmallVariant
from .models import Site
from .models_api import (
BeaconInfo,
Dataset,
Organisation,
BeaconAlleleRequest,
API_VERSION,
BeaconAlleleResponse,
)
def _header_canonical(header_name):
"""Translate HTTP headers to Django header names."""
# Translate as stated in the docs:
# https://docs.djangoproject.com/en/1.6/ref/request-response/#django.http.HttpRequest.META
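    # e.g. "X-Beacon-User" becomes "HTTP_X_BEACON_USER", while Content-Type and
    # Content-Length keep their unprefixed (hyphenated, upper-case) forms.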
header_name = header_name.lower()
if header_name == "content-type":
return "CONTENT-TYPE"
elif header_name == "content-length":
return "CONTENT-LENGTH"
return "HTTP_%s" % header_name.replace("-", "_").upper()
class _SignedSiteAuthentication(authentication.BaseAuthentication):
"""Based on the code of ``django-rest-framework-httpsignature``."""
SIGNATURE_RE = re.compile('signature="(.+?)"')
HEADERS_RE = re.compile('headers="([\(\)\sa-z0-9-]+?)"')
KEYID_RE = re.compile('keyId="([\(\)\sa-z0-9-]+?)"')
ALGORITHM_RE = re.compile('algorithm="([\(\)\sa-z0-9-]+?)"')
def get_signature_from_signature_string(self, signature):
"""Return the signature from the signature header or None."""
match = self.SIGNATURE_RE.search(signature)
if not match:
return None
return match.group(1)
def get_headers_from_signature(self, signature):
"""Returns a list of headers fields to sign.
According to http://tools.ietf.org/html/draft-cavage-http-signatures-03
section 2.1.3, the headers are optional. If not specified, the single
value of "Date" must be used.
"""
match = self.HEADERS_RE.search(signature)
if not match:
return ["date", "x-beacon-user"]
headers_string = match.group(1)
return headers_string.split()
def get_keyid_from_signature(self, signature):
"""Returns the keyId field."""
match = self.KEYID_RE.search(signature)
if not match:
return None
return match.group(1)
def get_algorithm_from_signature(self, signature):
"""Returns the algorithm field."""
match = self.ALGORITHM_RE.search(signature)
if not match:
return None
return match.group(1)
def build_dict_to_verify(self, request, signature_headers):
"""Build a dict with headers and values used in the signature.
"signature_headers" is a list of lowercase header names.
"""
d = {}
for header in signature_headers:
if header == "(request-target)":
continue
d[header] = request.META.get(_header_canonical(header))
return d
def is_signature_valid(self, site, request):
"""Return whether the request signature is valid for ``site``."""
secret = site.private_key if site.is_key_algo_symmetric() else site.public_key
x = _header_canonical("Authorization")
sent_signature = request.META.get(x)
signature_headers = self.get_headers_from_signature(sent_signature)
if len({"date", "x-beacon-user"} & set(signature_headers)) != 2:
raise exceptions.AuthenticationFailed("Headers Date and X-Beacon-User must be signed")
if "authorization" not in signature_headers:
signature_headers += ["authorization"]
headers_to_sign = self.build_dict_to_verify(request, signature_headers)
# Sign string and compare.
verifier = HeaderVerifier(headers=headers_to_sign, secret=secret)
return verifier.verify()
def authenticate(self, request):
# Check if request has a "Signature" request header.
authorization_header = _header_canonical("Authorization")
sent_string = request.META.get(authorization_header)
if not sent_string:
raise exceptions.AuthenticationFailed("No signature provided")
key_id = self.get_keyid_from_signature(sent_string)
# Fetch site for API key from the data store.
try:
site = Site.objects.get(identifier=key_id, state=Site.ENABLED)
except TypeError:
raise exceptions.AuthenticationFailed("Bad site")
if not self.is_signature_valid(site, request):
raise exceptions.AuthenticationFailed("Bad signature")
return site, key_id
class _SiteBeaconPermission(BasePermission):
def has_permission(self, request, view):
return request.user is not None
class _RequestAgeAcceptable(BasePermission):
def has_permission(self, request, view):
site = request.user
header_ts = parse_http_date(request.META.get(_header_canonical("Date")))
        now_ts = timezone.now().timestamp()
        clock_skew = header_ts - now_ts
if abs(clock_skew) > site.max_clock_skew:
raise exceptions.PermissionDenied("Clock skeq detected (%d seconds)" % clock_skew)
return True
class BeaconInfoApiView(APIView):
"""Implementation of the GA4GH info endpoint."""
authentication_classes = (_SignedSiteAuthentication,)
permission_classes = (_SiteBeaconPermission, _RequestAgeAcceptable)
http_method_names = ("get",)
def get(self, request, *args, **kwargs):
return self._handle(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self._handle(request, *args, **kwargs)
def _handle(self, request, *_args, **_kwargs):
"""Handle the GA4GH Beacon query.
NB: the remote ``Site`` object is stored in request.user. The permission logic ensures that only active
sites can query.
"""
remote_site = request.user
local_site = Site.objects.get(role=Site.LOCAL)
if local_site.state != Site.ENABLED:
return Response(
{"detail": "The site is not enabled!"}, status=400, reason="invalid site"
)
datasets = [
Dataset(
id=str(p.sodar_uuid), name=p.title, assembly="GRCh37", description=p.description,
)
for p in remote_site.get_all_projects()
]
result = BeaconInfo(
id=local_site.identifier,
name=local_site.title,
apiVersion=API_VERSION,
organisation=Organisation(
id=str(local_site.sodar_uuid),
name=local_site.title,
description=local_site.description,
),
datasets=tuple(datasets),
)
return Response(cattr.unstructure(result))
class BeaconQueryApiView(APIView):
"""Implementation of the GA4GH query endpoint."""
authentication_classes = (_SignedSiteAuthentication,)
permission_classes = (_SiteBeaconPermission, _RequestAgeAcceptable)
http_method_names = ("get", "post")
def get(self, request, *args, **kwargs):
return self._handle(request.GET, request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self._handle(request.POST, request, *args, **kwargs)
def _handle(self, params, request, *_args, **_kwargs):
"""Handle the GA4GH Beacon query.
NB: the remote ``Site`` object is stored in request.user. The permission logic ensures that only active
sites can query.
"""
allele_req = cattr.structure(dict(params.items()), BeaconAlleleRequest)
remote_site = request.user
# TODO: perform one large query only
project_pks = [p.pk for p in remote_site.get_all_projects()]
result = (
select(
[SmallVariant.sa.num_hom_alt, SmallVariant.sa.num_het, SmallVariant.sa.num_hemi_alt]
)
.select_from(SmallVariant.sa)
.where(
and_(
SmallVariant.sa.case_id.in_(
select([Case.sa.id])
.select_from(Case.sa)
.where(Case.sa.project_id.in_(project_pks))
),
SmallVariant.sa.release == allele_req.assemblyId,
SmallVariant.sa.chromosome == allele_req.referenceName,
SmallVariant.sa.start == allele_req.start,
SmallVariant.sa.reference == allele_req.referenceBases,
SmallVariant.sa.alternative == allele_req.alternateBases,
)
)
)
sum_hom_alt = 0
sum_het_alt = 0
sum_hemi_alt = 0
for row in get_engine().execute(result):
sum_hom_alt += row.num_hom_alt
sum_het_alt += row.num_het
sum_hemi_alt += row.num_hemi_alt
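        # A hom-alt genotype carries two alternate alleles; het and hemizygous carry one.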
total_alleles = sum_hom_alt * 2 + sum_het_alt + sum_hemi_alt
site = Site.objects.get(role=Site.LOCAL)
if site.state != Site.ENABLED:
            return Response(
                {"detail": "The site is not enabled!"}, status=400, reason="invalid site"
            )
result = BeaconAlleleResponse(
beaconId=site.identifier,
apiVersion=API_VERSION,
exists=(total_alleles > 0),
alleleRequest=allele_req,
)
return Response(cattr.unstructure(result))
|
[
"rest_framework.exceptions.AuthenticationFailed",
"django.utils.timezone.now",
"sqlalchemy.select",
"cattr.unstructure",
"rest_framework.exceptions.PermissionDenied",
"variants.models.Case.sa.project_id.in_",
"rest_framework.response.Response",
"httpsig.HeaderVerifier",
"variants.helpers.get_engine",
"re.compile"
] |
[((1257, 1288), 're.compile', 're.compile', (['"""signature="(.+?)\\""""'], {}), '(\'signature="(.+?)"\')\n', (1267, 1288), False, 'import re\n'), ((1306, 1352), 're.compile', 're.compile', (['"""headers="([\\\\(\\\\)\\\\sa-z0-9-]+?)\\""""'], {}), '(\'headers="([\\\\(\\\\)\\\\sa-z0-9-]+?)"\')\n', (1316, 1352), False, 'import re\n'), ((1365, 1409), 're.compile', 're.compile', (['"""keyId="([\\\\(\\\\)\\\\sa-z0-9-]+?)\\""""'], {}), '(\'keyId="([\\\\(\\\\)\\\\sa-z0-9-]+?)"\')\n', (1375, 1409), False, 'import re\n'), ((1426, 1474), 're.compile', 're.compile', (['"""algorithm="([\\\\(\\\\)\\\\sa-z0-9-]+?)\\""""'], {}), '(\'algorithm="([\\\\(\\\\)\\\\sa-z0-9-]+?)"\')\n', (1436, 1474), False, 'import re\n'), ((3901, 3955), 'httpsig.HeaderVerifier', 'HeaderVerifier', ([], {'headers': 'headers_to_sign', 'secret': 'secret'}), '(headers=headers_to_sign, secret=secret)\n', (3915, 3955), False, 'from httpsig import HeaderVerifier\n'), ((3581, 3666), 'rest_framework.exceptions.AuthenticationFailed', 'exceptions.AuthenticationFailed', (['"""Headers Date and X-Beacon-User must be signed"""'], {}), "('Headers Date and X-Beacon-User must be signed'\n )\n", (3612, 3666), False, 'from rest_framework import exceptions, authentication\n'), ((4261, 4317), 'rest_framework.exceptions.AuthenticationFailed', 'exceptions.AuthenticationFailed', (['"""No signature provided"""'], {}), "('No signature provided')\n", (4292, 4317), False, 'from rest_framework import exceptions, authentication\n'), ((4683, 4731), 'rest_framework.exceptions.AuthenticationFailed', 'exceptions.AuthenticationFailed', (['"""Bad signature"""'], {}), "('Bad signature')\n", (4714, 4731), False, 'from rest_framework import exceptions, authentication\n'), ((5245, 5321), 'rest_framework.exceptions.PermissionDenied', 'exceptions.PermissionDenied', (["('Clock skew detected (%d seconds)' % clock_skew)"], {}), "('Clock skew detected (%d seconds)' % clock_skew)\n", (5272, 5321), False, 'from rest_framework import exceptions, authentication\n'), ((6377, 6465), 'rest_framework.response.Response', 'Response', (["{'detail': 'The site is not enabled!'}"], {'status': '(400)', 'reason': '"""invalid site"""'}), "({'detail': 'The site is not enabled!'}, status=400, reason=\n 'invalid site')\n", (6385, 6465), False, 'from rest_framework.response import Response\n'), ((7109, 7134), 'cattr.unstructure', 'cattr.unstructure', (['result'], {}), '(result)\n', (7126, 7134), False, 'import cattr\n'), ((9751, 9776), 'cattr.unstructure', 'cattr.unstructure', (['result'], {}), '(result)\n', (9768, 9776), False, 'import cattr\n'), ((4565, 4608), 'rest_framework.exceptions.AuthenticationFailed', 'exceptions.AuthenticationFailed', (['"""Bad site"""'], {}), "('Bad site')\n", (4596, 4608), False, 'from rest_framework import exceptions, authentication\n'), ((5110, 5124), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (5122, 5124), False, 'from django.utils import timezone\n'), ((6213, 6301), 'rest_framework.response.Response', 'Response', (["{'detail': 'The site is not enabled!'}"], {'status': '(400)', 'reason': '"""invalid site"""'}), "({'detail': 'The site is not enabled!'}, status=400, reason=\n 'invalid site')\n", (6221, 6301), False, 'from rest_framework.response import Response\n'), ((9074, 9086), 'variants.helpers.get_engine', 'get_engine', ([], {}), '()\n', (9084, 9086), False, 'from variants.helpers import get_engine\n'), ((9426, 9514), 'rest_framework.response.Response', 'Response', (["{'detail': 'The site is not enabled!'}"], {'status': '(400)', 'reason': '"""invalid site"""'}), "({'detail': 'The site is not enabled!'}, status=400, reason=\n 'invalid site')\n", (9434, 9514), False, 'from rest_framework.response import Response\n'), ((8139, 8236), 'sqlalchemy.select', 'select', (['[SmallVariant.sa.num_hom_alt, SmallVariant.sa.num_het, SmallVariant.sa.\n num_hemi_alt]'], {}), '([SmallVariant.sa.num_hom_alt, SmallVariant.sa.num_het, SmallVariant.\n sa.num_hemi_alt])\n', (8145, 8236), False, 'from sqlalchemy import select, and_\n'), ((8517, 8552), 'variants.models.Case.sa.project_id.in_', 'Case.sa.project_id.in_', (['project_pks'], {}), '(project_pks)\n', (8539, 8552), False, 'from variants.models import Case, SmallVariant\n'), ((8419, 8439), 'sqlalchemy.select', 'select', (['[Case.sa.id]'], {}), '([Case.sa.id])\n', (8425, 8439), False, 'from sqlalchemy import select, and_\n')]
|
##############################################################################
#
# Library: TubeTK
#
# Copyright 2010 Kitware Inc. 28 Corporate Drive,
# Clifton Park, NY, 12065, USA.
#
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#!/usr/bin/env python
import Image
import cv2
import cv2.cv as cv
import numpy as np
def cv2array(im):
'''
Copied from http://opencv.willowgarage.com/wiki/PythonInterface
'''
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
    if im.nChannels != 1:
a.shape = (im.height,im.width,im.nChannels)
else:
a.shape = (im.height,im.width)
return a
def array2cv(a):
'''
Copied from http://opencv.willowgarage.com/wiki/PythonInterface
'''
dtype2depth = {
'bool': cv.IPL_DEPTH_8U,
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
def cv2Image(cv_im):
pi = Image.fromstring( "L", cv.GetSize(cv_im), cv_im.tostring() )
return pi
def Image2cv(pi):
cv_im = cv.CreateImageHeader( pi.size, cv.IPL_DEPTH_8U, 1 )
cv.SetData( cv_im, pi.tostring() )
return cv_im
def gaussianFilter( inputImage, filterSize ):
"""
    Apply OpenCV's Gaussian smoothing to a given image
    '''
    # Allocate a 32-bit float output image the same size as the input
outputImage = cv.CreateImage( cv.GetSize( inputImage ), cv.IPL_DEPTH_32F, 1 )
cv.Smooth( inputImage, outputImage, cv.CV_GAUSSIAN, filterSize, filterSize )
return outputImage
def adaptiveThresholding( inputImage, neighborhoodWidth=71, offsetFromMean=15 ):
"""
Apply adaptive thresholding to a given image. Uses a
    neighborhoodWidth x neighborhoodWidth kernel. The threshold is the local
    mean intensity within the kernel minus offsetFromMean (OpenCV subtracts the constant).
"""
outputImage = cv.CreateImage( cv.GetSize( inputImage ), cv.IPL_DEPTH_8U, 1 )
cv.AdaptiveThreshold( inputImage, outputImage, 255, cv.CV_THRESH_BINARY,
cv.CV_ADAPTIVE_THRESH_MEAN_C, neighborhoodWidth,
offsetFromMean )
return outputImage
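# A minimal usage sketch (hypothetical file name; cv.AdaptiveThreshold expects
# a single-channel 8-bit input):
#   img = cv.LoadImage('input.png', cv.CV_LOAD_IMAGE_GRAYSCALE)
#   binary = adaptiveThresholding(img)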
|
[
"cv2.cv.CreateImageHeader",
"cv2.cv.Smooth",
"cv2.cv.GetSize",
"cv2.cv.AdaptiveThreshold"
] |
[((2401, 2450), 'cv2.cv.CreateImageHeader', 'cv.CreateImageHeader', (['pi.size', 'cv.IPL_DEPTH_8U', '(1)'], {}), '(pi.size, cv.IPL_DEPTH_8U, 1)\n', (2421, 2450), True, 'import cv2.cv as cv\n'), ((2754, 2828), 'cv2.cv.Smooth', 'cv.Smooth', (['inputImage', 'outputImage', 'cv.CV_GAUSSIAN', 'filterSize', 'filterSize'], {}), '(inputImage, outputImage, cv.CV_GAUSSIAN, filterSize, filterSize)\n', (2763, 2828), True, 'import cv2.cv as cv\n'), ((3235, 3376), 'cv2.cv.AdaptiveThreshold', 'cv.AdaptiveThreshold', (['inputImage', 'outputImage', '(255)', 'cv.CV_THRESH_BINARY', 'cv.CV_ADAPTIVE_THRESH_MEAN_C', 'neighborhoodWidth', 'offsetFromMean'], {}), '(inputImage, outputImage, 255, cv.CV_THRESH_BINARY, cv.\n CV_ADAPTIVE_THRESH_MEAN_C, neighborhoodWidth, offsetFromMean)\n', (3255, 3376), True, 'import cv2.cv as cv\n'), ((2322, 2339), 'cv2.cv.GetSize', 'cv.GetSize', (['cv_im'], {}), '(cv_im)\n', (2332, 2339), True, 'import cv2.cv as cv\n'), ((2701, 2723), 'cv2.cv.GetSize', 'cv.GetSize', (['inputImage'], {}), '(inputImage)\n', (2711, 2723), True, 'import cv2.cv as cv\n'), ((3183, 3205), 'cv2.cv.GetSize', 'cv.GetSize', (['inputImage'], {}), '(inputImage)\n', (3193, 3205), True, 'import cv2.cv as cv\n')]
|
import logging.handlers
from pythonjsonlogger import jsonlogger
import datetime
import socket
class JsonFormatter(jsonlogger.JsonFormatter, object):
def __init__(self,
fmt="%(asctime) %(name) %(processName) %(filename) %(funcName) %(levelname) %(lineno) %(module) %(threadName) %(message)",
datefmt="%Y-%m-%dT%H:%M:%S%z",
extra={}, *args, **kwargs):
self._extra = extra
jsonlogger.JsonFormatter.__init__(self, fmt=fmt, datefmt=datefmt, *args, **kwargs)
def process_log_record(self, log_record):
# Enforce the presence of a timestamp
if "asctime" in log_record:
log_record["timestamp"] = log_record["asctime"]
else:
log_record["timestamp"] = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
if self._extra is not None:
for key, value in self._extra.items():
log_record[key] = value
return super(JsonFormatter, self).process_log_record(log_record)
# Derive from object to force a new-style class and thus allow super() to work
# on Python 2.6
class LogmaticHandler(logging.handlers.SocketHandler, object):
"""Python logging handler. Sends events over TCP.
:param host: The host of the logstash server.
:param port: The port of the logstash server (default 5959).
"""
def __init__(self, logmaticKey, host="api.logmatic.io", port=10514):
super(LogmaticHandler, self).__init__(host, port)
self.logmaticKey = logmaticKey
def makePickle(self, record):
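        # Frame written to the TCP socket: b"<logmaticKey> <json-formatted record>\n".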
return self.logmaticKey.encode() + " ".encode() + self.formatter.format(record).encode() + "\n".encode()
# Allow SyslogHandler to emit in Json with a prefix (for instance appname)
class SysLogJsonHandler(logging.handlers.SysLogHandler, object):
# Override constructor
def __init__(self, address=('localhost', logging.handlers.SYSLOG_UDP_PORT),
facility=logging.handlers.SysLogHandler.LOG_USER, socktype=None, prefix=""):
super(SysLogJsonHandler, self).__init__(address, facility, socktype)
self._prefix = prefix
if self._prefix != "":
self._prefix = prefix + ": "
# Override format method to handle prefix
def format(self, record):
return self._prefix + super(SysLogJsonHandler, self).format(record)
|
[
"pythonjsonlogger.jsonlogger.JsonFormatter.__init__",
"datetime.datetime.utcnow"
] |
[((445, 532), 'pythonjsonlogger.jsonlogger.JsonFormatter.__init__', 'jsonlogger.JsonFormatter.__init__', (['self', '*args'], {'fmt': 'fmt', 'datefmt': 'datefmt'}), '(self, *args, fmt=fmt, datefmt=datefmt, **\n kwargs)\n', (478, 532), False, 'from pythonjsonlogger import jsonlogger\n'), ((769, 795), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (793, 795), False, 'import datetime\n')]
|
"""Code snippets vol-49-snippet-243
Remove all empty folders in a directory.
Download all snippets so far:
https://wp.me/Pa5TU8-1yg
Blog: stevepython.wordpress.com
requirements:None
https://gist.github.com/brentvollebregt/04ea53f3761667b4ff39e6f2caf5a5d9
"""
import errno
import os
# Enter required folder to check.
folder_to_scan_and_delete = r'C:/temp/'
show_ignored = True
show_deleted = True
deleted = 0
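# topdown=False yields leaf directories first, so nested empty folders
# are removed before their (now empty) parents.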
for root, dirs, files in os.walk(folder_to_scan_and_delete, topdown=False):
for name in dirs:
        directory = os.path.join(root, name)
        try:
            os.rmdir(directory)
        except OSError as ex:
            if ex.errno == errno.ENOTEMPTY:
                if show_ignored:
                    print('[Ignored] : ' + directory)
        else:
            if show_deleted:
                print('[Deleted] : ' + directory)
            deleted += 1
print('Deleted: ' + str(deleted))
|
[
"os.rmdir",
"os.walk",
"os.path.join"
] |
[((443, 492), 'os.walk', 'os.walk', (['folder_to_scan_and_delete'], {'topdown': '(False)'}), '(folder_to_scan_and_delete, topdown=False)\n', (450, 492), False, 'import os\n'), ((536, 560), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (548, 560), False, 'import os\n'), ((586, 605), 'os.rmdir', 'os.rmdir', (['directory'], {}), '(directory)\n', (594, 605), False, 'import os\n')]
|
# Copyright (c) 2015 - 2021, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY LOG OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
AppConf class for HPL reference (netlib) benchmark.
'''
import os
import sys
import math
import textwrap
from apps import apps
def setup_run_args(parser):
""" Add common arguments for all run scripts:
--frac-dram
"""
help_text = 'Ratio of the total node DRAM that should be used for the HPL ' + \
'matrix (assuming DP). Value should be between 0 and 1. ' + \
'Default is 0.7. 0.8-0.9 is a better value but might fail due to ' + \
'out-of-memory.'
parser.add_argument('--frac-dram', dest='frac_dram_per_node',
action='store', type=float, default=0.7,
help=help_text)
class HplNetlibAppConf(apps.AppConf):
@staticmethod
def name():
return 'hpl_netlib'
def __init__(self, num_nodes, mach, frac_dram_per_node, cores_per_node=None):
'''
        num_nodes: Number of MPI ranks (1 node per rank) -- 1, 2, 4, 8 or 16.
frac_dram_per_node: Ratio of the total node DRAM that should be used for the
HPL matrix (assuming DP).
80-90% is a good amount to maximize efficiency.
cores_per_node: Number of Xeon cores that each MPI process can offload to via OMP.
Total number of physical cores will be selected if this is set to None.
'''
dram_for_app = num_nodes * mach.total_node_memory_bytes() * frac_dram_per_node
if cores_per_node is None:
cores_per_node = mach.num_core()
benchmark_dir = os.path.dirname(os.path.abspath(__file__))
self.exec_path = os.path.join(benchmark_dir, 'hpl-2.3/bin/Linux_Intel64/xhpl')
self.NBs = 384 # This is the recommended size for Intel (R) Xeon (R) Scalable family.
process_grid_ratios = {
1: {'P': 1, 'Q': 1},
2: {'P': 1, 'Q': 2},
4: {'P': 2, 'Q': 2},
8: {'P': 2, 'Q': 4},
16: {'P': 4, 'Q': 4}
}
if num_nodes not in process_grid_ratios:
raise RuntimeError("Number of nodes {} is not defined for HPL.".format(num_nodes))
self.P = process_grid_ratios[num_nodes]['P']
self.Q = process_grid_ratios[num_nodes]['Q']
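        # The HPL matrix is N x N double-precision values (8 bytes each),
        # so N = sqrt(bytes_reserved / 8).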
self.N = int(round(math.sqrt(dram_for_app / 8)))
self._cpu_per_rank = cores_per_node
sys.stdout.write('DRAM reserved for APP: {dram_for_app:0.2f}GB\n'.format(dram_for_app=dram_for_app/2**30))
sys.stdout.write('Cores for app: {}\n'.format(cores_per_node))
sys.stdout.write('N={}\n'.format(self.N))
def trial_setup(self, run_id, output_dir):
dat_file_path = os.path.join(output_dir + "/HPL.dat")
if not os.path.isfile(dat_file_path):
dat_file_text = textwrap.dedent('''\
HPLinpack benchmark input file
Innovative Computing Laboratory, University of Tennessee
HPL.out output file name (if any)
6 device out (6=stdout,7=stderr,file)
1 # of problems sizes (N)
{N} Ns
1 # of NBs
{NBs} NBs
0 PMAP process mapping (0=Row-,1=Column-major)
1 # of process grids (P x Q)
{P} Ps
{Q} Qs
16.0 threshold
1 # of panel fact
1 PFACTs (0=left, 1=Crout, 2=Right)1
1 # of recursive stopping criterium
4 NBMINs (>= 1)
1 # of panels in recursion
2 NDIVs
1 # of recursive panel fact.
1 RFACTs (0=left, 1=Crout, 2=Right)
1 # of broadcast
0 BCASTs (0=1rg,1=1rM,2=2rg,3=2rM,4=Lng,5=LnM)
1 # of lookahead depth
0 DEPTHs (>=0)
2 SWAP (0=bin-exch,1=long,2=mix)
64 swapping threshold
0 L1 in (0=transposed,1=no-transposed) form
0 U in (0=transposed,1=no-transposed) form
1 Equilibration (0=no,1=yes)
8 memory alignment in double (> 0)
'''.format(N=self.N, NBs=self.NBs, P=self.P, Q=self.Q))
with open(dat_file_path, "w") as dat_file:
dat_file.write(dat_file_text)
def get_rank_per_node(self):
return 1
def get_cpu_per_rank(self):
return self._cpu_per_rank
def get_bash_exec_path(self):
return self.exec_path
def get_bash_exec_args(self):
return ''
def get_custom_geopm_args(self):
# See README.md for an explanation of why
# HPL cannot start in process control mode.
# Also hyperthreading does not benefit HPL and
# it is turned off.
return ['--geopm-ctl=application',
'--geopm-hyperthreads-disable']
def parse_fom(self, log_path):
result = None
key = 'WR00'
with open(log_path) as fid:
for line in fid.readlines():
if key in line:
result = float(line.split(' ')[-1])
break
return result
|
[
"os.path.abspath",
"os.path.isfile",
"os.path.join",
"math.sqrt"
] |
[((3235, 3296), 'os.path.join', 'os.path.join', (['benchmark_dir', '"""hpl-2.3/bin/Linux_Intel64/xhpl"""'], {}), "(benchmark_dir, 'hpl-2.3/bin/Linux_Intel64/xhpl')\n", (3247, 3296), False, 'import os\n'), ((4261, 4298), 'os.path.join', 'os.path.join', (["(output_dir + '/HPL.dat')"], {}), "(output_dir + '/HPL.dat')\n", (4273, 4298), False, 'import os\n'), ((3183, 3208), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (3198, 3208), False, 'import os\n'), ((4314, 4343), 'os.path.isfile', 'os.path.isfile', (['dat_file_path'], {}), '(dat_file_path)\n', (4328, 4343), False, 'import os\n'), ((3878, 3905), 'math.sqrt', 'math.sqrt', (['(dram_for_app / 8)'], {}), '(dram_for_app / 8)\n', (3887, 3905), False, 'import math\n')]
|
# <NAME>
# CPSC 386 FALL 2016
# Project 5 (Final Project)
# <EMAIL>
# Armada.py is a top-down space shooter game made using Python 3.4 and Pygame 1.9.
import sys, random, time, pygame
from pygame.locals import *
# Colors
WHITE = (255, 255, 255)
BLACK = (0, 0, 0 )
RED = (255, 0, 0 )
GREEN = (0, 255, 0 )
# Text color
TEXT_COLOR = WHITE
# Set up window size and FPS
GAME_WINDOW_WIDTH = 1400
GAME_WINDOW_HEIGHT = 800
FPS = 60
# Boss level
BOSS_LVL = random.randint(5,10)
# Alien Size, includes ALIEN1 and ALIEN2
ALIEN_SIZE = 70
# Alien Spawn Rate
# Increase this to make the spawn timer longer,
# Or decrease to make the aliens spawn quickly
ALIEN1_SPAWN_RATE = 60
ALIEN2_SPAWN_RATE = ALIEN1_SPAWN_RATE
# Keep track of the player, aliens, bullets, and reload speed
# Player Speed
PLAYER_SPEED = 15
# Alien Speed
ALIEN1_SPEED = 2
ALIEN2_SPEED = ALIEN1_SPEED / 2
BOSS_SPEED = ALIEN1_SPEED / 2
# Speed of the bullet, and reload speed
BULLET_SPEED = 10
ALIEN_BULLET_SPEED = 10
RELOAD_SPEED = 15
# Keep a list of all of the aliens and ammo
ALIEN1 = []
ALIEN2 = []
BULLETS = []
ALIEN_BULLETS = []
# Set up images for the game
# Player image
PLAYER_IMG = pygame.image.load('player_ship.png')
PLAYER_RECT = PLAYER_IMG.get_rect()
# Alien images
ALIEN1_IMG = pygame.image.load('alien1.png')
ALIEN2_IMG = pygame.image.load('alien2.png')
BOSS_IMG = pygame.image.load('boss.png')
BOSS_RECT = BOSS_IMG.get_rect()
# Player ammo
BULLET_IMG = pygame.Surface([10, 2])
BULLET_IMG.fill(RED)
BULLET_RECT = BULLET_IMG.get_rect()
# Alien ammo
ALIEN_BULLET_IMG = pygame.Surface([10, 2])
ALIEN_BULLET_IMG.fill(GREEN)
ALIEN_BULLET_RECT = BULLET_IMG.get_rect()
# Explosion images
EXPLOSION_IMG = pygame.image.load('explosion.png')
BIG_EXPLOSION_IMG = pygame.image.load('big_explosion.png')
# Title screen image
TITLE_IMG = pygame.image.load("title_screen.jpg")
#TITLE_IMG = pygame.transform.scale(TITLE_IMG, (GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT))
# Instructions screen image
INSTRUCTIONS_IMG = pygame.image.load('instructions_bg.jpg')
INSTRUCTIONS_IMG = pygame.transform.scale(INSTRUCTIONS_IMG, (GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT))
# In game background image
BACKGROUND_IMG = pygame.image.load('sky.jpg')
BACKGROUND_IMG = pygame.transform.scale(BACKGROUND_IMG, (GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT))
# main() function
def main():
global FPS_CLOCK, GAME_DISPLAY, SMALL_FONT, LRG_FONT, XTRA_LRG_FONT
pygame.init()
FPS_CLOCK = pygame.time.Clock()
GAME_DISPLAY = pygame.display.set_mode((GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT))
SMALL_FONT = pygame.font.SysFont('freesansbold.ttf', 30)
LRG_FONT = pygame.font.SysFont('freesansbold.ttf', 60)
XTRA_LRG_FONT = pygame.font.SysFont('freesansbold.ttf', 120)
# There's no mouse input for this game, so don't show the pointer
pygame.mouse.set_visible(False)
# Play non in-game music
pygame.mixer.music.load('death.mid')
pygame.mixer.music.play(-1, 0.0)
# Show the title screen
GAME_DISPLAY.blit(TITLE_IMG, (0, 0))
drawText('ARMADA', XTRA_LRG_FONT, GAME_DISPLAY, 0, 0 , TEXT_COLOR)
drawText('Press Enter', LRG_FONT, GAME_DISPLAY, 600, 750, TEXT_COLOR)
drawText('Game by <NAME>', SMALL_FONT, GAME_DISPLAY, 1190, 780, TEXT_COLOR)
pygame.display.update()
getLoadingScreenInput()
# Show the instructions screen
GAME_DISPLAY.blit(INSTRUCTIONS_IMG, (0, 0))
drawText('INSTRUCTIONS:', LRG_FONT, GAME_DISPLAY, 10 , 10, TEXT_COLOR) # Display at top left corner
drawText('Defeat the mothership to win the game', SMALL_FONT, GAME_DISPLAY, 10 , 50, TEXT_COLOR)
drawText('Don\'t let the mothership reach Earth. If you do, then it\'s Game Over', SMALL_FONT, GAME_DISPLAY, 10 , 70, TEXT_COLOR)
drawText('Avoid all aliens, if an alien gets close enough to you, then it\'s Game Over', SMALL_FONT, GAME_DISPLAY, 10 , 90, TEXT_COLOR)
drawText('If your HP falls to zero, then it\'s Game Over', SMALL_FONT, GAME_DISPLAY, 10 , 110, TEXT_COLOR)
drawText('Each time an alien reaches Earth, the Earth\'s defense drops 5 percent', SMALL_FONT, GAME_DISPLAY, 10 , 130, TEXT_COLOR)
drawText('If Earth\'s defense drops to 0 you lose the game (i.e., 20 aliens reaching Earth results in Game Over)', SMALL_FONT, GAME_DISPLAY, 10 , 150, TEXT_COLOR)
drawText('CONTROLS:', LRG_FONT, GAME_DISPLAY, 10 , 210, TEXT_COLOR)
drawText('To move: W,A,S,D or arrow keys', SMALL_FONT, GAME_DISPLAY, 10 , 250, TEXT_COLOR)
drawText('To shoot: Spacebar', SMALL_FONT, GAME_DISPLAY, 10 , 270, TEXT_COLOR)
drawText('To quit: Esc', SMALL_FONT, GAME_DISPLAY, 10 , 290, TEXT_COLOR)
drawText('Press Enter', LRG_FONT, GAME_DISPLAY, 600, 750, TEXT_COLOR)
pygame.display.update()
getLoadingScreenInput()
# Stop music
pygame.mixer.music.stop()
# Limit to 60 frames per second
FPS_CLOCK.tick(FPS)
#################################
# main() GAME LOOP #
#################################
while True:
pygame.mixer.music.load('boss.mid')
pygame.mixer.music.play(-1)
runGame()
pygame.mixer.music.stop()
if (EARTH_DEFENSE <= 0):
GAME_DISPLAY.blit(TITLE_IMG, (0, 0))
drawText('DEFEAT', XTRA_LRG_FONT, GAME_DISPLAY, (GAME_WINDOW_WIDTH / 3) + 50, (GAME_WINDOW_HEIGHT / 3), RED)
drawText('EARTH HAS BEEN DESTROYED', LRG_FONT, GAME_DISPLAY, (GAME_WINDOW_WIDTH / 3)- 130, (GAME_WINDOW_HEIGHT / 3) + 100, TEXT_COLOR)
drawText('Press enter to play again or esc to quit', LRG_FONT, GAME_DISPLAY, 300, 750, TEXT_COLOR)
pygame.display.update()
getLoadingScreenInput()
cleanUp(BULLETS,ALIEN1,ALIEN2) # Clear screen for the next game
if playerCollision(PLAYER_RECT, ALIEN1) or playerCollision(PLAYER_RECT, ALIEN2) or PLAYER_RECT.colliderect(BOSS_RECT):
GAME_DISPLAY.blit(TITLE_IMG, (0, 0))
drawText('DEFEAT', XTRA_LRG_FONT, GAME_DISPLAY, (GAME_WINDOW_WIDTH / 3) + 50, (GAME_WINDOW_HEIGHT / 3), RED)
drawText('YOU HAVE BEEN CAPTURED BY THE ALIENS', LRG_FONT, GAME_DISPLAY, (GAME_WINDOW_WIDTH / 3) - 230, (GAME_WINDOW_HEIGHT / 3) +100, TEXT_COLOR)
drawText('Press enter to play again or esc to quit', LRG_FONT, GAME_DISPLAY, 300, 750, TEXT_COLOR)
pygame.display.update()
getLoadingScreenInput()
cleanUp(BULLETS,ALIEN1,ALIEN2) # Clear screen for the next game
if (PLAYER_WON == True):
GAME_DISPLAY.blit(TITLE_IMG, (0, 0))
drawText('VICTORY', XTRA_LRG_FONT, GAME_DISPLAY, (GAME_WINDOW_WIDTH / 3) + 50, (GAME_WINDOW_HEIGHT / 3), GREEN)
drawText('THE ALIENS HAVE BEEN DEFEATED', LRG_FONT, GAME_DISPLAY, (GAME_WINDOW_WIDTH / 3) - 130, (GAME_WINDOW_HEIGHT / 3) +100, TEXT_COLOR)
drawText('Press enter to play again or esc to quit', LRG_FONT, GAME_DISPLAY, 300, 750, TEXT_COLOR)
pygame.display.update()
getLoadingScreenInput()
cleanUp(BULLETS,ALIEN1,ALIEN2) # Clear screen for the next game
def runGame():
# Set up the start of the game
global EARTH_DEFENSE, PLAYER_HP, BOSS_HP, SCORE, PLAYER_WON
PLAYER_WON = False
EARTH_DEFENSE = 100 # If this reaches 0, game over
PLAYER_HP = 100 # If this reaches 0, game over
BOSS_HP = 100 # If the player defeats the boss they win
    # Set the score, lvl, and the speed at which the aliens move
SCORE = 0
lvl, ALIEN1_SPEED = calcLvlAndAlienSpeed(SCORE) # Note: only change speed of Alien1
PLAYER_RECT.topleft = (50, GAME_WINDOW_HEIGHT /2)
#Put boss rect off the screen to start with
BOSS_RECT.topright = (1600, GAME_WINDOW_HEIGHT /2)
moveLeft = False
moveRight = False
moveUp = False
moveDown = False
shoot = False
alien1_spawn_counter = 0
alien2_spawn_counter = 0
player_bullet_spawn_rate = 40
alien_bullet_spawn_rate = 40
effect = pygame.mixer.Sound('laser_fire.wav')
#################################
# runGame() GAME LOOP #
#################################
while True: # the game loop runs while the game part is playing
# Calculate level and enemy speed,
if lvl != BOSS_LVL:
lvl, ALIEN1_SPEED = calcLvlAndAlienSpeed(SCORE) # Note: only change speed of Alien1
for event in pygame.event.get():
if event.type == QUIT:
terminate()
# Check if the key was pressed down
elif event.type == KEYDOWN:
if event.key == K_UP or event.key == K_w:
moveDown = False
moveUp = True
moveRight = False
moveLeft = False
elif event.key == K_DOWN or event.key == K_s:
moveUp = False
moveDown = True
moveRight = False
moveLeft = False
elif event.key == K_RIGHT or event.key == K_d:
moveUp = False
moveDown = False
moveRight = True
moveLeft = False
elif event.key == K_LEFT or event.key == K_a:
moveUp = False
moveDown = False
moveRight = False
moveLeft = True
elif event.key == K_SPACE:
shoot = True
# Check if the key was released
# If you release the key, you are no longer moving
# Set vars to false and terminate game if player last hit esc
elif event.type == KEYUP:
if event.key == K_ESCAPE:
terminate()
elif event.key == K_UP or event.key == K_w:
moveUp = False
elif event.key == K_DOWN or event.key == K_s:
moveDown = False
elif event.key == K_RIGHT or event.key == K_d:
moveRight = False
elif event.key == K_LEFT or event.key == K_a:
moveLeft = False
elif event.key == K_SPACE:
shoot = False
        # Add new ALIEN1 at the right edge of the screen, if needed.
alien1_spawn_counter += 1
if alien1_spawn_counter == ALIEN1_SPAWN_RATE:
alien1_spawn_counter = 0
ALIEN1_SIZE = ALIEN_SIZE
rand_y1 = random.randint(10,GAME_WINDOW_HEIGHT-ALIEN1_SIZE-10)
while rand_y1 == GAME_WINDOW_HEIGHT /2:
rand_y1 = random.randint(10,GAME_WINDOW_HEIGHT-ALIEN1_SIZE-10)
newAlien1 = {'rect': pygame.Rect(GAME_WINDOW_WIDTH, rand_y1, ALIEN1_SIZE, ALIEN1_SIZE),
'surface':pygame.transform.scale(ALIEN1_IMG, (ALIEN1_SIZE, ALIEN1_SIZE))}
ALIEN1.append(newAlien1)
        # Add new ALIEN2 at the right edge of the screen, if needed.
alien2_spawn_counter += 1
if alien2_spawn_counter == ALIEN2_SPAWN_RATE:
alien2_spawn_counter = 0
ALIEN2_SIZE = ALIEN_SIZE
rand_y2 = random.randint(10,GAME_WINDOW_HEIGHT-ALIEN2_SIZE-10)
while rand_y2 == GAME_WINDOW_HEIGHT /2:
            rand_y2 = random.randint(10,GAME_WINDOW_HEIGHT-ALIEN2_SIZE-10)
newAlien2 = {'rect': pygame.Rect(GAME_WINDOW_WIDTH, rand_y2, ALIEN2_SIZE, ALIEN2_SIZE),
'surface':pygame.transform.scale(ALIEN2_IMG, (ALIEN2_SIZE, ALIEN2_SIZE))}
ALIEN2.append(newAlien2)
# add new bullet
player_bullet_spawn_rate += 1
if player_bullet_spawn_rate >= RELOAD_SPEED * 2 and shoot == True:
player_bullet_spawn_rate = 0
newBullet1 = {'rect':pygame.Rect(PLAYER_RECT.centerx+10, PLAYER_RECT.centery-25, BULLET_RECT.width, BULLET_RECT.height),
'surface':pygame.transform.scale(BULLET_IMG, (BULLET_RECT.width, BULLET_RECT.height))}
newBullet2 = {'rect':pygame.Rect(PLAYER_RECT.centerx+10, PLAYER_RECT.centery+25, BULLET_RECT.width, BULLET_RECT.height),
'surface':pygame.transform.scale(BULLET_IMG, (BULLET_RECT.width, BULLET_RECT.height))}
BULLETS.append(newBullet1)
BULLETS.append(newBullet2)
effect.play(1)
# Move the player around.
if moveLeft and PLAYER_RECT.left > 0:
PLAYER_RECT.move_ip(-1 * PLAYER_SPEED, 0)
if moveRight and PLAYER_RECT.right < GAME_WINDOW_WIDTH-10:
PLAYER_RECT.move_ip(PLAYER_SPEED, 0)
if moveUp and PLAYER_RECT.top > 30:
PLAYER_RECT.move_ip(0, -1 * PLAYER_SPEED)
if moveDown and PLAYER_RECT.bottom < GAME_WINDOW_HEIGHT-10:
PLAYER_RECT.move_ip(0, PLAYER_SPEED)
    # Move the ALIEN1 left and add the bullets
for a1 in ALIEN1:
a1['rect'].move_ip(-1*ALIEN1_SPEED, 0)
# add new alien1 bullets
alien_bullet_spawn_rate +=1
if alien_bullet_spawn_rate >= RELOAD_SPEED * 30: # Include multiplier to slow reload speed
alien_bullet_spawn_rate = 0
alienBullet = {'rect':pygame.Rect(a1['rect'].centerx, a1['rect'].centery, ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height),
'surface':pygame.transform.scale(ALIEN_BULLET_IMG, (ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height))}
ALIEN_BULLETS.append(alienBullet)
    # Move the ALIEN2 left and add the bullets
for a2 in ALIEN2:
a2['rect'].move_ip(-1*ALIEN2_SPEED,0)
        # add new alien2 bullets
alien_bullet_spawn_rate +=1
if alien_bullet_spawn_rate >= RELOAD_SPEED * 30: # Include multiplier to slow reload speed
alien_bullet_spawn_rate = 0
alienBullet = {'rect':pygame.Rect(a2['rect'].centerx, a2['rect'].centery, ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height),
'surface':pygame.transform.scale(ALIEN_BULLET_IMG, (ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height))}
ALIEN_BULLETS.append(alienBullet)
# Move the boss around.
if lvl == BOSS_LVL:
if BOSS_RECT.left > 0:
BOSS_RECT.move_ip(-1 * BOSS_SPEED, 0)
# add new boss bullets
alien_bullet_spawn_rate += 1
if alien_bullet_spawn_rate >= RELOAD_SPEED * 20: # Include multiplier to slow reload speed
alien_bullet_spawn_rate = 0
alienBullet = {'rect':pygame.Rect(BOSS_RECT.centerx, BOSS_RECT.centery, ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height),
'surface':pygame.transform.scale(ALIEN_BULLET_IMG, (ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height))}
ALIEN_BULLETS.append(alienBullet)
# move the player bullet
for b in BULLETS:
b['rect'].move_ip(1 * BULLET_SPEED, 0)
# move the alien bullet
for b2 in ALIEN_BULLETS:
b2['rect'].move_ip(-1 * ALIEN_BULLET_SPEED, 0)
# If boss reaches Earth, game over
if BOSS_RECT.left < 20:
EARTH_DEFENSE = 0
break
# Delete ALIEN1 that continued past the screen.
for a1 in ALIEN1[:]:
if a1['rect'].left < 0:
ALIEN1.remove(a1)
EARTH_DEFENSE -= 5
# Delete ALIEN2 that continued past the screen.
for a2 in ALIEN2[:]:
if a2['rect'].left < 0:
ALIEN2.remove(a2)
EARTH_DEFENSE -= 5
# Delete all player bullets that continued past the screen
for b in BULLETS[:]:
if b['rect'].right>GAME_WINDOW_WIDTH:
BULLETS.remove(b)
# Check if the alien bullet hit the player
if hitPlayer(ALIEN_BULLETS, PLAYER_RECT):
PLAYER_HP -= 5
if (PLAYER_HP <= 0):
EARTH_DEFENSE = 0 # The game ends because player died
break
# Check if the player bullet hit the boss
if lvl == BOSS_LVL:
if hitBoss(BULLETS, BOSS_RECT):
BOSS_HP -= 5
if (BOSS_HP <= 0):
PLAYER_WON = True
GAME_DISPLAY.blit(BIG_EXPLOSION_IMG, BOSS_RECT)
pygame.display.update(BOSS_RECT)
# Limit to 60 frames per second
FPS_CLOCK.tick(FPS)
pygame.time.delay(30)
break
# Check if the player bullet hit the aliens
    for a1 in ALIEN1[:]:  # iterate over a copy; hits remove aliens below
if hitAlien1(BULLETS, ALIEN1, a1):
SCORE += 1
GAME_DISPLAY.blit(EXPLOSION_IMG, a1['rect'])
pygame.display.update(a1['rect'])
# Limit to 60 frames per second
FPS_CLOCK.tick(FPS)
ALIEN1.remove(a1)
    for a2 in ALIEN2[:]:  # iterate over a copy; hits remove aliens below
if hitAlien2(BULLETS, ALIEN2, a2):
SCORE += 1
GAME_DISPLAY.blit(EXPLOSION_IMG, a2['rect'])
pygame.display.update(a2['rect'])
# Limit to 60 frames per second
FPS_CLOCK.tick(FPS)
ALIEN2.remove(a2)
# Display the background in-game image
GAME_DISPLAY.blit(BACKGROUND_IMG, (0, 0))
# Draw the player
GAME_DISPLAY.blit(PLAYER_IMG, PLAYER_RECT)
# Check if we need to draw the boss and bullets
if lvl == BOSS_LVL:
# Draw the boss
GAME_DISPLAY.blit(BOSS_IMG, BOSS_RECT)
# Draw each alien
for a1 in ALIEN1:
GAME_DISPLAY.blit(a1['surface'], a1['rect'])
for a2 in ALIEN2:
GAME_DISPLAY.blit(a2['surface'], a2['rect'])
# Draw each bullet
for b in BULLETS:
GAME_DISPLAY.blit(b['surface'], b['rect'])
# Draw each bullet
for b2 in ALIEN_BULLETS:
GAME_DISPLAY.blit(b2['surface'], b2['rect'])
# Draw the score and how many Aliens got past your defenses
drawText('Earth Defense: %s percent' % (EARTH_DEFENSE), SMALL_FONT, GAME_DISPLAY, 290, 20, TEXT_COLOR)
drawText('Aliens eliminated: %s' % (SCORE), SMALL_FONT, GAME_DISPLAY, 625, 20, TEXT_COLOR)
drawText('Level: %s' % (lvl), SMALL_FONT, GAME_DISPLAY, 895, 20, TEXT_COLOR)
drawText('HP: %s' % (PLAYER_HP), SMALL_FONT, GAME_DISPLAY, 1050, 20, TEXT_COLOR)
drawText('_______________________________________________________________________________________________________________________________',
SMALL_FONT, GAME_DISPLAY, 0, 30, TEXT_COLOR)
# update the display
pygame.display.update()
# Check if any of the aliens ran into the player.
if playerCollision(PLAYER_RECT, ALIEN1):
break
if playerCollision(PLAYER_RECT, ALIEN2):
break
if PLAYER_RECT.colliderect(BOSS_RECT):
break
# check if Earth's defense is depleted, resulting in game over
if EARTH_DEFENSE <= 0:
break
FPS_CLOCK.tick(FPS)
# Calculate the level and the alien speed
def calcLvlAndAlienSpeed(SCORE):
    # Based on the score, return the level the player is on and
    # how fast the aliens move (in pixels per frame); speed rises with level.
lvl = int(SCORE / 50) + 1
alien_speed = ALIEN1_SPEED + (lvl * 0.5)
return lvl, alien_speed
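# Worked example of the formula above (score value illustrative):
#   SCORE = 120  ->  lvl = int(120 / 50) + 1 = 3
#   alien_speed = ALIEN1_SPEED + (3 * 0.5) = ALIEN1_SPEED + 1.5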
# Clean up aliens and bullets
def cleanUp(BULLETS, ALIEN1, ALIEN2):
    # Empty each sprite list in place (del list[:] keeps the shared list
    # objects alive) so the next game starts with a clean screen.
    del BULLETS[:]
    del ALIEN_BULLETS[:]
    del ALIEN1[:]
    del ALIEN2[:]
# Draw text on the screen
def drawText(text, font, surface, width, height, text_color):
txt_obj = font.render(text, True, text_color)
txt_rect = txt_obj.get_rect()
txt_rect.topleft = (width, height)
surface.blit(txt_obj, txt_rect)
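# e.g. drawText('HP: 100', SMALL_FONT, GAME_DISPLAY, 1050, 20, TEXT_COLOR)
# renders the string with its top-left corner at pixel (1050, 20).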
# Get user input when not in game
def getLoadingScreenInput():
while True:
for event in pygame.event.get():
if event.type == QUIT:
terminate()
if event.type == KEYDOWN:
if event.key == K_ESCAPE: # pressing esc quits
terminate()
if event.key == K_RETURN:
return
# Check to see if the player hit the boss
def hitBoss(BULLETS, BOSS_RECT):
for b in BULLETS:
if b['rect'].colliderect(BOSS_RECT):
BULLETS.remove(b)
return True
return False
# Check to see if the player hit alien1
def hitAlien1(BULLETS, ALIEN1, a1):
for b in BULLETS:
if b['rect'].colliderect(a1['rect']):
BULLETS.remove(b)
return True
return False
# Check to see if the player hit alien2
def hitAlien2(BULLETS, ALIEN2, a2):
for b in BULLETS:
if b['rect'].colliderect(a2['rect']):
BULLETS.remove(b)
return True
return False
# Check if the player was hit by an enemy bullet
def hitPlayer(ALIEN_BULLETS, PLAYER_RECT):
for b2 in ALIEN_BULLETS:
if b2['rect'].colliderect(PLAYER_RECT):
ALIEN_BULLETS.remove(b2)
return True
return False
# Check to see if the player collided into an alien
def playerCollision(PLAYER_RECT, Alien):
for i in Alien:
if PLAYER_RECT.colliderect(i['rect']):
return True
return False
def terminate():
pygame.quit()
sys.exit()
# Call the main function, start up the game
if __name__ == '__main__':
main()
|
[
"pygame.mouse.set_visible",
"pygame.event.get",
"pygame.Rect",
"pygame.display.update",
"random.randint",
"pygame.font.SysFont",
"pygame.display.set_mode",
"pygame.mixer.music.play",
"pygame.transform.scale",
"pygame.mixer.Sound",
"pygame.quit",
"pygame.Surface",
"pygame.init",
"pygame.image.load",
"pygame.time.Clock",
"pygame.mixer.music.stop",
"sys.exit",
"pygame.time.delay",
"pygame.mixer.music.load"
] |
[((472, 493), 'random.randint', 'random.randint', (['(5)', '(10)'], {}), '(5, 10)\n', (486, 493), False, 'import sys, random, time, pygame\n'), ((1176, 1212), 'pygame.image.load', 'pygame.image.load', (['"""player_ship.png"""'], {}), "('player_ship.png')\n", (1193, 1212), False, 'import sys, random, time, pygame\n'), ((1277, 1308), 'pygame.image.load', 'pygame.image.load', (['"""alien1.png"""'], {}), "('alien1.png')\n", (1294, 1308), False, 'import sys, random, time, pygame\n'), ((1322, 1353), 'pygame.image.load', 'pygame.image.load', (['"""alien2.png"""'], {}), "('alien2.png')\n", (1339, 1353), False, 'import sys, random, time, pygame\n'), ((1367, 1396), 'pygame.image.load', 'pygame.image.load', (['"""boss.png"""'], {}), "('boss.png')\n", (1384, 1396), False, 'import sys, random, time, pygame\n'), ((1457, 1480), 'pygame.Surface', 'pygame.Surface', (['[10, 2]'], {}), '([10, 2])\n', (1471, 1480), False, 'import sys, random, time, pygame\n'), ((1570, 1593), 'pygame.Surface', 'pygame.Surface', (['[10, 2]'], {}), '([10, 2])\n', (1584, 1593), False, 'import sys, random, time, pygame\n'), ((1704, 1738), 'pygame.image.load', 'pygame.image.load', (['"""explosion.png"""'], {}), "('explosion.png')\n", (1721, 1738), False, 'import sys, random, time, pygame\n'), ((1759, 1797), 'pygame.image.load', 'pygame.image.load', (['"""big_explosion.png"""'], {}), "('big_explosion.png')\n", (1776, 1797), False, 'import sys, random, time, pygame\n'), ((1831, 1868), 'pygame.image.load', 'pygame.image.load', (['"""title_screen.jpg"""'], {}), "('title_screen.jpg')\n", (1848, 1868), False, 'import sys, random, time, pygame\n'), ((2004, 2044), 'pygame.image.load', 'pygame.image.load', (['"""instructions_bg.jpg"""'], {}), "('instructions_bg.jpg')\n", (2021, 2044), False, 'import sys, random, time, pygame\n'), ((2064, 2149), 'pygame.transform.scale', 'pygame.transform.scale', (['INSTRUCTIONS_IMG', '(GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT)'], {}), '(INSTRUCTIONS_IMG, (GAME_WINDOW_WIDTH,\n GAME_WINDOW_HEIGHT))\n', (2086, 2149), False, 'import sys, random, time, pygame\n'), ((2190, 2218), 'pygame.image.load', 'pygame.image.load', (['"""sky.jpg"""'], {}), "('sky.jpg')\n", (2207, 2218), False, 'import sys, random, time, pygame\n'), ((2236, 2315), 'pygame.transform.scale', 'pygame.transform.scale', (['BACKGROUND_IMG', '(GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT)'], {}), '(BACKGROUND_IMG, (GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT))\n', (2258, 2315), False, 'import sys, random, time, pygame\n'), ((2424, 2437), 'pygame.init', 'pygame.init', ([], {}), '()\n', (2435, 2437), False, 'import sys, random, time, pygame\n'), ((2454, 2473), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (2471, 2473), False, 'import sys, random, time, pygame\n'), ((2493, 2557), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT)'], {}), '((GAME_WINDOW_WIDTH, GAME_WINDOW_HEIGHT))\n', (2516, 2557), False, 'import sys, random, time, pygame\n'), ((2575, 2618), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""freesansbold.ttf"""', '(30)'], {}), "('freesansbold.ttf', 30)\n", (2594, 2618), False, 'import sys, random, time, pygame\n'), ((2634, 2677), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""freesansbold.ttf"""', '(60)'], {}), "('freesansbold.ttf', 60)\n", (2653, 2677), False, 'import sys, random, time, pygame\n'), ((2698, 2742), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""freesansbold.ttf"""', '(120)'], {}), "('freesansbold.ttf', 120)\n", (2717, 2742), False, 'import sys, random, time, pygame\n'), ((2818, 2849), 'pygame.mouse.set_visible', 'pygame.mouse.set_visible', (['(False)'], {}), '(False)\n', (2842, 2849), False, 'import sys, random, time, pygame\n'), ((2884, 2920), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['"""death.mid"""'], {}), "('death.mid')\n", (2907, 2920), False, 'import sys, random, time, pygame\n'), ((2925, 2957), 'pygame.mixer.music.play', 'pygame.mixer.music.play', (['(-1)', '(0.0)'], {}), '(-1, 0.0)\n', (2948, 2957), False, 'import sys, random, time, pygame\n'), ((3257, 3280), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (3278, 3280), False, 'import sys, random, time, pygame\n'), ((4705, 4728), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (4726, 4728), False, 'import sys, random, time, pygame\n'), ((4778, 4803), 'pygame.mixer.music.stop', 'pygame.mixer.music.stop', ([], {}), '()\n', (4801, 4803), False, 'import sys, random, time, pygame\n'), ((7898, 7934), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""laser_fire.wav"""'], {}), "('laser_fire.wav')\n", (7916, 7934), False, 'import sys, random, time, pygame\n'), ((21547, 21560), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (21558, 21560), False, 'import sys, random, time, pygame\n'), ((21565, 21575), 'sys.exit', 'sys.exit', ([], {}), '()\n', (21573, 21575), False, 'import sys, random, time, pygame\n'), ((5004, 5039), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['"""boss.mid"""'], {}), "('boss.mid')\n", (5027, 5039), False, 'import sys, random, time, pygame\n'), ((5047, 5074), 'pygame.mixer.music.play', 'pygame.mixer.music.play', (['(-1)'], {}), '(-1)\n', (5070, 5074), False, 'import sys, random, time, pygame\n'), ((5099, 5124), 'pygame.mixer.music.stop', 'pygame.mixer.music.stop', ([], {}), '()\n', (5122, 5124), False, 'import sys, random, time, pygame\n'), ((8324, 8342), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (8340, 8342), False, 'import sys, random, time, pygame\n'), ((18701, 18724), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (18722, 18724), False, 'import sys, random, time, pygame\n'), ((20143, 20161), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (20159, 20161), False, 'import sys, random, time, pygame\n'), ((5582, 5605), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (5603, 5605), False, 'import sys, random, time, pygame\n'), ((6293, 6316), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (6314, 6316), False, 'import sys, random, time, pygame\n'), ((6890, 6913), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (6911, 6913), False, 'import sys, random, time, pygame\n'), ((10450, 10507), 'random.randint', 'random.randint', (['(10)', '(GAME_WINDOW_HEIGHT - ALIEN1_SIZE - 10)'], {}), '(10, GAME_WINDOW_HEIGHT - ALIEN1_SIZE - 10)\n', (10464, 10507), False, 'import sys, random, time, pygame\n'), ((11116, 11173), 'random.randint', 'random.randint', (['(10)', '(GAME_WINDOW_HEIGHT - ALIEN2_SIZE - 10)'], {}), '(10, GAME_WINDOW_HEIGHT - ALIEN2_SIZE - 10)\n', (11130, 11173), False, 'import sys, random, time, pygame\n'), ((10581, 10638), 'random.randint', 'random.randint', (['(10)', '(GAME_WINDOW_HEIGHT - ALIEN1_SIZE - 10)'], {}), '(10, GAME_WINDOW_HEIGHT - ALIEN1_SIZE - 10)\n', (10595, 10638), False, 'import sys, random, time, pygame\n'), ((10667, 10732), 'pygame.Rect', 'pygame.Rect', (['GAME_WINDOW_WIDTH', 'rand_y1', 'ALIEN1_SIZE', 'ALIEN1_SIZE'], {}), '(GAME_WINDOW_WIDTH, rand_y1, ALIEN1_SIZE, ALIEN1_SIZE)\n', (10678, 10732), False, 'import sys, random, time, pygame\n'), ((10768, 10830), 'pygame.transform.scale', 'pygame.transform.scale', (['ALIEN1_IMG', '(ALIEN1_SIZE, ALIEN1_SIZE)'], {}), '(ALIEN1_IMG, (ALIEN1_SIZE, ALIEN1_SIZE))\n', (10790, 10830), False, 'import sys, random, time, pygame\n'), ((11247, 11304), 'random.randint', 'random.randint', (['(10)', '(GAME_WINDOW_HEIGHT - ALIEN1_SIZE - 10)'], {}), '(10, GAME_WINDOW_HEIGHT - ALIEN1_SIZE - 10)\n', (11261, 11304), False, 'import sys, random, time, pygame\n'), ((11333, 11398), 'pygame.Rect', 'pygame.Rect', (['GAME_WINDOW_WIDTH', 'rand_y2', 'ALIEN2_SIZE', 'ALIEN2_SIZE'], {}), '(GAME_WINDOW_WIDTH, rand_y2, ALIEN2_SIZE, ALIEN2_SIZE)\n', (11344, 11398), False, 'import sys, random, time, pygame\n'), ((11434, 11496), 'pygame.transform.scale', 'pygame.transform.scale', (['ALIEN2_IMG', '(ALIEN2_SIZE, ALIEN2_SIZE)'], {}), '(ALIEN2_IMG, (ALIEN2_SIZE, ALIEN2_SIZE))\n', (11456, 11496), False, 'import sys, random, time, pygame\n'), ((11761, 11868), 'pygame.Rect', 'pygame.Rect', (['(PLAYER_RECT.centerx + 10)', '(PLAYER_RECT.centery - 25)', 'BULLET_RECT.width', 'BULLET_RECT.height'], {}), '(PLAYER_RECT.centerx + 10, PLAYER_RECT.centery - 25, BULLET_RECT\n .width, BULLET_RECT.height)\n', (11772, 11868), False, 'import sys, random, time, pygame\n'), ((11875, 11950), 'pygame.transform.scale', 'pygame.transform.scale', (['BULLET_IMG', '(BULLET_RECT.width, BULLET_RECT.height)'], {}), '(BULLET_IMG, (BULLET_RECT.width, BULLET_RECT.height))\n', (11897, 11950), False, 'import sys, random, time, pygame\n'), ((11985, 12092), 'pygame.Rect', 'pygame.Rect', (['(PLAYER_RECT.centerx + 10)', '(PLAYER_RECT.centery + 25)', 'BULLET_RECT.width', 'BULLET_RECT.height'], {}), '(PLAYER_RECT.centerx + 10, PLAYER_RECT.centery + 25, BULLET_RECT\n .width, BULLET_RECT.height)\n', (11996, 12092), False, 'import sys, random, time, pygame\n'), ((12099, 12174), 'pygame.transform.scale', 'pygame.transform.scale', (['BULLET_IMG', '(BULLET_RECT.width, BULLET_RECT.height)'], {}), '(BULLET_IMG, (BULLET_RECT.width, BULLET_RECT.height))\n', (12121, 12174), False, 'import sys, random, time, pygame\n'), ((16746, 16779), 'pygame.display.update', 'pygame.display.update', (["a1['rect']"], {}), "(a1['rect'])\n", (16767, 16779), False, 'import sys, random, time, pygame\n'), ((17080, 17113), 'pygame.display.update', 'pygame.display.update', (["a2['rect']"], {}), "(a2['rect'])\n", (17101, 17113), False, 'import sys, random, time, pygame\n'), ((13152, 13258), 'pygame.Rect', 'pygame.Rect', (["a1['rect'].centerx", "a1['rect'].centery", 'ALIEN_BULLET_RECT.width', 'ALIEN_BULLET_RECT.height'], {}), "(a1['rect'].centerx, a1['rect'].centery, ALIEN_BULLET_RECT.width,\n ALIEN_BULLET_RECT.height)\n", (13163, 13258), False, 'import sys, random, time, pygame\n'), ((13295, 13392), 'pygame.transform.scale', 'pygame.transform.scale', (['ALIEN_BULLET_IMG', '(ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height)'], {}), '(ALIEN_BULLET_IMG, (ALIEN_BULLET_RECT.width,\n ALIEN_BULLET_RECT.height))\n', (13317, 13392), False, 'import sys, random, time, pygame\n'), ((13831, 13937), 'pygame.Rect', 'pygame.Rect', (["a2['rect'].centerx", "a2['rect'].centery", 'ALIEN_BULLET_RECT.width', 'ALIEN_BULLET_RECT.height'], {}), "(a2['rect'].centerx, a2['rect'].centery, ALIEN_BULLET_RECT.width,\n ALIEN_BULLET_RECT.height)\n", (13842, 13937), False, 'import sys, random, time, pygame\n'), ((13974, 14071), 'pygame.transform.scale', 'pygame.transform.scale', (['ALIEN_BULLET_IMG', '(ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height)'], {}), '(ALIEN_BULLET_IMG, (ALIEN_BULLET_RECT.width,\n ALIEN_BULLET_RECT.height))\n', (13996, 14071), False, 'import sys, random, time, pygame\n'), ((14534, 14638), 'pygame.Rect', 'pygame.Rect', (['BOSS_RECT.centerx', 'BOSS_RECT.centery', 'ALIEN_BULLET_RECT.width', 'ALIEN_BULLET_RECT.height'], {}), '(BOSS_RECT.centerx, BOSS_RECT.centery, ALIEN_BULLET_RECT.width,\n ALIEN_BULLET_RECT.height)\n', (14545, 14638), False, 'import sys, random, time, pygame\n'), ((14675, 14772), 'pygame.transform.scale', 'pygame.transform.scale', (['ALIEN_BULLET_IMG', '(ALIEN_BULLET_RECT.width, ALIEN_BULLET_RECT.height)'], {}), '(ALIEN_BULLET_IMG, (ALIEN_BULLET_RECT.width,\n ALIEN_BULLET_RECT.height))\n', (14697, 14772), False, 'import sys, random, time, pygame\n'), ((16339, 16371), 'pygame.display.update', 'pygame.display.update', (['BOSS_RECT'], {}), '(BOSS_RECT)\n', (16360, 16371), False, 'import sys, random, time, pygame\n'), ((16472, 16493), 'pygame.time.delay', 'pygame.time.delay', (['(30)'], {}), '(30)\n', (16489, 16493), False, 'import sys, random, time, pygame\n')]
|
import os
import pytest
from nixui.options import parser
SAMPLES_PATH = 'tests/sample'
@pytest.mark.datafiles(SAMPLES_PATH)
def test_get_all_option_values():
assert parser.get_all_option_values(
os.path.abspath(os.path.join(SAMPLES_PATH, 'configuration.nix'))
)
@pytest.mark.datafiles(SAMPLES_PATH)
def test_get_all_option_values_correct_attributes():
module_path = os.path.abspath(os.path.join(SAMPLES_PATH, 'configuration.nix'))
found_attrs = set([str(x) for x in parser.get_all_option_values(module_path)])
expected_attrs = {
'boot.extraModulePackages',
'boot.initrd.availableKernelModules',
'boot.initrd.availableKernelModules."[0]"',
'boot.initrd.availableKernelModules."[1]"',
'boot.initrd.availableKernelModules."[2]"',
'boot.initrd.availableKernelModules."[3]"',
'boot.initrd.availableKernelModules."[4]"',
'boot.initrd.availableKernelModules."[5]"',
'boot.initrd.kernelModules',
'boot.kernelModules',
'boot.kernelModules."[0]"',
'boot.kernelModules."[1]"',
'environment.etc',
'environment.etc."resolv.conf".text',
'environment.systemPackages',
'fileSystems."/"',
'fileSystems."/".device',
'fileSystems."/".fsType',
'fileSystems."/".label',
'fileSystems."/".options',
'fileSystems."/".options."[0]"',
'fileSystems."/".options."[1]"',
'fileSystems."/".options."[2]"',
'fileSystems."/boot"',
'fileSystems."/boot".device',
'fileSystems."/boot".fsType',
'fileSystems."/home"',
'fileSystems."/home".device',
'fileSystems."/home".fsType',
'fileSystems."/home/sample/media"',
'fileSystems."/home/sample/media".device',
'fileSystems."/home/sample/media".fsType',
'fonts',
'fonts.fontDir.enable',
'fonts.fonts',
'hardware.bluetooth',
'hardware.bluetooth.enable',
'hardware.bluetooth.settings',
'hardware.bluetooth.settings.General',
'hardware.bluetooth.settings.General.Enable',
'hardware.enableRedistributableFirmware',
'hardware.opengl.driSupport32Bit',
'hardware.pulseaudio',
'hardware.pulseaudio.enable',
'hardware.pulseaudio.extraModules',
'hardware.pulseaudio.extraModules."[0]"',
'hardware.pulseaudio.package',
'hardware.pulseaudio.support32Bit',
'networking',
'networking.firewall.allowPing',
'networking.firewall.allowedTCPPorts',
'networking.firewall.allowedTCPPorts."[0]"',
'networking.firewall.allowedTCPPorts."[1]"',
'networking.firewall.enable',
'networking.hostId',
'networking.hostName',
'networking.networkmanager.enable',
'programs',
'programs.vim.defaultEditor',
'programs.zsh.enable',
'security.sudo.extraConfig',
'services.blueman.enable',
'services.bookstack.nginx.listen',
'services.bookstack.nginx.listen."[0]"',
'services.bookstack.nginx.listen."[0]".addr',
'services.bookstack.nginx.listen."[0]".port',
'services.bookstack.nginx.listen."[0]".ssl',
'services.bookstack.nginx.listen."[1]"',
'services.bookstack.nginx.listen."[1]".addr',
'services.bookstack.nginx.listen."[1]".port',
'services.dbus',
'services.dbus.enable',
'services.dbus.packages',
'services.dbus.packages."[0]"',
'services.logind.lidSwitch',
'services.printing',
'services.printing.drivers',
'services.printing.drivers."[0]"',
'services.printing.enable',
'services.redshift.enable',
'services.redshift.temperature.day',
'services.redshift.temperature.night',
'services.unbound',
'services.unbound.enable',
'services.unbound.settings',
'services.unbound.settings.forward-zone',
'services.unbound.settings.forward-zone."[0]"',
'services.unbound.settings.forward-zone."[0]".forward-addr',
'services.unbound.settings.forward-zone."[0]".forward-addr."[0]"',
'services.unbound.settings.forward-zone."[0]".forward-addr."[1]"',
'services.unbound.settings.forward-zone."[0]".forward-tls-upstream',
'services.unbound.settings.forward-zone."[0]".name',
'services.unbound.settings.server',
'services.unbound.settings.server.cache-min-ttl',
'services.unbound.settings.server.do-tcp',
'services.unbound.settings.server.ssl-upstream',
'services.xserver',
'services.xserver.displayManager.lightdm.enable',
'services.xserver.displayManager.sessionCommands',
'services.xserver.enable',
'services.xserver.synaptics.enable',
'services.xserver.windowManager.i3.enable',
'services.xserver.xkbOptions',
'sound.enable',
'swapDevices',
'swapDevices."[0]"',
'swapDevices."[0]".device',
'system.stateVersion',
'time.timeZone',
'users.extraGroups.vboxusers.members',
'users.extraGroups.vboxusers.members."[0]"',
'users.extraUsers.sample',
'users.extraUsers.sample.description',
'users.extraUsers.sample.extraGroups',
'users.extraUsers.sample.extraGroups."[0]"',
'users.extraUsers.sample.extraGroups."[1]"',
'users.extraUsers.sample.extraGroups."[2]"',
'users.extraUsers.sample.extraGroups."[3]"',
'users.extraUsers.sample.extraGroups."[4]"',
'users.extraUsers.sample.home',
'users.extraUsers.sample.isNormalUser',
'users.extraUsers.sample.shell',
'users.extraUsers.sample.uid',
'virtualisation.libvirtd.enable'
}
assert found_attrs == expected_attrs
|
[
"nixui.options.parser.get_all_option_values",
"os.path.join",
"pytest.mark.datafiles"
] |
[((92, 127), 'pytest.mark.datafiles', 'pytest.mark.datafiles', (['SAMPLES_PATH'], {}), '(SAMPLES_PATH)\n', (113, 127), False, 'import pytest\n'), ((285, 320), 'pytest.mark.datafiles', 'pytest.mark.datafiles', (['SAMPLES_PATH'], {}), '(SAMPLES_PATH)\n', (306, 320), False, 'import pytest\n'), ((408, 455), 'os.path.join', 'os.path.join', (['SAMPLES_PATH', '"""configuration.nix"""'], {}), "(SAMPLES_PATH, 'configuration.nix')\n", (420, 455), False, 'import os\n'), ((227, 274), 'os.path.join', 'os.path.join', (['SAMPLES_PATH', '"""configuration.nix"""'], {}), "(SAMPLES_PATH, 'configuration.nix')\n", (239, 274), False, 'import os\n'), ((496, 537), 'nixui.options.parser.get_all_option_values', 'parser.get_all_option_values', (['module_path'], {}), '(module_path)\n', (524, 537), False, 'from nixui.options import parser\n')]
|
import datetime
import re
from urllib import parse
import requests
from bs4 import BeautifulSoup
URL_CALENDAR = "https://innherredrenovasjon.no/tommeplan/{premise_id}/kalender/"
URL_ADDRESS_SEARCH = "https://innherredrenovasjon.no/wp-json/ir/v1/addresses/{}"
DEFAULT_HEADERS = {
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0",
"Cache-Control": "max-age=0",
}
def find_address(address: str) -> dict[str, str]:
result = requests.get(URL_ADDRESS_SEARCH.format(parse.quote(address)), headers=DEFAULT_HEADERS).json()
return dict((e['id'], e['address']) for e in result['data']['results'])
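# Sketch of the mapping find_address() returns (id and address are made up):
#   find_address('Some Street 1') -> {'1234': 'Some Street 1'}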
def get_calendar(premise_id: str):
response = requests.get(URL_CALENDAR.format(premise_id=premise_id), headers=DEFAULT_HEADERS)
soup = BeautifulSoup(response.content, 'html.parser')
year = int(re.findall(r"\d{4}", soup.find('h2', class_='article__title').get_text(strip=True)).pop())
items = []
types = []
for item in soup.findAll('div', class_='gd-calendar__list-item'):
datestr = re.findall(r"(\d{2})\.(\d{2})",
item.find(class_='gd-calendar__list-item-date').get_text(strip=True)).pop()
dt_format = datetime.datetime.strptime(f"{year}-{datestr[1]}-{datestr[0]}", "%Y-%m-%d")
entry = {
"date": dt_format,
"type": item.find(class_='gd-calendar__list-item-type').get_text(strip=True),
}
if entry['type'] not in types:
types.append(entry['type'])
items.append(entry)
return items
def get_next_pickup(premise_id: str):
today = datetime.datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)
n = []
for e in get_calendar(premise_id):
if (e['date'] > today and len(n) == 0) or (len(n) > 0 and n[0]['date'] == e['date']):
n.append(e)
if len(n) == 0:
return None
return {
"date": n[0]['date'],
"types": " + ".join([e['type'] for e in n]),
}
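# Minimal usage sketch (hypothetical address and premise id; needs network access):
# if __name__ == '__main__':
#     matches = find_address('Some Street 1')
#     premise_id = next(iter(matches))
#     print(get_next_pickup(premise_id))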
|
[
"bs4.BeautifulSoup",
"urllib.parse.quote",
"datetime.datetime.strptime",
"datetime.datetime.today"
] |
[((795, 841), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content', '"""html.parser"""'], {}), "(response.content, 'html.parser')\n", (808, 841), False, 'from bs4 import BeautifulSoup\n'), ((1224, 1299), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['f"""{year}-{datestr[1]}-{datestr[0]}"""', '"""%Y-%m-%d"""'], {}), "(f'{year}-{datestr[1]}-{datestr[0]}', '%Y-%m-%d')\n", (1250, 1299), False, 'import datetime\n'), ((1627, 1652), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1650, 1652), False, 'import datetime\n'), ((519, 539), 'urllib.parse.quote', 'parse.quote', (['address'], {}), '(address)\n', (530, 539), False, 'from urllib import parse\n')]
|
from collections import OrderedDict
import pkg_resources
from gitgud.levels.util import BasicChallenge
all_challenges = OrderedDict()
all_challenges['committing'] = BasicChallenge('committing', pkg_resources.resource_filename(__name__, '_committing/'))
all_challenges['branching'] = BasicChallenge('branching', pkg_resources.resource_filename(__name__, '_branching/'))
all_challenges['merging'] = BasicChallenge('merging', pkg_resources.resource_filename(__name__, '_merging/'))
all_challenges['rebasing'] = BasicChallenge('rebasing', pkg_resources.resource_filename(__name__, '_rebasing/'))
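# Challenges can then be looked up by key in registration order, e.g.:
#   challenge = all_challenges['committing']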
|
[
"collections.OrderedDict",
"pkg_resources.resource_filename"
] |
[((123, 136), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (134, 136), False, 'from collections import OrderedDict\n'), ((197, 254), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""_committing/"""'], {}), "(__name__, '_committing/')\n", (228, 254), False, 'import pkg_resources\n'), ((314, 370), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""_branching/"""'], {}), "(__name__, '_branching/')\n", (345, 370), False, 'import pkg_resources\n'), ((426, 480), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""_merging/"""'], {}), "(__name__, '_merging/')\n", (457, 480), False, 'import pkg_resources\n'), ((538, 593), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""_rebasing/"""'], {}), "(__name__, '_rebasing/')\n", (569, 593), False, 'import pkg_resources\n')]
|
import re
import types
import logging
from banal import ensure_list
from normality import stringify
from balkhash.utils import safe_fragment
from email.utils import parsedate_to_datetime, getaddresses
from normality import safe_filename, ascii_text
from followthemoney.types import registry
from ingestors.support.html import HTMLSupport
from ingestors.support.cache import CacheSupport
from ingestors.support.temp import TempFileSupport
log = logging.getLogger(__name__)
class EmailIdentity(object):
def __init__(self, manager, name, email):
self.email = ascii_text(stringify(email))
self.name = stringify(name)
if not registry.email.validate(self.email):
self.email = None
if registry.email.validate(self.name):
self.email = self.email or ascii_text(self.name)
self.name = None
# This should be using formataddr, but I cannot figure out how
# to use that without encoding the name.
self.label = None
if self.name is not None and self.email is not None:
self.label = '%s <%s>' % (self.name, self.email)
elif self.name is None and self.email is not None:
self.label = self.email
elif self.email is None and self.name is not None:
self.label = self.name
self.entity = None
if self.email is not None:
key = self.email.lower().strip()
fragment = safe_fragment(self.label)
self.entity = manager.make_entity('Person')
self.entity.make_id(key)
self.entity.add('name', self.name)
self.entity.add('email', self.email)
manager.emit_entity(self.entity, fragment=fragment)
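# Illustrative outcomes of the label logic above (addresses are made up,
# and `manager` stands for any entity manager accepted by EmailIdentity):
#   EmailIdentity(manager, 'Jane Doe', '[email protected]').label -> 'Jane Doe <[email protected]>'
#   EmailIdentity(manager, None, '[email protected]').label      -> '[email protected]'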
class EmailSupport(TempFileSupport, HTMLSupport, CacheSupport):
"""Extract metadata from email messages."""
MID_RE = re.compile(r'<([^>]*)>')
def ingest_attachment(self, entity, name, mime_type, body):
has_body = body is not None and len(body)
if stringify(name) is None and not has_body:
# Hello, Outlook.
return
file_name = safe_filename(name, default='attachment')
file_path = self.make_work_file(file_name)
with open(file_path, 'wb') as fh:
if isinstance(body, str):
body = body.encode('utf-8')
if body is not None:
fh.write(body)
checksum = self.manager.store(file_path, mime_type=mime_type)
file_path.unlink()
child = self.manager.make_entity('Document', parent=entity)
child.make_id(name, checksum)
child.add('contentHash', checksum)
child.add('fileName', name)
child.add('mimeType', mime_type)
self.manager.queue_entity(child)
def get_header(self, msg, *headers):
values = []
for header in headers:
try:
for value in ensure_list(msg.get_all(header)):
values.append(value)
except (TypeError, IndexError, AttributeError, ValueError) as exc:
log.warning("Failed to parse [%s]: %s", header, exc)
return values
def get_dates(self, msg, *headers):
dates = []
for value in self.get_header(msg, *headers):
try:
dates.append(parsedate_to_datetime(value))
except Exception:
log.warning("Failed to parse: %s", value)
return dates
def get_identities(self, values):
values = [v for v in ensure_list(values) if v is not None]
for (name, email) in getaddresses(values):
yield EmailIdentity(self.manager, name, email)
def get_header_identities(self, msg, *headers):
yield from self.get_identities(self.get_header(msg, *headers))
def apply_identities(self, entity, identities, eprop=None, lprop=None):
if isinstance(identities, types.GeneratorType):
identities = list(identities)
for identity in ensure_list(identities):
if eprop is not None:
entity.add(eprop, identity.entity)
if lprop is not None:
entity.add(lprop, identity.label)
entity.add('namesMentioned', identity.name)
entity.add('emailMentioned', identity.email)
def parse_message_ids(self, values):
message_ids = []
for value in ensure_list(values):
value = stringify(value)
if value is None:
continue
for message_id in self.MID_RE.findall(value):
message_id = message_id.strip()
if len(message_id) <= 4:
continue
message_ids.append(message_id)
return message_ids
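    # Example of the extraction above (message IDs are illustrative):
    #   parse_message_ids('<[email protected]> <[email protected]>')
    #   -> ['[email protected]', '[email protected]']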
def parse_references(self, references, in_reply_to):
references = self.parse_message_ids(references)
if len(references):
return references[-1]
in_reply_to = self.parse_message_ids(in_reply_to)
if len(in_reply_to):
return in_reply_to[0]
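    # Heuristic above: the last entry in References is the direct parent;
    # In-Reply-To is only consulted when References yields nothing.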
def resolve_message_ids(self, entity):
# https://cr.yp.to/immhf/thread.html
ctx = self.manager.stage.job.dataset.name
for message_id in entity.get('messageId'):
key = self.cache_key('mid-ent', ctx, message_id)
self.add_cache_set(key, entity.id)
rev_key = self.cache_key('ent-mid', ctx, message_id)
for response_id in self.get_cache_set(rev_key):
if response_id == entity.id:
continue
email = self.manager.make_entity('Email')
email.id = response_id
email.add('inReplyToEmail', entity.id)
fragment = safe_fragment(message_id)
self.manager.emit_entity(email, fragment=fragment)
for message_id in entity.get('inReplyTo'):
# forward linking: from message ID to entity ID
key = self.cache_key('mid-ent', ctx, message_id)
for email_id in self.get_cache_set(key):
if email_id != entity.id:
entity.add('inReplyToEmail', email_id)
# backward linking: prepare entity ID for message to come
rev_key = self.cache_key('ent-mid', ctx, message_id)
self.add_cache_set(rev_key, entity.id)
def extract_msg_headers(self, entity, msg):
"""Parse E-Mail headers into FtM properties."""
try:
entity.add('indexText', msg.values())
except Exception as ex:
log.warning("Cannot parse all headers: %r", ex)
entity.add('subject', self.get_header(msg, 'Subject'))
entity.add('date', self.get_dates(msg, 'Date'))
entity.add('mimeType', self.get_header(msg, 'Content-Type'))
entity.add('threadTopic', self.get_header(msg, 'Thread-Topic'))
entity.add('generator', self.get_header(msg, 'X-Mailer'))
entity.add('language', self.get_header(msg, 'Content-Language'))
entity.add('keywords', self.get_header(msg, 'Keywords'))
entity.add('summary', self.get_header(msg, 'Comments'))
message_id = self.get_header(msg, 'Message-ID')
entity.add('messageId', self.parse_message_ids(message_id))
references = self.get_header(msg, 'References')
in_reply_to = self.get_header(msg, 'In-Reply-To')
entity.add('inReplyTo', self.parse_references(references, in_reply_to))
return_path = self.get_header_identities(msg, 'Return-Path')
self.apply_identities(entity, return_path)
reply_to = self.get_header_identities(msg, 'Reply-To')
self.apply_identities(entity, reply_to)
sender = self.get_header_identities(msg, 'Sender', 'X-Sender')
self.apply_identities(entity, sender, 'emitters', 'sender')
froms = self.get_header_identities(msg, 'From', 'X-From')
self.apply_identities(entity, froms, 'emitters', 'from')
tos = self.get_header_identities(msg, 'To', 'Resent-To')
self.apply_identities(entity, tos, 'recipients', 'to')
ccs = self.get_header_identities(msg, 'CC', 'Cc', 'Resent-Cc')
self.apply_identities(entity, ccs, 'recipients', 'cc')
bccs = self.get_header_identities(msg, 'Bcc', 'BCC', 'Resent-Bcc')
self.apply_identities(entity, bccs, 'recipients', 'bcc')
|
[
"balkhash.utils.safe_fragment",
"followthemoney.types.registry.email.validate",
"normality.stringify",
"normality.ascii_text",
"email.utils.parsedate_to_datetime",
"banal.ensure_list",
"email.utils.getaddresses",
"normality.safe_filename",
"logging.getLogger",
"re.compile"
] |
[((446, 473), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (463, 473), False, 'import logging\n'), ((1852, 1875), 're.compile', 're.compile', (['"""<([^>]*)>"""'], {}), "('<([^>]*)>')\n", (1862, 1875), False, 'import re\n'), ((622, 637), 'normality.stringify', 'stringify', (['name'], {}), '(name)\n', (631, 637), False, 'from normality import stringify\n'), ((731, 765), 'followthemoney.types.registry.email.validate', 'registry.email.validate', (['self.name'], {}), '(self.name)\n', (754, 765), False, 'from followthemoney.types import registry\n'), ((2115, 2156), 'normality.safe_filename', 'safe_filename', (['name'], {'default': '"""attachment"""'}), "(name, default='attachment')\n", (2128, 2156), False, 'from normality import safe_filename, ascii_text\n'), ((3579, 3599), 'email.utils.getaddresses', 'getaddresses', (['values'], {}), '(values)\n', (3591, 3599), False, 'from email.utils import parsedate_to_datetime, getaddresses\n'), ((3983, 4006), 'banal.ensure_list', 'ensure_list', (['identities'], {}), '(identities)\n', (3994, 4006), False, 'from banal import ensure_list\n'), ((4378, 4397), 'banal.ensure_list', 'ensure_list', (['values'], {}), '(values)\n', (4389, 4397), False, 'from banal import ensure_list\n'), ((584, 600), 'normality.stringify', 'stringify', (['email'], {}), '(email)\n', (593, 600), False, 'from normality import stringify\n'), ((653, 688), 'followthemoney.types.registry.email.validate', 'registry.email.validate', (['self.email'], {}), '(self.email)\n', (676, 688), False, 'from followthemoney.types import registry\n'), ((1446, 1471), 'balkhash.utils.safe_fragment', 'safe_fragment', (['self.label'], {}), '(self.label)\n', (1459, 1471), False, 'from balkhash.utils import safe_fragment\n'), ((4419, 4435), 'normality.stringify', 'stringify', (['value'], {}), '(value)\n', (4428, 4435), False, 'from normality import stringify\n'), ((806, 827), 'normality.ascii_text', 'ascii_text', (['self.name'], {}), '(self.name)\n', (816, 827), False, 'from normality import safe_filename, ascii_text\n'), ((2003, 2018), 'normality.stringify', 'stringify', (['name'], {}), '(name)\n', (2012, 2018), False, 'from normality import stringify\n'), ((3512, 3531), 'banal.ensure_list', 'ensure_list', (['values'], {}), '(values)\n', (3523, 3531), False, 'from banal import ensure_list\n'), ((5724, 5749), 'balkhash.utils.safe_fragment', 'safe_fragment', (['message_id'], {}), '(message_id)\n', (5737, 5749), False, 'from balkhash.utils import safe_fragment\n'), ((3305, 3333), 'email.utils.parsedate_to_datetime', 'parsedate_to_datetime', (['value'], {}), '(value)\n', (3326, 3333), False, 'from email.utils import parsedate_to_datetime, getaddresses\n')]
|
"""useful utility methods"""
import os
import json
import envoy
import shutil
import unittest
from pymongo import MongoClient
from dlkit.runtime import RUNTIME, PROXY_SESSION
from dlkit.runtime.primordium import Id, DataInputStream, Type
from dlkit.runtime.proxy_example import SimpleRequest
from dlkit.records.registry import ASSESSMENT_RECORD_TYPES
import dlkit.runtime.configs
from .authorization import create_authz_superuser, add_user_authz_to_settings
SIMPLE_SEQUENCE_ASSESSMENT_RECORD = Type(**ASSESSMENT_RECORD_TYPES['simple-child-sequencing'])
PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))
ABS_PATH = os.path.abspath(os.path.join(PROJECT_PATH, os.pardir))
TEST_DATA_STORE_PATH = os.path.join(ABS_PATH, '../../test_datastore')
TEST_BANK_GENUS = Type('assessment.Bank%3Atest-catalog%40ODL.MIT.EDU')
TEST_BIN_GENUS = Type('resource.Bin%3Atest-catalog%40ODL.MIT.EDU')
TEST_GRADEBOOK_GENUS = Type('grading.Gradebook%3Atest-catalog%40ODL.MIT.EDU')
TEST_LOG_GENUS = Type('logging.Log%3Atest-catalog%40ODL.MIT.EDU')
TEST_OBJECTIVE_BANK_GENUS = Type('learning.ObjectiveBank%3Atest-catalog%40ODL.MIT.EDU')
TEST_REPOSITORY_GENUS = Type('repository.Repository%3Atest-catalog%40ODL.MIT.EDU')
class DLKitTestCase(unittest.TestCase):
"""
"""
dbs_to_delete = ['test_dlkit_assessment',
'test_dlkit_assessment_authoring',
'test_dlkit_authorization',
'test_dlkit_commenting',
'test_dlkit_hierarchy',
'test_dlkit_id',
'test_dlkit_learning',
'test_dlkit_logging',
'test_dlkit_grading',
'test_dlkit_relationship',
'test_dlkit_repository',
'test_dlkit_resource']
@staticmethod
def _delete_database(db_name):
MongoClient().drop_database(db_name)
# def _pre_setup(self):
# MockTestCase.setUp(self)
# def _post_teardown(self):
# MockTestCase.tearDown(self)
def code(self, _req, _code):
self.assertEqual(_req.status_code, _code)
def create_assessment_for_items(self, bank, item_list):
form = bank.get_assessment_form_for_create([SIMPLE_SEQUENCE_ASSESSMENT_RECORD])
form.display_name = 'a test assessment'
form.description = 'for testing with'
new_assessment = bank.create_assessment(form)
for item in item_list:
bank.add_item(new_assessment.ident, item.ident)
return new_assessment
def _get_test_bank(self):
am = get_manager(self.req, 'assessment')
querier = am.get_bank_query()
querier.match_genus_type(TEST_BANK_GENUS, True)
bank = next(am.get_banks_by_query(querier))
return am.get_bank(bank.ident) # to make sure we get a services bank
def create_new_bank(self, name="my new assessment bank"):
am = get_manager(self.req, 'assessment')
form = am.get_bank_form_for_create([])
form.display_name = name
form.description = 'for testing with'
form.set_genus_type(TEST_BANK_GENUS)
bank = am.create_bank(form)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=bank.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=bank.ident)
return bank
def _get_test_bin(self):
# assume the first one -- we're missing permissions to query?
rm = get_manager(self.req, 'resource')
return next(rm.get_bins())
def create_new_bin(self):
rm = get_manager(self.req, 'resource')
form = rm.get_bin_form_for_create([])
form.display_name = 'my new bin'
form.description = 'for testing with'
form.set_genus_type(TEST_BIN_GENUS)
bin = rm.create_bin(form)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=bin.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=bin.ident)
return bin
def _get_test_gradebook(self):
# no gradebook query, so assume first gradebook
gm = get_manager(self.req, 'grading')
return next(gm.get_gradebooks())
def create_new_gradebook(self):
gm = get_manager(self.req, 'grading')
form = gm.get_gradebook_form_for_create([])
form.display_name = 'my new grade book'
form.description = 'for testing with'
form.set_genus_type(TEST_GRADEBOOK_GENUS)
gradebook = gm.create_gradebook(form)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=gradebook.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=gradebook.ident)
return gradebook
def _get_test_log(self):
# we don't have log query enabled ... so assume first log found
logm = get_manager(self.req, 'logging')
return next(logm.get_logs())
def create_new_log(self):
logm = get_manager(self.req, 'logging')
form = logm.get_log_form_for_create([])
form.display_name = 'my new log'
form.description = 'for testing with'
form.set_genus_type(TEST_LOG_GENUS)
log = logm.create_log(form)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=log.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=log.ident)
return log
def _get_test_objective_bank(self):
# get the first one because no objective bank query
lm = get_manager(self.req, 'learning')
return next(lm.get_objective_banks())
def create_new_objective_bank(self):
lm = get_manager(self.req, 'learning')
form = lm.get_objective_bank_form_for_create([])
form.display_name = 'my new objective bank'
form.description = 'for testing with'
form.set_genus_type(TEST_OBJECTIVE_BANK_GENUS)
objective_bank = lm.create_objective_bank(form)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=objective_bank.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=objective_bank.ident)
return objective_bank
def _get_test_repository(self):
rm = get_manager(self.req, 'repository')
querier = rm.get_repository_query()
querier.match_genus_type(TEST_REPOSITORY_GENUS, True)
repo = next(rm.get_repositories_by_query(querier))
return rm.get_repository(repo.ident) # to make sure we get a services repository
def create_new_repo(self):
rm = get_manager(self.req, 'repository')
form = rm.get_repository_form_for_create([])
form.display_name = 'my new repository'
form.description = 'for testing with'
form.set_genus_type(TEST_REPOSITORY_GENUS)
repo = rm.create_repository(form)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=repo.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=repo.ident)
return repo
def create_taken_for_items(self, bank, item_list):
new_assessment = self.create_assessment_for_items(bank, item_list)
form = bank.get_assessment_offered_form_for_create(new_assessment.ident, [])
new_offered = bank.create_assessment_offered(form)
form = bank.get_assessment_taken_form_for_create(new_offered.ident, [])
taken = bank.create_assessment_taken(form)
return taken
def created(self, _req):
self.code(_req, 201)
def deleted(self, _req):
self.code(_req, 204)
def filename(self, file_):
try:
return file_.name.split('/')[-1].split('.')[0]
except AttributeError:
return file_.split('/')[-1].split('.')[0]
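    # e.g. filename('/tmp/test_file_1.txt') -> 'test_file_1'
    # (handles both file objects and plain path strings via the fallback above)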
def get_book(self, book_id):
cm = get_manager(self.req, 'commenting')
if is_string(book_id):
book_id = Id(book_id)
book = cm.get_book(book_id)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=book.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=book.ident)
return book
def get_repo(self, repo_id):
rm = get_manager(self.req, 'repository')
if isinstance(repo_id, str):
repo_id = Id(repo_id)
repo = rm.get_repository(repo_id)
add_user_authz_to_settings('instructor',
self.username,
catalog_id=repo.ident)
add_user_authz_to_settings('student',
self.student_name,
catalog_id=repo.ident)
return repo
def is_streamable_url(self, _url):
self.assertIn('/stream', _url)
def json(self, _req):
return json.loads(_req.content)
def message(self, _req, _msg):
self.assertIn(_msg, str(_req.content))
def ok(self, _req):
self.assertEqual(_req.status_code, 200)
def setUp(self):
for db in self.dbs_to_delete:
self._delete_database(db)
load_fixtures()
self.url = '/api/v2/repository/'
self.username = '<EMAIL>'
self.instructor_req = SimpleRequest(self.username)
self.student_name = '<EMAIL>'
self.student_req = SimpleRequest(self.student_name)
self.unauthenticated_req = SimpleRequest(self.username, authenticated=False)
self.req = self.instructor_req
self.test_file1 = open(ABS_PATH + '/files/test_file_1.txt', 'rb')
self.test_file2 = open(ABS_PATH + '/files/test_file_2.txt', 'rb')
if os.path.isdir(TEST_DATA_STORE_PATH):
shutil.rmtree(TEST_DATA_STORE_PATH)
if not os.path.isdir(TEST_DATA_STORE_PATH):
os.makedirs(TEST_DATA_STORE_PATH)
# add_user_authz_to_settings('instructor',
# self.username)
# add_user_authz_to_settings('student',
# self.student_name)
#
# import pdb
# pdb.set_trace()
# self.create_new_bank()
# self.create_new_bin()
# self.create_new_gradebook()
# self.create_new_log()
# self.create_new_objective_bank()
# self.create_new_repo()
# pdb.set_trace()
def setup_asset(self, repository_id):
test_file = '/functional/files/test_file_1.txt'
repo = self.get_repo(repository_id)
asset_form = repo.get_asset_form_for_create([])
asset_form.display_name = 'test'
asset_form.description = 'ing'
new_asset = repo.create_asset(asset_form)
# now add the new data
asset_content_type_list = []
try:
config = repo._runtime.get_configuration()
parameter_id = Id('parameter:assetContentRecordTypeForFiles@json')
asset_content_type_list.append(
config.get_value_by_parameter(parameter_id).get_type_value())
except AttributeError:
pass
asset_content_form = repo.get_asset_content_form_for_create(new_asset.ident,
asset_content_type_list)
self.default_asset_file = ABS_PATH + test_file
with open(self.default_asset_file, 'r') as file_:
asset_content_form.set_data(DataInputStream(file_))
repo.create_asset_content(asset_content_form)
new_asset = repo.get_asset(new_asset.ident)
return new_asset.object_map
def tearDown(self):
for db in self.dbs_to_delete:
self._delete_database(db)
self.test_file1.close()
self.test_file2.close()
if os.path.isdir(TEST_DATA_STORE_PATH):
shutil.rmtree(TEST_DATA_STORE_PATH)
def updated(self, _req):
self.code(_req, 202)
def is_string(string_):
try:
# python 2
return isinstance(string_, basestring)
except NameError:
# python 3
return isinstance(string_, str)
def load_fixtures():
"""use test settings, not the production settings"""
# create a super-user who can create authorizations
# create_authz_superuser()
envoy.run('mongorestore --db test_dlkit_assessment --drop tests/functional/fixtures/test_dlkit_assessment')
envoy.run('mongorestore --db test_dlkit_authorization --drop tests/functional/fixtures/test_dlkit_authorization')
envoy.run('mongorestore --db test_dlkit_grading --drop tests/functional/fixtures/test_dlkit_grading')
envoy.run('mongorestore --db test_dlkit_learning --drop tests/functional/fixtures/test_dlkit_learning')
envoy.run('mongorestore --db test_dlkit_logging --drop tests/functional/fixtures/test_dlkit_logging')
envoy.run('mongorestore --db test_dlkit_repository --drop tests/functional/fixtures/test_dlkit_repository')
envoy.run('mongorestore --db test_dlkit_resource --drop tests/functional/fixtures/test_dlkit_resource')
def create_test_bank(test_instance):
"""
helper method to create a test assessment bank
"""
test_endpoint = '/api/v2/assessment/banks/'
test_instance.login()
payload = {
"name": "a test bank",
"description": "for testing"
}
req = test_instance.client.post(test_endpoint, payload, format='json')
return json.loads(req.content)
def create_test_request(test_user):
# from django.http import HttpRequest
# from django.conf import settings
# from django.utils.importlib import import_module
# #http://stackoverflow.com/questions/16865947/django-httprequest-object-has-no-attribute-session
# test_request = HttpRequest()
# engine = import_module(settings.SESSION_ENGINE)
# session_key = None
# test_request.user = test_user
# test_request.session = engine.SessionStore(session_key)
# return test_request
return SimpleRequest(username=test_user.username)
def get_agent_id(agent_id):
"""Not a great hack...depends too much on internal DLKit knowledge"""
if '@mit.edu' not in agent_id:
agent_id += '@mit.edu'
test_request = SimpleRequest(agent_id)
condition = PROXY_SESSION.get_proxy_condition()
condition.set_http_request(test_request)
proxy = PROXY_SESSION.get_proxy(condition)
resm = RUNTIME.get_service_manager('RESOURCE',
implementation='TEST_SERVICE_FUNCTIONAL',
proxy=proxy)
return resm.effective_agent_id
def get_manager(request, manager_type):
condition = PROXY_SESSION.get_proxy_condition()
condition.set_http_request(request)
proxy = PROXY_SESSION.get_proxy(condition)
return RUNTIME.get_service_manager(manager_type.upper(),
implementation='TEST_SERVICE_FUNCTIONAL',
proxy=proxy)
def serialize_date(date):
return {
'day': date.day,
'month': date.month,
'year': date.year,
'hour': date.hour,
'minute': date.minute,
'second': date.second,
'microsecond': date.microsecond
}
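# Example (illustrative input): serialize_date(datetime.datetime(2020, 1, 2, 3, 4, 5))
# -> {'day': 2, 'month': 1, 'year': 2020, 'hour': 3, 'minute': 4,
#     'second': 5, 'microsecond': 0}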
|
[
"pymongo.MongoClient",
"dlkit.runtime.primordium.DataInputStream",
"os.path.abspath",
"json.loads",
"os.makedirs",
"os.path.isdir",
"dlkit.runtime.PROXY_SESSION.get_proxy",
"dlkit.runtime.primordium.Type",
"envoy.run",
"dlkit.runtime.primordium.Id",
"dlkit.runtime.RUNTIME.get_service_manager",
"dlkit.runtime.proxy_example.SimpleRequest",
"shutil.rmtree",
"dlkit.runtime.PROXY_SESSION.get_proxy_condition",
"os.path.join"
] |
[((499, 557), 'dlkit.runtime.primordium.Type', 'Type', ([], {}), "(**ASSESSMENT_RECORD_TYPES['simple-child-sequencing'])\n", (503, 557), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((706, 752), 'os.path.join', 'os.path.join', (['ABS_PATH', '"""../../test_datastore"""'], {}), "(ABS_PATH, '../../test_datastore')\n", (718, 752), False, 'import os\n'), ((772, 824), 'dlkit.runtime.primordium.Type', 'Type', (['"""assessment.Bank%3Atest-catalog%40ODL.MIT.EDU"""'], {}), "('assessment.Bank%3Atest-catalog%40ODL.MIT.EDU')\n", (776, 824), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((842, 891), 'dlkit.runtime.primordium.Type', 'Type', (['"""resource.Bin%3Atest-catalog%40ODL.MIT.EDU"""'], {}), "('resource.Bin%3Atest-catalog%40ODL.MIT.EDU')\n", (846, 891), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((915, 969), 'dlkit.runtime.primordium.Type', 'Type', (['"""grading.Gradebook%3Atest-catalog%40ODL.MIT.EDU"""'], {}), "('grading.Gradebook%3Atest-catalog%40ODL.MIT.EDU')\n", (919, 969), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((987, 1035), 'dlkit.runtime.primordium.Type', 'Type', (['"""logging.Log%3Atest-catalog%40ODL.MIT.EDU"""'], {}), "('logging.Log%3Atest-catalog%40ODL.MIT.EDU')\n", (991, 1035), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((1064, 1123), 'dlkit.runtime.primordium.Type', 'Type', (['"""learning.ObjectiveBank%3Atest-catalog%40ODL.MIT.EDU"""'], {}), "('learning.ObjectiveBank%3Atest-catalog%40ODL.MIT.EDU')\n", (1068, 1123), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((1148, 1206), 'dlkit.runtime.primordium.Type', 'Type', (['"""repository.Repository%3Atest-catalog%40ODL.MIT.EDU"""'], {}), "('repository.Repository%3Atest-catalog%40ODL.MIT.EDU')\n", (1152, 1206), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((590, 615), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (605, 615), False, 'import os\n'), ((644, 681), 'os.path.join', 'os.path.join', (['PROJECT_PATH', 'os.pardir'], {}), '(PROJECT_PATH, os.pardir)\n', (656, 681), False, 'import os\n'), ((13213, 13330), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_assessment --drop tests/functional/fixtures/test_dlkit_assessment"""'], {}), "(\n 'mongorestore --db test_dlkit_assessment --drop tests/functional/fixtures/test_dlkit_assessment'\n )\n", (13222, 13330), False, 'import envoy\n'), ((13325, 13448), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_authorization --drop tests/functional/fixtures/test_dlkit_authorization"""'], {}), "(\n 'mongorestore --db test_dlkit_authorization --drop tests/functional/fixtures/test_dlkit_authorization'\n )\n", (13334, 13448), False, 'import envoy\n'), ((13443, 13554), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_grading --drop tests/functional/fixtures/test_dlkit_grading"""'], {}), "(\n 'mongorestore --db test_dlkit_grading --drop tests/functional/fixtures/test_dlkit_grading'\n )\n", (13452, 13554), False, 'import envoy\n'), ((13549, 13662), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_learning --drop tests/functional/fixtures/test_dlkit_learning"""'], {}), "(\n 'mongorestore --db test_dlkit_learning --drop tests/functional/fixtures/test_dlkit_learning'\n )\n", (13558, 13662), False, 'import envoy\n'), ((13657, 13768), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_logging --drop tests/functional/fixtures/test_dlkit_logging"""'], {}), "(\n 'mongorestore --db test_dlkit_logging --drop tests/functional/fixtures/test_dlkit_logging'\n )\n", (13666, 13768), False, 'import envoy\n'), ((13763, 13880), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_repository --drop tests/functional/fixtures/test_dlkit_repository"""'], {}), "(\n 'mongorestore --db test_dlkit_repository --drop tests/functional/fixtures/test_dlkit_repository'\n )\n", (13772, 13880), False, 'import envoy\n'), ((13875, 13988), 'envoy.run', 'envoy.run', (['"""mongorestore --db test_dlkit_resource --drop tests/functional/fixtures/test_dlkit_resource"""'], {}), "(\n 'mongorestore --db test_dlkit_resource --drop tests/functional/fixtures/test_dlkit_resource'\n )\n", (13884, 13988), False, 'import envoy\n'), ((14335, 14358), 'json.loads', 'json.loads', (['req.content'], {}), '(req.content)\n', (14345, 14358), False, 'import json\n'), ((14884, 14926), 'dlkit.runtime.proxy_example.SimpleRequest', 'SimpleRequest', ([], {'username': 'test_user.username'}), '(username=test_user.username)\n', (14897, 14926), False, 'from dlkit.runtime.proxy_example import SimpleRequest\n'), ((15116, 15139), 'dlkit.runtime.proxy_example.SimpleRequest', 'SimpleRequest', (['agent_id'], {}), '(agent_id)\n', (15129, 15139), False, 'from dlkit.runtime.proxy_example import SimpleRequest\n'), ((15156, 15191), 'dlkit.runtime.PROXY_SESSION.get_proxy_condition', 'PROXY_SESSION.get_proxy_condition', ([], {}), '()\n', (15189, 15191), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((15249, 15283), 'dlkit.runtime.PROXY_SESSION.get_proxy', 'PROXY_SESSION.get_proxy', (['condition'], {}), '(condition)\n', (15272, 15283), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((15295, 15394), 'dlkit.runtime.RUNTIME.get_service_manager', 'RUNTIME.get_service_manager', (['"""RESOURCE"""'], {'implementation': '"""TEST_SERVICE_FUNCTIONAL"""', 'proxy': 'proxy'}), "('RESOURCE', implementation=\n 'TEST_SERVICE_FUNCTIONAL', proxy=proxy)\n", (15322, 15394), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((15561, 15596), 'dlkit.runtime.PROXY_SESSION.get_proxy_condition', 'PROXY_SESSION.get_proxy_condition', ([], {}), '()\n', (15594, 15596), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((15649, 15683), 'dlkit.runtime.PROXY_SESSION.get_proxy', 'PROXY_SESSION.get_proxy', (['condition'], {}), '(condition)\n', (15672, 15683), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((9813, 9837), 'json.loads', 'json.loads', (['_req.content'], {}), '(_req.content)\n', (9823, 9837), False, 'import json\n'), ((10222, 10250), 'dlkit.runtime.proxy_example.SimpleRequest', 'SimpleRequest', (['self.username'], {}), '(self.username)\n', (10235, 10250), False, 'from dlkit.runtime.proxy_example import SimpleRequest\n'), ((10317, 10349), 'dlkit.runtime.proxy_example.SimpleRequest', 'SimpleRequest', (['self.student_name'], {}), '(self.student_name)\n', (10330, 10349), False, 'from dlkit.runtime.proxy_example import SimpleRequest\n'), ((10386, 10435), 'dlkit.runtime.proxy_example.SimpleRequest', 'SimpleRequest', (['self.username'], {'authenticated': '(False)'}), '(self.username, authenticated=False)\n', (10399, 10435), False, 'from dlkit.runtime.proxy_example import SimpleRequest\n'), ((10636, 10671), 'os.path.isdir', 'os.path.isdir', (['TEST_DATA_STORE_PATH'], {}), '(TEST_DATA_STORE_PATH)\n', (10649, 10671), False, 'import os\n'), ((12716, 12751), 'os.path.isdir', 'os.path.isdir', (['TEST_DATA_STORE_PATH'], {}), '(TEST_DATA_STORE_PATH)\n', (12729, 12751), False, 'import os\n'), ((8774, 8785), 'dlkit.runtime.primordium.Id', 'Id', (['book_id'], {}), '(book_id)\n', (8776, 8785), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((9301, 9312), 'dlkit.runtime.primordium.Id', 'Id', (['repo_id'], {}), '(repo_id)\n', (9303, 9312), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((10685, 10720), 'shutil.rmtree', 'shutil.rmtree', (['TEST_DATA_STORE_PATH'], {}), '(TEST_DATA_STORE_PATH)\n', (10698, 10720), False, 'import shutil\n'), ((10737, 10772), 'os.path.isdir', 'os.path.isdir', (['TEST_DATA_STORE_PATH'], {}), '(TEST_DATA_STORE_PATH)\n', (10750, 10772), False, 'import os\n'), ((10786, 10819), 'os.makedirs', 'os.makedirs', (['TEST_DATA_STORE_PATH'], {}), '(TEST_DATA_STORE_PATH)\n', (10797, 10819), False, 'import os\n'), ((11815, 11866), 'dlkit.runtime.primordium.Id', 'Id', (['"""parameter:assetContentRecordTypeForFiles@json"""'], {}), "('parameter:assetContentRecordTypeForFiles@json')\n", (11817, 11866), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n'), ((12765, 12800), 'shutil.rmtree', 'shutil.rmtree', (['TEST_DATA_STORE_PATH'], {}), '(TEST_DATA_STORE_PATH)\n', (12778, 12800), False, 'import shutil\n'), ((1875, 1888), 'pymongo.MongoClient', 'MongoClient', ([], {}), '()\n', (1886, 1888), False, 'from pymongo import MongoClient\n'), ((12370, 12392), 'dlkit.runtime.primordium.DataInputStream', 'DataInputStream', (['file_'], {}), '(file_)\n', (12385, 12392), False, 'from dlkit.runtime.primordium import Id, DataInputStream, Type\n')]
|
r"""
Ring `\ZZ/n\ZZ` of integers modulo `n`
EXAMPLES::
sage: R = Integers(97)
sage: a = R(5)
sage: a**100000000000000000000000000000000000000000000000000000000000000
61
This example illustrates the relation between
`\ZZ/p\ZZ` and `\GF{p}`. In
particular, there is a canonical map to `\GF{p}`, but not in
the other direction.
::
sage: r = Integers(7)
sage: s = GF(7)
sage: r.has_coerce_map_from(s)
False
sage: s.has_coerce_map_from(r)
True
sage: s(1) + r(1)
2
sage: parent(s(1) + r(1))
Finite Field of size 7
sage: parent(r(1) + s(1))
Finite Field of size 7
We list the elements of `\ZZ/3\ZZ`::
sage: R = Integers(3)
sage: list(R)
[0, 1, 2]
AUTHORS:
- <NAME> (initial code)
- <NAME> (2005-12-22): most examples
- <NAME> (2006-08-24): convert to SageX (Cython)
- <NAME> (2007-04-29): square_roots_of_one
- Simon King (2011-04-21): allow a category to be prescribed
- Simon King (2013-09): only allow the category of fields to be prescribed
"""
#*****************************************************************************
# Copyright (C) 2005 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
import sage.misc.prandom as random
from sage.arith.all import factor, primitive_root, CRT_basis
import sage.rings.ring as ring
from . import integer_mod
import sage.rings.integer as integer
import sage.rings.integer_ring as integer_ring
import sage.rings.quotient_ring as quotient_ring
from sage.libs.pari.all import pari, PariError
import sage.interfaces.all
from sage.misc.cachefunc import cached_method
from sage.structure.factory import UniqueFactory
from sage.structure.richcmp import richcmp, richcmp_method
class IntegerModFactory(UniqueFactory):
r"""
Return the quotient ring `\ZZ / n\ZZ`.
INPUT:
- ``order`` -- integer (default: 0); positive or negative
- ``is_field`` -- bool (default: ``False``); assert that
the order is prime and hence the quotient ring belongs to
the category of fields
- ``category`` (optional) - the category that the quotient ring belongs to.
.. NOTE::
The optional argument ``is_field`` is not part of the cache key.
Hence, this factory will create precisely one instance of `\ZZ /
n\ZZ`. However, if ``is_field`` is true, then a previously created
instance of the quotient ring will be updated to be in the category of
fields.
**Use with care!** Erroneously putting `\ZZ / n\ZZ` into the category
of fields may have consequences that can compromise a whole Sage
session, so that a restart will be needed.
EXAMPLES::
sage: IntegerModRing(15)
Ring of integers modulo 15
sage: IntegerModRing(7)
Ring of integers modulo 7
sage: IntegerModRing(-100)
Ring of integers modulo 100
Note that you can also use ``Integers``, which is a
synonym for ``IntegerModRing``.
::
sage: Integers(18)
Ring of integers modulo 18
sage: Integers() is Integers(0) is ZZ
True
.. NOTE::
Testing whether a quotient ring `\ZZ / n\ZZ` is a field can of
course be very costly. By default, it is not tested whether `n`
is prime or not, in contrast to
:func:`~sage.rings.finite_rings.finite_field_constructor.GF`. If the user
is sure that the modulus is prime and wants to avoid a primality
test, (s)he can provide ``category=Fields()`` when constructing
the quotient ring, and then the result will behave like a field.
If the category is not provided during initialisation, and it is
found out later that the ring is in fact a field, then the category
will be changed at runtime, having the same effect as providing
``Fields()`` during initialisation.
EXAMPLES::
sage: R = IntegerModRing(5)
sage: R.category()
Join of Category of finite commutative rings
and Category of subquotients of monoids
and Category of quotients of semigroups
and Category of finite enumerated sets
sage: R in Fields()
True
sage: R.category()
Join of Category of finite enumerated fields
and Category of subquotients of monoids
and Category of quotients of semigroups
sage: S = IntegerModRing(5, is_field=True)
sage: S is R
True
.. WARNING::
If the optional argument ``is_field`` was used by mistake, there is
currently no way to revert its impact, even though
:meth:`IntegerModRing_generic.is_field` with the optional argument
``proof=True`` would return the correct answer. So, prescribe
        ``is_field=True`` only if you know what you are doing!
EXAMPLES::
sage: R = IntegerModRing(33, is_field=True)
sage: R in Fields()
True
sage: R.is_field()
True
    If the optional argument ``proof=True`` is provided, primality is tested and
the mistaken category assignment is reported::
sage: R.is_field(proof=True)
Traceback (most recent call last):
...
ValueError: THIS SAGE SESSION MIGHT BE SERIOUSLY COMPROMISED!
The order 33 is not prime, but this ring has been put
into the category of fields. This may already have consequences
in other parts of Sage. Either it was a mistake of the user,
or a probabilistic primality test has failed.
In the latter case, please inform the developers.
However, the mistaken assignment is not automatically corrected::
sage: R in Fields()
True
To avoid side-effects of this test on other tests, we clear the cache of
the ring factory::
sage: IntegerModRing._cache.clear()
"""
def get_object(self, version, key, extra_args):
out = super(IntegerModFactory,self).get_object(version, key, extra_args)
category = extra_args.get('category', None)
if category is not None:
out._refine_category_(category)
out._factory_data[3]['category'] = category
return out
def create_key_and_extra_args(self, order=0, is_field=False, category=None):
"""
An integer mod ring is specified uniquely by its order.
EXAMPLES::
sage: Zmod.create_key_and_extra_args(7)
(7, {})
sage: Zmod.create_key_and_extra_args(7, True)
(7, {'category': Category of fields})
"""
if is_field:
from sage.categories.fields import Fields
return order, {'category':Fields()}
return order, {}
def create_object(self, version, order, **kwds):
"""
EXAMPLES::
sage: R = Integers(10)
sage: TestSuite(R).run() # indirect doctest
"""
if isinstance(order, tuple):
# this is for unpickling old data
order, category = order
kwds.setdefault('category', category)
if order < 0:
order = -order
if order == 0:
return integer_ring.IntegerRing(**kwds)
else:
return IntegerModRing_generic(order, **kwds)
Zmod = Integers = IntegerModRing = IntegerModFactory("IntegerModRing")
def is_IntegerModRing(x):
"""
Return ``True`` if ``x`` is an integer modulo ring.
EXAMPLES::
sage: from sage.rings.finite_rings.integer_mod_ring import is_IntegerModRing
sage: R = IntegerModRing(17)
sage: is_IntegerModRing(R)
True
sage: is_IntegerModRing(GF(13))
True
sage: is_IntegerModRing(GF(4, 'a'))
False
sage: is_IntegerModRing(10)
False
sage: is_IntegerModRing(ZZ)
False
"""
return isinstance(x, IntegerModRing_generic)
from sage.categories.commutative_rings import CommutativeRings
from sage.categories.finite_enumerated_sets import FiniteEnumeratedSets
from sage.categories.category import JoinCategory
default_category = JoinCategory((CommutativeRings(), FiniteEnumeratedSets()))
ZZ = integer_ring.IntegerRing()
def _unit_gens_primepowercase(p, r):
r"""
Return a list of generators for `(\ZZ/p^r\ZZ)^*` and their orders.
EXAMPLES::
sage: from sage.rings.finite_rings.integer_mod_ring import _unit_gens_primepowercase
sage: _unit_gens_primepowercase(2, 3)
[(7, 2), (5, 2)]
sage: _unit_gens_primepowercase(17, 1)
[(3, 16)]
sage: _unit_gens_primepowercase(3, 3)
[(2, 18)]
"""
pr = p**r
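    # Structure of the unit group (Z/p^r Z)^*:
    #   p = 2, r = 1:  trivial;
    #   p = 2, r = 2:  cyclic of order 2, generated by 3;
    #   p = 2, r >= 3: C_2 x C_{2^(r-2)}, generated by -1 and 5;
    #   p odd:         cyclic of order p^(r-1)*(p-1), generated by any
    #                  primitive root modulo p^r.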
if p == 2:
if r == 1:
return []
if r == 2:
return [(integer_mod.Mod(3, 4), integer.Integer(2))]
return [(integer_mod.Mod(-1, pr), integer.Integer(2)),
(integer_mod.Mod(5, pr), integer.Integer(2**(r - 2)))]
# odd prime
return [(integer_mod.Mod(primitive_root(pr, check=False), pr),
integer.Integer(p**(r - 1) * (p - 1)))]
@richcmp_method
class IntegerModRing_generic(quotient_ring.QuotientRing_generic):
"""
The ring of integers modulo `N`.
INPUT:
- ``order`` -- an integer
- ``category`` -- a subcategory of ``CommutativeRings()`` (the default)
OUTPUT:
The ring of integers modulo `N`.
EXAMPLES:
First we compute with integers modulo `29`.
::
sage: FF = IntegerModRing(29)
sage: FF
Ring of integers modulo 29
sage: FF.category()
Join of Category of finite commutative rings
and Category of subquotients of monoids
and Category of quotients of semigroups
and Category of finite enumerated sets
sage: FF.is_field()
True
sage: FF.characteristic()
29
sage: FF.order()
29
sage: gens = FF.unit_gens()
sage: a = gens[0]
sage: a
2
sage: a.is_square()
False
sage: def pow(i): return a**i
sage: [pow(i) for i in range(16)]
[1, 2, 4, 8, 16, 3, 6, 12, 24, 19, 9, 18, 7, 14, 28, 27]
sage: TestSuite(FF).run()
We have seen above that an integer mod ring is, by default, not
initialised as an object in the category of fields. However, one
can force it to be. Moreover, testing containment in the category
    of fields may re-initialise the category of the integer mod ring::
sage: F19 = IntegerModRing(19, is_field=True)
sage: F19.category().is_subcategory(Fields())
True
sage: F23 = IntegerModRing(23)
sage: F23.category().is_subcategory(Fields())
False
sage: F23 in Fields()
True
sage: F23.category().is_subcategory(Fields())
True
sage: TestSuite(F19).run()
sage: TestSuite(F23).run()
By :trac:`15229`, there is a unique instance of the
integral quotient ring of a given order. Using the
:func:`IntegerModRing` factory twice, and using
``is_field=True`` the second time, will update the
category of the unique instance::
sage: F31a = IntegerModRing(31)
sage: F31a.category().is_subcategory(Fields())
False
sage: F31b = IntegerModRing(31, is_field=True)
sage: F31a is F31b
True
sage: F31a.category().is_subcategory(Fields())
True
Next we compute with the integers modulo `16`.
::
sage: Z16 = IntegerModRing(16)
sage: Z16.category()
Join of Category of finite commutative rings
and Category of subquotients of monoids
and Category of quotients of semigroups
and Category of finite enumerated sets
sage: Z16.is_field()
False
sage: Z16.order()
16
sage: Z16.characteristic()
16
sage: gens = Z16.unit_gens()
sage: gens
(15, 5)
sage: a = gens[0]
sage: b = gens[1]
sage: def powa(i): return a**i
sage: def powb(i): return b**i
sage: gp_exp = FF.unit_group_exponent()
sage: gp_exp
28
sage: [powa(i) for i in range(15)]
[1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1]
sage: [powb(i) for i in range(15)]
[1, 5, 9, 13, 1, 5, 9, 13, 1, 5, 9, 13, 1, 5, 9]
sage: a.multiplicative_order()
2
sage: b.multiplicative_order()
4
sage: TestSuite(Z16).run()
Saving and loading::
sage: R = Integers(100000)
sage: TestSuite(R).run() # long time (17s on sage.math, 2011)
Testing ideals and quotients::
sage: Z10 = Integers(10)
sage: I = Z10.principal_ideal(0)
sage: Z10.quotient(I) == Z10
True
sage: I = Z10.principal_ideal(2)
sage: Z10.quotient(I) == Z10
False
sage: I.is_prime()
True
::
sage: R = IntegerModRing(97)
sage: a = R(5)
sage: a**(10^62)
61
"""
def __init__(self, order, cache=None, category=None):
"""
Create with the command ``IntegerModRing(order)``.
TESTS::
sage: FF = IntegerModRing(29)
sage: TestSuite(FF).run()
sage: F19 = IntegerModRing(19, is_field=True)
sage: TestSuite(F19).run()
sage: F23 = IntegerModRing(23)
sage: F23 in Fields()
True
sage: TestSuite(F23).run()
sage: Z16 = IntegerModRing(16)
sage: TestSuite(Z16).run()
sage: R = Integers(100000)
sage: TestSuite(R).run() # long time (17s on sage.math, 2011)
sage: R = IntegerModRing(18)
sage: R.is_finite()
True
"""
order = ZZ(order)
if order <= 0:
raise ZeroDivisionError("order must be positive")
self.__order = order
self._pyx_order = integer_mod.NativeIntStruct(order)
global default_category
if category is None:
category = default_category
else:
# If the category is given, e.g., as Fields(), then we still
# know that the result will also live in default_category.
# Hence, we use the join of the default and the given category.
category = category.join([category,default_category])
# Give the generator a 'name' to make quotients work. The
# name 'x' is used because it's also used for the ring of
# integers: see the __init__ method for IntegerRing_class in
# sage/rings/integer_ring.pyx.
quotient_ring.QuotientRing_generic.__init__(self, ZZ, ZZ.ideal(order),
names=('x',),
category=category)
# We want that the ring is its own base ring.
self._base = self
if cache is None:
cache = order < 500
if cache:
self._precompute_table()
self._zero_element = integer_mod.IntegerMod(self, 0)
self._one_element = integer_mod.IntegerMod(self, 1)
def _macaulay2_init_(self, macaulay2=None):
"""
EXAMPLES::
sage: macaulay2(Integers(7)) # optional - macaulay2
ZZ
--
7
::
sage: macaulay2(Integers(10)) # optional - macaulay2
Traceback (most recent call last):
...
TypeError: Error evaluating Macaulay2 code.
IN:...
OUT:...error: ZZ/n not implemented yet for composite n...
"""
return "ZZ/{}".format(self.order())
def _axiom_init_(self):
"""
Returns a string representation of self in (Pan)Axiom.
EXAMPLES::
sage: Z7 = Integers(7)
sage: Z7._axiom_init_()
'IntegerMod(7)'
sage: axiom(Z7) #optional - axiom
IntegerMod 7
sage: fricas(Z7) #optional - fricas
IntegerMod(7)
"""
return 'IntegerMod({})'.format(self.order())
_fricas_init_ = _axiom_init_
def krull_dimension(self):
"""
Return the Krull dimension of ``self``.
EXAMPLES::
sage: Integers(18).krull_dimension()
0
"""
return integer.Integer(0)
def is_noetherian(self):
"""
Check if ``self`` is a Noetherian ring.
EXAMPLES::
sage: Integers(8).is_noetherian()
True
"""
return True
def extension(self, poly, name=None, names=None, **kwds):
"""
Return an algebraic extension of ``self``. See
:meth:`sage.rings.ring.CommutativeRing.extension()` for more
information.
EXAMPLES::
sage: R.<t> = QQ[]
sage: Integers(8).extension(t^2 - 3)
Univariate Quotient Polynomial Ring in t over Ring of integers modulo 8 with modulus t^2 + 5
"""
if self.modulus() == 1:
return self
from sage.rings.ring import CommutativeRing
return CommutativeRing.extension(self, poly, name, names, **kwds)
@cached_method
def is_prime_field(self):
"""
Return ``True`` if the order is prime.
EXAMPLES::
sage: Zmod(7).is_prime_field()
True
sage: Zmod(8).is_prime_field()
False
"""
return self.__order.is_prime()
def _precompute_table(self):
"""
Computes a table of elements so that elements are unique.
EXAMPLES::
sage: R = Zmod(500); R._precompute_table()
sage: R(7) + R(13) is R(3) + R(17)
True
"""
self._pyx_order.precompute_table(self)
def list_of_elements_of_multiplicative_group(self):
"""
Return a list of all invertible elements, as python ints.
EXAMPLES::
sage: R = Zmod(12)
sage: L = R.list_of_elements_of_multiplicative_group(); L
[1, 5, 7, 11]
sage: type(L[0])
<... 'int'>
sage: Zmod(1).list_of_elements_of_multiplicative_group()
[0]
"""
import sage.rings.fast_arith as a
if self.__order <= 46340: # todo: don't hard code
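            # 46340 == floor(sqrt(2**31 - 1)), the largest integer whose
            # square still fits in a signed 32-bit C int.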
gcd = a.arith_int().gcd_int
elif self.__order <= 2147483647: # todo: don't hard code
gcd = a.arith_llong().gcd_longlong
else:
raise NotImplementedError("list_of_elements_of_multiplicative_group() is not implemented for large moduli")
N = self.__order
# Don't use N.coprime_integers() here because we want Python ints
return [i for i in range(N) if gcd(i, N) == 1]
@cached_method
def multiplicative_subgroups(self):
r"""
Return generators for each subgroup of `(\ZZ/N\ZZ)^*`.
EXAMPLES::
sage: Integers(5).multiplicative_subgroups()
((2,), (4,), ())
sage: Integers(15).multiplicative_subgroups()
((11, 7), (11, 4), (2,), (11,), (14,), (7,), (4,), ())
sage: Integers(2).multiplicative_subgroups()
((),)
sage: len(Integers(341).multiplicative_subgroups())
80
TESTS::
sage: IntegerModRing(1).multiplicative_subgroups()
((),)
sage: IntegerModRing(2).multiplicative_subgroups()
((),)
sage: IntegerModRing(3).multiplicative_subgroups()
((2,), ())
"""
return tuple(tuple(g.value() for g in H.gens())
for H in self.unit_group().subgroups())
def is_integral_domain(self, proof=None):
"""
Return ``True`` if and only if the order of ``self`` is prime.
EXAMPLES::
sage: Integers(389).is_integral_domain()
True
sage: Integers(389^2).is_integral_domain()
False
TESTS:
Check that :trac:`17453` is fixed::
sage: R = Zmod(5)
sage: R in IntegralDomains()
True
"""
return self.is_field(proof)
def is_unique_factorization_domain(self, proof=None):
"""
Return ``True`` if and only if the order of ``self`` is prime.
EXAMPLES::
sage: Integers(389).is_unique_factorization_domain()
True
sage: Integers(389^2).is_unique_factorization_domain()
False
"""
return self.is_field(proof)
@cached_method
def is_field(self, proof=None):
r"""
Return True precisely if the order is prime.
INPUT:
- ``proof`` (optional bool or None, default None):
If ``False``, then test whether the category of the quotient
is a subcategory of ``Fields()``, or do a probabilistic
primality test. If ``None``, then test the category and then
do a primality test according to the global arithmetic proof
settings. If True, do a deterministic primality test.
If it is found (perhaps probabilistically) that the ring is a field,
then the category of the ring is refined to include the category
of fields. This may change the Python class of the ring!
EXAMPLES::
sage: R = IntegerModRing(18)
sage: R.is_field()
False
sage: FF = IntegerModRing(17)
sage: FF.is_field()
True
By :trac:`15229`, the category of the ring is refined,
if it is found that the ring is in fact a field::
sage: R = IntegerModRing(127)
sage: R.category()
Join of Category of finite commutative rings
and Category of subquotients of monoids
and Category of quotients of semigroups
and Category of finite enumerated sets
sage: R.is_field()
True
sage: R.category()
Join of Category of finite enumerated fields
and Category of subquotients of monoids
and Category of quotients of semigroups
It is possible to mistakenly put `\ZZ/n\ZZ` into the category of fields.
In this case, :meth:`is_field` will return True without performing a
        primality check. However, if the optional argument ``proof=True`` is
provided, primality is tested and the mistake is uncovered in a warning
message::
sage: R = IntegerModRing(21, is_field=True)
sage: R.is_field()
True
sage: R.is_field(proof=True)
Traceback (most recent call last):
...
ValueError: THIS SAGE SESSION MIGHT BE SERIOUSLY COMPROMISED!
The order 21 is not prime, but this ring has been put
into the category of fields. This may already have consequences
in other parts of Sage. Either it was a mistake of the user,
or a probabilistic primality test has failed.
In the latter case, please inform the developers.
To avoid side-effects of this test on other tests, we clear the cache
of the ring factory::
sage: IntegerModRing._cache.clear()
"""
from sage.categories.fields import Fields
if not proof:
if self.category().is_subcategory(Fields()):
return True
is_prime = self.order().is_prime(proof=proof)
if is_prime:
self._refine_category_(Fields())
self._factory_data[3]['category'] = Fields()
else:
if self.category().is_subcategory(Fields()):
raise ValueError("""THIS SAGE SESSION MIGHT BE SERIOUSLY COMPROMISED!
The order {} is not prime, but this ring has been put
into the category of fields. This may already have consequences
in other parts of Sage. Either it was a mistake of the user,
or a probabilistic primality test has failed.
In the latter case, please inform the developers.""".format(self.order()))
return is_prime
@cached_method
def field(self):
"""
If this ring is a field, return the corresponding field as a finite
field, which may have extra functionality and structure. Otherwise,
raise a ``ValueError``.
EXAMPLES::
sage: R = Integers(7); R
Ring of integers modulo 7
sage: R.field()
Finite Field of size 7
sage: R = Integers(9)
sage: R.field()
Traceback (most recent call last):
...
ValueError: self must be a field
"""
try:
return self.__field
except AttributeError:
if not self.is_field():
raise ValueError("self must be a field")
from . import finite_field_constructor
k = finite_field_constructor.FiniteField(self.order())
self.__field = k
return k
def _pseudo_fraction_field(self):
"""
If ``self`` is composite, we may still want to do division by elements
of ``self``.
EXAMPLES::
sage: Integers(15).fraction_field()
Traceback (most recent call last):
...
TypeError: self must be an integral domain.
sage: Integers(15)._pseudo_fraction_field()
Ring of integers modulo 15
sage: R.<x> = Integers(15)[]
sage: (x+5)/2
8*x + 10
This should be very fast::
sage: R.<x> = Integers(next_prime(10^101)*next_prime(10^100))[]
sage: x / R.base_ring()(2)
500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000013365000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000401*x
"""
return self
@cached_method
def multiplicative_group_is_cyclic(self):
"""
Return ``True`` if the multiplicative group of this field is cyclic.
This is the case exactly when the order is less than 8, a power
of an odd prime, or twice a power of an odd prime.
EXAMPLES::
sage: R = Integers(7); R
Ring of integers modulo 7
sage: R.multiplicative_group_is_cyclic()
True
sage: R = Integers(9)
sage: R.multiplicative_group_is_cyclic()
True
sage: Integers(8).multiplicative_group_is_cyclic()
False
sage: Integers(4).multiplicative_group_is_cyclic()
True
sage: Integers(25*3).multiplicative_group_is_cyclic()
False
We test that :trac:`5250` is fixed::
sage: Integers(162).multiplicative_group_is_cyclic()
True
"""
n = self.order()
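        # (Z/nZ)^* is cyclic exactly when n is 1, 2, 4, p^k or 2*p^k for an
        # odd prime p; n == 8 is the smallest modulus for which it fails.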
if n < 8:
return True
if n % 4 == 0:
return False # know n > 7, so n=4 case not a problem
if n % 4 == 2:
n = n // 2
return n.is_prime_power()
@cached_method
def multiplicative_generator(self):
"""
Return a generator for the multiplicative group of this ring,
assuming the multiplicative group is cyclic.
Use the unit_gens function to obtain generators even in the
non-cyclic case.
EXAMPLES::
sage: R = Integers(7); R
Ring of integers modulo 7
sage: R.multiplicative_generator()
3
sage: R = Integers(9)
sage: R.multiplicative_generator()
2
sage: Integers(8).multiplicative_generator()
Traceback (most recent call last):
...
ValueError: multiplicative group of this ring is not cyclic
sage: Integers(4).multiplicative_generator()
3
sage: Integers(25*3).multiplicative_generator()
Traceback (most recent call last):
...
ValueError: multiplicative group of this ring is not cyclic
sage: Integers(25*3).unit_gens()
(26, 52)
sage: Integers(162).unit_gens()
(83,)
"""
try:
return self.__mult_gen
except AttributeError:
if self.is_field():
a = self(self.field().multiplicative_generator())
self.__mult_gen = a
return a
if self.multiplicative_group_is_cyclic():
v = self.unit_gens()
if len(v) != 1:
raise ArithmeticError
return v[0]
raise ValueError("multiplicative group of this ring is not cyclic")
def quadratic_nonresidue(self):
"""
Return a quadratic non-residue in ``self``.
EXAMPLES::
sage: R = Integers(17)
sage: R.quadratic_nonresidue()
3
sage: R(3).is_square()
False
"""
try:
return self._nonresidue
except AttributeError:
for a in self:
if not a.is_square():
self._nonresidue = a
return a
def square_roots_of_one(self):
"""
Return all square roots of 1 in self, i.e., all solutions to
`x^2 - 1 = 0`.
OUTPUT:
The square roots of 1 in ``self`` as a tuple.
EXAMPLES::
sage: R = Integers(2^10)
sage: [x for x in R if x^2 == 1]
[1, 511, 513, 1023]
sage: R.square_roots_of_one()
(1, 511, 513, 1023)
::
sage: v = Integers(9*5).square_roots_of_one(); v
(1, 19, 26, 44)
sage: [x^2 for x in v]
[1, 1, 1, 1]
sage: v = Integers(9*5*8).square_roots_of_one(); v
(1, 19, 71, 89, 91, 109, 161, 179, 181, 199, 251, 269, 271, 289, 341, 359)
sage: [x^2 for x in v]
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
"""
try:
return self.__square_roots_of_one
except AttributeError:
pass
n = self.__order
if n.is_prime_power():
if n % 2 == 0:
# power of 2
if n == 2:
v = [self(1)]
elif n == 4:
v = [self(1), self(3)]
else: # n >= 8
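                    # For n = 2^r with r >= 3 there are exactly four
                    # square roots of 1: 1, -1, n/2 - 1 and n/2 + 1.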
half_ord = n//2
v = [self(1), self(-1), self(half_ord-1), self(half_ord+1)]
else:
v = [self(1), self(-1)]
else:
# Reduce to the prime power case.
F = self.factored_order()
vmod = []
moduli = []
for p, e in F:
k = p**e
R = IntegerModRing(p**e)
w = [self(x) for x in R.square_roots_of_one()]
vmod.append(w)
moduli.append(k)
# Now combine in all possible ways using the CRT
basis = CRT_basis(moduli)
from sage.misc.mrange import cartesian_product_iterator
v = []
for x in cartesian_product_iterator(vmod):
# x is a specific choice of roots modulo each prime power divisor
a = sum([basis[i]*x[i] for i in range(len(x))])
v.append(a)
#end for
#end if
v.sort()
v = tuple(v)
self.__square_roots_of_one = v
return v
@cached_method
def factored_order(self):
"""
EXAMPLES::
sage: R = IntegerModRing(18)
sage: FF = IntegerModRing(17)
sage: R.factored_order()
2 * 3^2
sage: FF.factored_order()
17
"""
return factor(self.__order, int_=(self.__order < 2**31))
def factored_unit_order(self):
r"""
Return a list of :class:`Factorization` objects, each the factorization
of the order of the units in a `\ZZ / p^n \ZZ` component of this group
(using the Chinese Remainder Theorem).
EXAMPLES::
sage: R = Integers(8*9*25*17*29)
sage: R.factored_unit_order()
[2^2, 2 * 3, 2^2 * 5, 2^4, 2^2 * 7]
"""
ans = []
from sage.structure.factorization import Factorization
for p, e in self.factored_order():
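            # The unit group of Z/p^e Z has order p^(e-1)*(p-1); record it
            # as the factorization p^(e-1) * factor(p - 1).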
ans.append(Factorization([(p,e-1)]) * factor(p-1, int_=(self.__order < 2**31)))
return ans
def characteristic(self):
"""
EXAMPLES::
sage: R = IntegerModRing(18)
sage: FF = IntegerModRing(17)
sage: FF.characteristic()
17
sage: R.characteristic()
18
"""
return self.__order
def _repr_(self):
"""
String representation.
EXAMPLES::
sage: Zmod(87)
Ring of integers modulo 87
"""
return "Ring of integers modulo {}".format(self.__order)
def _latex_(self):
r"""
Latex representation.
EXAMPLES::
sage: latex(Zmod(87))
\ZZ/87\ZZ
"""
return "\\ZZ/{}\\ZZ".format(self.__order)
def modulus(self):
r"""
Return the polynomial `x - 1` over this ring.
.. NOTE::
This function exists for consistency with the finite-field
modulus function.
EXAMPLES::
sage: R = IntegerModRing(18)
sage: R.modulus()
x + 17
sage: R = IntegerModRing(17)
sage: R.modulus()
x + 16
"""
try:
return self.__modulus
except AttributeError:
x = self['x'].gen()
self.__modulus = x - 1
return self.__modulus
def order(self):
"""
Return the order of this ring.
EXAMPLES::
sage: Zmod(87).order()
87
"""
return self.__order
def cardinality(self):
"""
Return the cardinality of this ring.
EXAMPLES::
sage: Zmod(87).cardinality()
87
"""
return self.order()
def _pari_order(self):
"""
Return the pari integer representing the order of this ring.
EXAMPLES::
sage: Zmod(87)._pari_order()
87
"""
try:
return self.__pari_order
except AttributeError:
self.__pari_order = pari(self.order())
return self.__pari_order
def _element_constructor_(self, x):
"""
TESTS::
sage: K2 = GF(2)
sage: K3 = GF(3)
sage: K8 = GF(8,'a')
sage: K8(5) # indirect doctest
1
sage: K8('a+1')
a + 1
sage: K8(K2(1))
1
The following test refers to :trac:`6468`::
sage: class foo_parent(Parent):
....: pass
sage: class foo(RingElement):
....: def lift(self):
....: raise PariError
sage: P = foo_parent()
sage: F = foo(P)
sage: GF(2)(F)
Traceback (most recent call last):
...
TypeError: error coercing to finite field
The following test refers to :trac:`8970`::
sage: R = Zmod(13); a = R(2)
sage: a == R(gap(a))
True
libgap interface (:trac:`23714`)::
sage: a = libgap.eval("Z(13)^2")
sage: a.sage()
4
sage: libgap(a.sage()) == a
True
"""
try:
return integer_mod.IntegerMod(self, x)
except (NotImplementedError, PariError):
raise TypeError("error coercing to finite field")
except TypeError:
if sage.interfaces.gap.is_GapElement(x):
from sage.interfaces.gap import intmod_gap_to_sage
y = intmod_gap_to_sage(x)
return integer_mod.IntegerMod(self, y)
raise # Continue up with the original TypeError
def __iter__(self):
"""
EXAMPLES::
sage: R = IntegerModRing(3)
sage: for i in R:
....: print(i)
0
1
2
sage: L = [i for i in R]
sage: L[0].parent()
Ring of integers modulo 3
"""
i = 0
order = int(self.__order)
while i < order:
yield self(i)
i = i + 1
def _coerce_map_from_(self, S):
r"""
EXAMPLES::
sage: R = Integers(15)
sage: f = R.coerce_map_from(Integers(450)); f # indirect doctest
Natural morphism:
From: Ring of integers modulo 450
To: Ring of integers modulo 15
sage: f(-1)
14
sage: f = R.coerce_map_from(int); f
Native morphism:
From: Set of Python objects of class 'int'
To: Ring of integers modulo 15
sage: f(-1r)
14
sage: f = R.coerce_map_from(ZZ); f
Natural morphism:
From: Integer Ring
To: Ring of integers modulo 15
sage: f(-1)
14
sage: f = R.coerce_map_from(Integers(10)); print(f)
None
sage: f = R.coerce_map_from(QQ); print(f)
None
sage: R = IntegerModRing(17)
sage: a = R(3)
sage: b = R._coerce_(3)
sage: b
3
sage: a==b
True
This is allowed::
sage: R(2/3)
12
But this is not, since there is no (canonical or not!) ring
homomorphism from `\QQ` to `\GF{17}`.
::
sage: R._coerce_(2/3)
Traceback (most recent call last):
...
TypeError: no canonical coercion from Rational Field to Ring of integers modulo 17
We do not allow the coercion ``GF(p) -> Z/pZ``, because in case of a
canonical isomorphism, there is a coercion map in only one
direction, i.e., to the object in the smaller category.
"""
if S is int:
return integer_mod.Int_to_IntegerMod(self)
elif S is integer_ring.ZZ:
return integer_mod.Integer_to_IntegerMod(self)
elif isinstance(S, IntegerModRing_generic):
if isinstance(S, ring.Field):
return None
try:
return integer_mod.IntegerMod_to_IntegerMod(S, self)
except TypeError:
pass
to_ZZ = integer_ring.ZZ._internal_coerce_map_from(S)
if to_ZZ is not None:
return integer_mod.Integer_to_IntegerMod(self) * to_ZZ
def _convert_map_from_(self, other):
"""
Conversion from p-adic fields.
EXAMPLES::
sage: Zmod(81).convert_map_from(Qp(3))
Reduction morphism:
From: 3-adic Field with capped relative precision 20
To: Ring of integers modulo 81
"""
from sage.rings.padics.padic_generic import pAdicGeneric, ResidueReductionMap
if isinstance(other, pAdicGeneric) and other.degree() == 1:
p = other.prime()
N = self.cardinality()
n = N.exact_log(p)
if p**n == N:
return ResidueReductionMap._create_(other, self)
def __richcmp__(self, other, op):
"""
EXAMPLES::
sage: Z11 = IntegerModRing(11); Z11
Ring of integers modulo 11
sage: Z12 = IntegerModRing(12); Z12
Ring of integers modulo 12
sage: Z13 = IntegerModRing(13); Z13
Ring of integers modulo 13
sage: F = GF(11); F
Finite Field of size 11
sage: Z11 == Z11, Z11 == Z12, Z11 == Z13, Z11 == F
(True, False, False, False)
In :trac:`15229`, the following was implemented::
sage: R1 = IntegerModRing(5)
sage: R2 = IntegerModRing(5, is_field=True)
sage: R1 is R2 # used to return False
True
sage: R2 == GF(5)
False
"""
        # We want GF(p) and IntegerModRing(p) to compare as unequal.
# However, we cannot just compare the types, since the
# choice of a different category also changes the type.
# But if we go to the base class, we avoid the influence
# of the category.
try:
c = bool(other.__class__.__base__ != self.__class__.__base__)
        except AttributeError: # __base__ does not always exist
c = bool(type(other) != type(self))
if c:
return NotImplemented
return richcmp(self.__order, other.__order, op)
def unit_gens(self, **kwds):
r"""
Returns generators for the unit group `(\ZZ/N\ZZ)^*`.
We compute the list of generators using a deterministic algorithm, so
the generators list will always be the same. For each odd prime divisor
of `N` there will be exactly one corresponding generator; if `N` is
even there will be 0, 1 or 2 generators according to whether 2 divides
`N` to order 1, 2 or `\geq 3`.
OUTPUT:
A tuple containing the units of ``self``.
EXAMPLES::
sage: R = IntegerModRing(18)
sage: R.unit_gens()
(11,)
sage: R = IntegerModRing(17)
sage: R.unit_gens()
(3,)
sage: IntegerModRing(next_prime(10^30)).unit_gens()
(5,)
The choice of generators is affected by the optional keyword
``algorithm``; this can be ``'sage'`` (default) or ``'pari'``.
See :meth:`unit_group` for details.
sage: A = Zmod(55)
sage: A.unit_gens(algorithm='sage')
(12, 46)
sage: A.unit_gens(algorithm='pari')
(2, 21)
TESTS::
sage: IntegerModRing(2).unit_gens()
()
sage: IntegerModRing(4).unit_gens()
(3,)
sage: IntegerModRing(8).unit_gens()
(7, 5)
"""
return self.unit_group(**kwds).gens_values()
def unit_group_exponent(self):
"""
EXAMPLES::
sage: R = IntegerModRing(17)
sage: R.unit_group_exponent()
16
sage: R = IntegerModRing(18)
sage: R.unit_group_exponent()
6
"""
return self.unit_group().exponent()
def unit_group_order(self):
"""
Return the order of the unit group of this residue class ring.
EXAMPLES::
sage: R = Integers(500)
sage: R.unit_group_order()
200
"""
return self.unit_group().order()
@cached_method
def unit_group(self, algorithm='sage'):
r"""
Return the unit group of ``self``.
INPUT:
- ``self`` -- the ring `\ZZ/n\ZZ` for a positive integer `n`
- ``algorithm`` -- either ``'sage'`` (default) or ``'pari'``
OUTPUT:
The unit group of ``self``. This is a finite Abelian group
equipped with a distinguished set of generators, which is
computed using a deterministic algorithm depending on the
``algorithm`` parameter.
- If ``algorithm == 'sage'``, the generators correspond to the
prime factors `p \mid n` (one generator for each odd `p`;
the number of generators for `p = 2` is 0, 1 or 2 depending
on the order to which 2 divides `n`).
- If ``algorithm == 'pari'``, the generators are chosen such
that their orders form a decreasing sequence with respect to
divisibility.
EXAMPLES:
The output of the algorithms ``'sage'`` and ``'pari'`` can
differ in various ways. In the following example, the same
cyclic factors are computed, but in a different order::
sage: A = Zmod(15)
sage: G = A.unit_group(); G
Multiplicative Abelian group isomorphic to C2 x C4
sage: G.gens_values()
(11, 7)
sage: H = A.unit_group(algorithm='pari'); H
Multiplicative Abelian group isomorphic to C4 x C2
sage: H.gens_values()
(7, 11)
Here are two examples where the cyclic factors are isomorphic,
but are ordered differently and have different generators::
sage: A = Zmod(40)
sage: G = A.unit_group(); G
Multiplicative Abelian group isomorphic to C2 x C2 x C4
sage: G.gens_values()
(31, 21, 17)
sage: H = A.unit_group(algorithm='pari'); H
Multiplicative Abelian group isomorphic to C4 x C2 x C2
sage: H.gens_values()
(17, 31, 21)
sage: A = Zmod(192)
sage: G = A.unit_group(); G
Multiplicative Abelian group isomorphic to C2 x C16 x C2
sage: G.gens_values()
(127, 133, 65)
sage: H = A.unit_group(algorithm='pari'); H
Multiplicative Abelian group isomorphic to C16 x C2 x C2
sage: H.gens_values()
(133, 127, 65)
In the following examples, the cyclic factors are not even
isomorphic::
sage: A = Zmod(319)
sage: A.unit_group()
Multiplicative Abelian group isomorphic to C10 x C28
sage: A.unit_group(algorithm='pari')
Multiplicative Abelian group isomorphic to C140 x C2
sage: A = Zmod(30.factorial())
sage: A.unit_group()
Multiplicative Abelian group isomorphic to C2 x C16777216 x C3188646 x C62500 x C2058 x C110 x C156 x C16 x C18 x C22 x C28
sage: A.unit_group(algorithm='pari')
Multiplicative Abelian group isomorphic to C20499647385305088000000 x C55440 x C12 x C12 x C4 x C2 x C2 x C2 x C2 x C2 x C2
TESTS:
We test the cases where the unit group is trivial::
sage: A = Zmod(1)
sage: A.unit_group()
Trivial Abelian group
sage: A.unit_group(algorithm='pari')
Trivial Abelian group
sage: A = Zmod(2)
sage: A.unit_group()
Trivial Abelian group
sage: A.unit_group(algorithm='pari')
Trivial Abelian group
sage: Zmod(3).unit_group(algorithm='bogus')
Traceback (most recent call last):
...
ValueError: unknown algorithm 'bogus' for computing the unit group
"""
from sage.groups.abelian_gps.values import AbelianGroupWithValues
if algorithm == 'sage':
n = self.order()
gens = []
orders = []
for p, r in self.factored_order():
m = n/(p**r)
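                # CRT-lift each generator of (Z/p^r Z)^* to an element of
                # (Z/nZ)^* that is congruent to 1 modulo n/p^r.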
for g, o in _unit_gens_primepowercase(p, r):
x = g.crt(integer_mod.Mod(1, m))
gens.append(x)
orders.append(o)
elif algorithm == 'pari':
_, orders, gens = self.order().__pari__().znstar()
gens = [self(g) for g in gens]
orders = [integer.Integer(o) for o in orders]
else:
raise ValueError('unknown algorithm %r for computing the unit group' % algorithm)
return AbelianGroupWithValues(gens, orders, values_group=self)
def random_element(self, bound=None):
"""
Return a random element of this ring.
INPUT:
        - ``bound``, a positive integer or ``None`` (the default). If given,
          return the coercion of a random integer in the interval
          ``[-bound, bound]`` into this ring.
EXAMPLES::
sage: R = IntegerModRing(18)
sage: R.random_element()
2
We test ``bound``-option::
sage: R.random_element(2) in [R(16), R(17), R(0), R(1), R(2)]
True
"""
if bound is not None:
return ring.CommutativeRing.random_element(self, bound)
a = random.randint(0,self.order()-1)
return self(a)
#######################################################
    # Support for interfaces
#######################################################
def _gap_init_(self):
"""
EXAMPLES::
sage: R = Integers(12345678900)
sage: R
Ring of integers modulo 12345678900
sage: gap(R) # indirect doctest
(Integers mod 12345678900)
"""
return 'ZmodnZ({})'.format(self.order())
def _magma_init_(self, magma):
"""
EXAMPLES::
sage: R = Integers(12345678900)
sage: R
Ring of integers modulo 12345678900
sage: magma(R) # indirect doctest, optional - magma
Residue class ring of integers modulo 12345678900
"""
return 'Integers({})'.format(self.order())
def degree(self):
"""
Return 1.
EXAMPLES::
sage: R = Integers(12345678900)
sage: R.degree()
1
"""
return integer.Integer(1)
Zmod = IntegerModRing
Integers = IntegerModRing
# Register unpickling methods for backward compatibility.
from sage.misc.persist import register_unpickle_override
register_unpickle_override('sage.rings.integer_mod_ring', 'IntegerModRing_generic', IntegerModRing_generic)
def crt(v):
"""
INPUT:
    - ``v`` -- (list) elements of ``rings.IntegerMod(n)``, for
various coprime moduli ``n``
EXAMPLES::
sage: from sage.rings.finite_rings.integer_mod_ring import crt
sage: crt([mod(3, 8),mod(1,19),mod(7, 15)])
1027
"""
if len(v) == 0:
return IntegerModRing(1)(1)
x = v[0]
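    # Fold the list with pairwise CRT; each IntegerMod.crt call combines
    # two residues with coprime moduli into one modulo the product.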
for i in range(1,len(v)):
x = x.crt(v[i])
return x
|
[
"sage.misc.mrange.cartesian_product_iterator",
"sage.rings.fast_arith.arith_llong",
"sage.categories.finite_enumerated_sets.FiniteEnumeratedSets",
"sage.categories.commutative_rings.CommutativeRings",
"sage.rings.ring.CommutativeRing.random_element",
"sage.rings.integer_ring.IntegerRing",
"sage.rings.ring.CommutativeRing.extension",
"sage.structure.factorization.Factorization",
"sage.arith.all.factor",
"sage.misc.persist.register_unpickle_override",
"sage.groups.abelian_gps.values.AbelianGroupWithValues",
"sage.rings.fast_arith.arith_int",
"sage.rings.integer_ring.ZZ._internal_coerce_map_from",
"sage.rings.padics.padic_generic.ResidueReductionMap._create_",
"sage.arith.all.primitive_root",
"sage.structure.richcmp.richcmp",
"sage.rings.fast_arith.is_square",
"sage.rings.integer.Integer",
"sage.categories.fields.Fields",
"sage.interfaces.gap.intmod_gap_to_sage",
"sage.arith.all.CRT_basis"
] |
[((8479, 8505), 'sage.rings.integer_ring.IntegerRing', 'integer_ring.IntegerRing', ([], {}), '()\n', (8503, 8505), True, 'import sage.rings.integer_ring as integer_ring\n'), ((49997, 50108), 'sage.misc.persist.register_unpickle_override', 'register_unpickle_override', (['"""sage.rings.integer_mod_ring"""', '"""IntegerModRing_generic"""', 'IntegerModRing_generic'], {}), "('sage.rings.integer_mod_ring',\n 'IntegerModRing_generic', IntegerModRing_generic)\n", (50023, 50108), False, 'from sage.misc.persist import register_unpickle_override\n'), ((8429, 8447), 'sage.categories.commutative_rings.CommutativeRings', 'CommutativeRings', ([], {}), '()\n', (8445, 8447), False, 'from sage.categories.commutative_rings import CommutativeRings\n'), ((8449, 8471), 'sage.categories.finite_enumerated_sets.FiniteEnumeratedSets', 'FiniteEnumeratedSets', ([], {}), '()\n', (8469, 8471), False, 'from sage.categories.finite_enumerated_sets import FiniteEnumeratedSets\n'), ((16672, 16690), 'sage.rings.integer.Integer', 'integer.Integer', (['(0)'], {}), '(0)\n', (16687, 16690), True, 'import sage.rings.integer as integer\n'), ((17459, 17517), 'sage.rings.ring.CommutativeRing.extension', 'CommutativeRing.extension', (['self', 'poly', 'name', 'names'], {}), '(self, poly, name, names, **kwds)\n', (17484, 17517), False, 'from sage.rings.ring import CommutativeRing\n'), ((32230, 32279), 'sage.arith.all.factor', 'factor', (['self.__order'], {'int_': '(self.__order < 2 ** 31)'}), '(self.__order, int_=self.__order < 2 ** 31)\n', (32236, 32279), False, 'from sage.arith.all import factor, primitive_root, CRT_basis\n'), ((39191, 39235), 'sage.rings.integer_ring.ZZ._internal_coerce_map_from', 'integer_ring.ZZ._internal_coerce_map_from', (['S'], {}), '(S)\n', (39232, 39235), True, 'import sage.rings.integer_ring as integer_ring\n'), ((41339, 41379), 'sage.structure.richcmp.richcmp', 'richcmp', (['self.__order', 'other.__order', 'op'], {}), '(self.__order, other.__order, op)\n', (41346, 41379), False, 'from sage.structure.richcmp import richcmp, richcmp_method\n'), ((48014, 48069), 'sage.groups.abelian_gps.values.AbelianGroupWithValues', 'AbelianGroupWithValues', (['gens', 'orders'], {'values_group': 'self'}), '(gens, orders, values_group=self)\n', (48036, 48069), False, 'from sage.groups.abelian_gps.values import AbelianGroupWithValues\n'), ((49812, 49830), 'sage.rings.integer.Integer', 'integer.Integer', (['(1)'], {}), '(1)\n', (49827, 49830), True, 'import sage.rings.integer as integer\n'), ((7487, 7519), 'sage.rings.integer_ring.IntegerRing', 'integer_ring.IntegerRing', ([], {}), '(**kwds)\n', (7511, 7519), True, 'import sage.rings.integer_ring as integer_ring\n'), ((9328, 9367), 'sage.rings.integer.Integer', 'integer.Integer', (['(p ** (r - 1) * (p - 1))'], {}), '(p ** (r - 1) * (p - 1))\n', (9343, 9367), True, 'import sage.rings.integer as integer\n'), ((23968, 23976), 'sage.categories.fields.Fields', 'Fields', ([], {}), '()\n', (23974, 23976), False, 'from sage.categories.fields import Fields\n'), ((31462, 31479), 'sage.arith.all.CRT_basis', 'CRT_basis', (['moduli'], {}), '(moduli)\n', (31471, 31479), False, 'from sage.arith.all import factor, primitive_root, CRT_basis\n'), ((31588, 31620), 'sage.misc.mrange.cartesian_product_iterator', 'cartesian_product_iterator', (['vmod'], {}), '(vmod)\n', (31614, 31620), False, 'from sage.misc.mrange import cartesian_product_iterator\n'), ((48674, 48722), 'sage.rings.ring.CommutativeRing.random_element', 'ring.CommutativeRing.random_element', (['self', 'bound'], {}), '(self, bound)\n', (48709, 48722), True, 'import sage.rings.ring as ring\n'), ((9139, 9157), 'sage.rings.integer.Integer', 'integer.Integer', (['(2)'], {}), '(2)\n', (9154, 9157), True, 'import sage.rings.integer as integer\n'), ((9201, 9230), 'sage.rings.integer.Integer', 'integer.Integer', (['(2 ** (r - 2))'], {}), '(2 ** (r - 2))\n', (9216, 9230), True, 'import sage.rings.integer as integer\n'), ((9277, 9308), 'sage.arith.all.primitive_root', 'primitive_root', (['pr'], {'check': '(False)'}), '(pr, check=False)\n', (9291, 9308), False, 'from sage.arith.all import factor, primitive_root, CRT_basis\n'), ((18684, 18697), 'sage.rings.fast_arith.arith_int', 'a.arith_int', ([], {}), '()\n', (18695, 18697), True, 'import sage.rings.fast_arith as a\n'), ((23761, 23769), 'sage.categories.fields.Fields', 'Fields', ([], {}), '()\n', (23767, 23769), False, 'from sage.categories.fields import Fields\n'), ((23910, 23918), 'sage.categories.fields.Fields', 'Fields', ([], {}), '()\n', (23916, 23918), False, 'from sage.categories.fields import Fields\n'), ((24037, 24045), 'sage.categories.fields.Fields', 'Fields', ([], {}), '()\n', (24043, 24045), False, 'from sage.categories.fields import Fields\n'), ((39955, 39996), 'sage.rings.padics.padic_generic.ResidueReductionMap._create_', 'ResidueReductionMap._create_', (['other', 'self'], {}), '(other, self)\n', (39983, 39996), False, 'from sage.rings.padics.padic_generic import pAdicGeneric, ResidueReductionMap\n'), ((7003, 7011), 'sage.categories.fields.Fields', 'Fields', ([], {}), '()\n', (7009, 7011), False, 'from sage.categories.fields import Fields\n'), ((9076, 9094), 'sage.rings.integer.Integer', 'integer.Integer', (['(2)'], {}), '(2)\n', (9091, 9094), True, 'import sage.rings.integer as integer\n'), ((18791, 18806), 'sage.rings.fast_arith.arith_llong', 'a.arith_llong', ([], {}), '()\n', (18804, 18806), True, 'import sage.rings.fast_arith as a\n'), ((32849, 32876), 'sage.structure.factorization.Factorization', 'Factorization', (['[(p, e - 1)]'], {}), '([(p, e - 1)])\n', (32862, 32876), False, 'from sage.structure.factorization import Factorization\n'), ((32876, 32918), 'sage.arith.all.factor', 'factor', (['(p - 1)'], {'int_': '(self.__order < 2 ** 31)'}), '(p - 1, int_=self.__order < 2 ** 31)\n', (32882, 32918), False, 'from sage.arith.all import factor, primitive_root, CRT_basis\n'), ((36466, 36487), 'sage.interfaces.gap.intmod_gap_to_sage', 'intmod_gap_to_sage', (['x'], {}), '(x)\n', (36484, 36487), False, 'from sage.interfaces.gap import intmod_gap_to_sage\n'), ((47855, 47873), 'sage.rings.integer.Integer', 'integer.Integer', (['o'], {}), '(o)\n', (47870, 47873), True, 'import sage.rings.integer as integer\n'), ((29534, 29547), 'sage.rings.fast_arith.is_square', 'a.is_square', ([], {}), '()\n', (29545, 29547), True, 'import sage.rings.fast_arith as a\n')]
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conv_utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
import numpy as np
from tensorflow.python.frozen_keras.utils import conv_utils
from tensorflow.python.platform import test
def _get_const_output_shape(input_shape, dim):
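  # Clamp each dimension of `input_shape` to at most `dim`; used below to
  # build the expected output shapes in the mask tests.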
return tuple([min(d, dim) for d in input_shape])
input_shapes = [
(0,),
(0, 0),
(1,),
(2,),
(3,),
(1, 0),
(0, 3),
(1, 1),
(1, 2),
(3, 1),
(2, 2),
(3, 3),
(1, 0, 1),
(5, 2, 3),
(3, 5, 6, 7, 0),
(3, 2, 2, 4, 4),
(1, 2, 3, 4, 7, 2),
]
class TestBasicConvUtilsTest(test.TestCase):
def test_convert_data_format(self):
self.assertEqual('NCDHW', conv_utils.convert_data_format(
'channels_first', 5))
self.assertEqual('NCHW', conv_utils.convert_data_format(
'channels_first', 4))
self.assertEqual('NCW', conv_utils.convert_data_format('channels_first', 3))
self.assertEqual('NHWC', conv_utils.convert_data_format('channels_last', 4))
self.assertEqual('NWC', conv_utils.convert_data_format('channels_last', 3))
self.assertEqual('NDHWC', conv_utils.convert_data_format(
'channels_last', 5))
with self.assertRaises(ValueError):
conv_utils.convert_data_format('invalid', 2)
def test_normalize_tuple(self):
self.assertEqual((2, 2, 2),
conv_utils.normalize_tuple(2, n=3, name='strides'))
self.assertEqual((2, 1, 2),
conv_utils.normalize_tuple((2, 1, 2), n=3, name='strides'))
with self.assertRaises(ValueError):
conv_utils.normalize_tuple((2, 1), n=3, name='strides')
with self.assertRaises(ValueError):
conv_utils.normalize_tuple(None, n=3, name='strides')
def test_normalize_data_format(self):
self.assertEqual('channels_last',
conv_utils.normalize_data_format('Channels_Last'))
self.assertEqual('channels_first',
conv_utils.normalize_data_format('CHANNELS_FIRST'))
with self.assertRaises(ValueError):
conv_utils.normalize_data_format('invalid')
def test_normalize_padding(self):
self.assertEqual('same', conv_utils.normalize_padding('SAME'))
self.assertEqual('valid', conv_utils.normalize_padding('VALID'))
with self.assertRaises(ValueError):
conv_utils.normalize_padding('invalid')
def test_conv_output_length(self):
self.assertEqual(4, conv_utils.conv_output_length(4, 2, 'same', 1, 1))
self.assertEqual(2, conv_utils.conv_output_length(4, 2, 'same', 2, 1))
self.assertEqual(3, conv_utils.conv_output_length(4, 2, 'valid', 1, 1))
self.assertEqual(2, conv_utils.conv_output_length(4, 2, 'valid', 2, 1))
self.assertEqual(5, conv_utils.conv_output_length(4, 2, 'full', 1, 1))
self.assertEqual(3, conv_utils.conv_output_length(4, 2, 'full', 2, 1))
self.assertEqual(2, conv_utils.conv_output_length(5, 2, 'valid', 2, 2))
def test_conv_input_length(self):
self.assertEqual(3, conv_utils.conv_input_length(4, 2, 'same', 1))
self.assertEqual(2, conv_utils.conv_input_length(2, 2, 'same', 2))
self.assertEqual(4, conv_utils.conv_input_length(3, 2, 'valid', 1))
self.assertEqual(4, conv_utils.conv_input_length(2, 2, 'valid', 2))
self.assertEqual(3, conv_utils.conv_input_length(4, 2, 'full', 1))
self.assertEqual(4, conv_utils.conv_input_length(3, 2, 'full', 2))
def test_deconv_output_length(self):
self.assertEqual(4, conv_utils.deconv_output_length(4, 2, 'same', stride=1))
self.assertEqual(8, conv_utils.deconv_output_length(4, 2, 'same', stride=2))
self.assertEqual(5, conv_utils.deconv_output_length(
4, 2, 'valid', stride=1))
self.assertEqual(8, conv_utils.deconv_output_length(
4, 2, 'valid', stride=2))
self.assertEqual(3, conv_utils.deconv_output_length(4, 2, 'full', stride=1))
self.assertEqual(6, conv_utils.deconv_output_length(4, 2, 'full', stride=2))
self.assertEqual(
5,
conv_utils.deconv_output_length(
4, 2, 'same', output_padding=2, stride=1))
self.assertEqual(
7,
conv_utils.deconv_output_length(
4, 2, 'same', output_padding=1, stride=2))
self.assertEqual(
7,
conv_utils.deconv_output_length(
4, 2, 'valid', output_padding=2, stride=1))
self.assertEqual(
9,
conv_utils.deconv_output_length(
4, 2, 'valid', output_padding=1, stride=2))
self.assertEqual(
5,
conv_utils.deconv_output_length(
4, 2, 'full', output_padding=2, stride=1))
self.assertEqual(
7,
conv_utils.deconv_output_length(
4, 2, 'full', output_padding=1, stride=2))
self.assertEqual(
5,
conv_utils.deconv_output_length(
4, 2, 'same', output_padding=1, stride=1, dilation=2))
self.assertEqual(
12,
conv_utils.deconv_output_length(
4, 2, 'valid', output_padding=2, stride=2, dilation=3))
self.assertEqual(
6,
conv_utils.deconv_output_length(
4, 2, 'full', output_padding=2, stride=2, dilation=3))
@parameterized.parameters(input_shapes)
class TestConvUtils(test.TestCase, parameterized.TestCase):
def test_conv_kernel_mask_fc(self, *input_shape):
padding = 'valid'
kernel_shape = input_shape
ndims = len(input_shape)
strides = (1,) * ndims
output_shape = _get_const_output_shape(input_shape, dim=1)
mask = np.ones(input_shape + output_shape, np.bool)
self.assertAllEqual(
mask,
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
padding
)
)
def test_conv_kernel_mask_diag(self, *input_shape):
ndims = len(input_shape)
kernel_shape = (1,) * ndims
strides = (1,) * ndims
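    # A 1 x ... x 1 kernel with unit strides connects each input position
    # only to the matching output position, so the mask is the identity
    # matrix reshaped to input_shape * 2.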
for padding in ['valid', 'same']:
mask = np.identity(int(np.prod(input_shape)), np.bool)
mask = np.reshape(mask, input_shape * 2)
self.assertAllEqual(
mask,
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
padding
)
)
def test_conv_kernel_mask_full_stride(self, *input_shape):
padding = 'valid'
ndims = len(input_shape)
kernel_shape = (1,) * ndims
strides = tuple([max(d, 1) for d in input_shape])
output_shape = _get_const_output_shape(input_shape, dim=1)
mask = np.zeros(input_shape + output_shape, np.bool)
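    # A stride equal to the input size yields at most one output position
    # per axis, fed only by input index 0.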
if all(d > 0 for d in mask.shape):
mask[(0,) * len(output_shape)] = True
self.assertAllEqual(
mask,
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
padding
)
)
def test_conv_kernel_mask_almost_full_stride(self, *input_shape):
padding = 'valid'
ndims = len(input_shape)
kernel_shape = (1,) * ndims
strides = tuple([max(d - 1, 1) for d in input_shape])
output_shape = _get_const_output_shape(input_shape, dim=2)
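    # A stride of size - 1 gives two output positions per dimension; only the border input positions (0 and d - 1) are connected.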
mask = np.zeros(input_shape + output_shape, np.bool)
if all(d > 0 for d in mask.shape):
for in_position in itertools.product(*[[0, d - 1] for d in input_shape]):
out_position = tuple([min(p, 1) for p in in_position])
mask[in_position + out_position] = True
self.assertAllEqual(
mask,
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
padding
)
)
def test_conv_kernel_mask_rect_kernel(self, *input_shape):
padding = 'valid'
ndims = len(input_shape)
strides = (1,) * ndims
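    # For each dimension d, test a kernel spanning the whole input along d and of size 1 elsewhere.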
for d in range(ndims):
kernel_shape = [1] * ndims
kernel_shape[d] = input_shape[d]
output_shape = list(input_shape)
output_shape[d] = min(1, input_shape[d])
mask = np.identity(int(np.prod(input_shape)), np.bool)
mask = np.reshape(mask, input_shape * 2)
for p in itertools.product(*[range(input_shape[dim])
for dim in range(ndims)]):
p = list(p)
p[d] = slice(None)
mask[p * 2] = True
mask = np.take(mask, range(0, min(1, input_shape[d])), ndims + d)
self.assertAllEqual(
mask,
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
padding
)
)
def test_conv_kernel_mask_wrong_padding(self, *input_shape):
ndims = len(input_shape)
kernel_shape = (1,) * ndims
strides = (1,) * ndims
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
'valid'
)
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
'same'
)
self.assertRaises(NotImplementedError,
conv_utils.conv_kernel_mask,
input_shape, kernel_shape, strides, 'full')
def test_conv_kernel_mask_wrong_dims(self, *input_shape):
kernel_shape = 1
strides = 1
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
'valid'
)
ndims = len(input_shape)
kernel_shape = (2,) * (ndims + 1)
self.assertRaises(ValueError,
conv_utils.conv_kernel_mask,
input_shape, kernel_shape, strides, 'same')
strides = (1,) * ndims
self.assertRaises(ValueError,
conv_utils.conv_kernel_mask,
input_shape, kernel_shape, strides, 'valid')
kernel_shape = (1,) * ndims
strides = (2,) * (ndims - 1)
self.assertRaises(ValueError,
conv_utils.conv_kernel_mask,
input_shape, kernel_shape, strides, 'valid')
strides = (2,) * ndims
conv_utils.conv_kernel_mask(
input_shape,
kernel_shape,
strides,
'valid'
)
if __name__ == '__main__':
test.main()
|
[
"tensorflow.python.platform.test.main",
"tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format",
"tensorflow.python.frozen_keras.utils.conv_utils.normalize_data_format",
"tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length",
"numpy.zeros",
"numpy.ones",
"absl.testing.parameterized.parameters",
"tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask",
"tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length",
"numpy.reshape",
"itertools.product",
"tensorflow.python.frozen_keras.utils.conv_utils.normalize_padding",
"tensorflow.python.frozen_keras.utils.conv_utils.normalize_tuple",
"tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length",
"numpy.prod"
] |
[((5908, 5946), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (['input_shapes'], {}), '(input_shapes)\n', (5932, 5946), False, 'from absl.testing import parameterized\n'), ((10756, 10767), 'tensorflow.python.platform.test.main', 'test.main', ([], {}), '()\n', (10765, 10767), False, 'from tensorflow.python.platform import test\n'), ((6243, 6287), 'numpy.ones', 'np.ones', (['(input_shape + output_shape)', 'np.bool'], {}), '(input_shape + output_shape, np.bool)\n', (6250, 6287), True, 'import numpy as np\n'), ((7238, 7283), 'numpy.zeros', 'np.zeros', (['(input_shape + output_shape)', 'np.bool'], {}), '(input_shape + output_shape, np.bool)\n', (7246, 7283), True, 'import numpy as np\n'), ((7837, 7882), 'numpy.zeros', 'np.zeros', (['(input_shape + output_shape)', 'np.bool'], {}), '(input_shape + output_shape, np.bool)\n', (7845, 7882), True, 'import numpy as np\n'), ((9363, 9435), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', '"""valid"""'], {}), "(input_shape, kernel_shape, strides, 'valid')\n", (9390, 9435), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((9479, 9550), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', '"""same"""'], {}), "(input_shape, kernel_shape, strides, 'same')\n", (9506, 9550), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((9853, 9925), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', '"""valid"""'], {}), "(input_shape, kernel_shape, strides, 'valid')\n", (9880, 9925), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((10614, 10686), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', '"""valid"""'], {}), "(input_shape, kernel_shape, strides, 'valid')\n", (10641, 10686), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((1478, 1529), 'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""channels_first"""', '(5)'], {}), "('channels_first', 5)\n", (1508, 1529), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((1569, 1620), 'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""channels_first"""', '(4)'], {}), "('channels_first', 4)\n", (1599, 1620), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((1659, 1710), 'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""channels_first"""', '(3)'], {}), "('channels_first', 3)\n", (1689, 1710), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((1741, 1791), 'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""channels_last"""', '(4)'], {}), "('channels_last', 4)\n", (1771, 1791), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((1821, 1871), 'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""channels_last"""', '(3)'], {}), "('channels_last', 3)\n", (1851, 1871), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((1903, 1953), 
'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""channels_last"""', '(5)'], {}), "('channels_last', 5)\n", (1933, 1953), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2011, 2055), 'tensorflow.python.frozen_keras.utils.conv_utils.convert_data_format', 'conv_utils.convert_data_format', (['"""invalid"""', '(2)'], {}), "('invalid', 2)\n", (2041, 2055), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2144, 2194), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_tuple', 'conv_utils.normalize_tuple', (['(2)'], {'n': '(3)', 'name': '"""strides"""'}), "(2, n=3, name='strides')\n", (2170, 2194), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2249, 2307), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_tuple', 'conv_utils.normalize_tuple', (['(2, 1, 2)'], {'n': '(3)', 'name': '"""strides"""'}), "((2, 1, 2), n=3, name='strides')\n", (2275, 2307), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2356, 2411), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_tuple', 'conv_utils.normalize_tuple', (['(2, 1)'], {'n': '(3)', 'name': '"""strides"""'}), "((2, 1), n=3, name='strides')\n", (2382, 2411), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2459, 2512), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_tuple', 'conv_utils.normalize_tuple', (['None'], {'n': '(3)', 'name': '"""strides"""'}), "(None, n=3, name='strides')\n", (2485, 2512), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2613, 2662), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_data_format', 'conv_utils.normalize_data_format', (['"""Channels_Last"""'], {}), "('Channels_Last')\n", (2645, 2662), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2724, 2774), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_data_format', 'conv_utils.normalize_data_format', (['"""CHANNELS_FIRST"""'], {}), "('CHANNELS_FIRST')\n", (2756, 2774), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2823, 2866), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_data_format', 'conv_utils.normalize_data_format', (['"""invalid"""'], {}), "('invalid')\n", (2855, 2866), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((2933, 2969), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_padding', 'conv_utils.normalize_padding', (['"""SAME"""'], {}), "('SAME')\n", (2961, 2969), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3001, 3038), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_padding', 'conv_utils.normalize_padding', (['"""VALID"""'], {}), "('VALID')\n", (3029, 3038), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3087, 3126), 'tensorflow.python.frozen_keras.utils.conv_utils.normalize_padding', 'conv_utils.normalize_padding', (['"""invalid"""'], {}), "('invalid')\n", (3115, 3126), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3189, 3238), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 'conv_utils.conv_output_length', (['(4)', '(2)', '"""same"""', '(1)', '(1)'], {}), "(4, 2, 'same', 1, 1)\n", (3218, 3238), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3264, 3313), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 
'conv_utils.conv_output_length', (['(4)', '(2)', '"""same"""', '(2)', '(1)'], {}), "(4, 2, 'same', 2, 1)\n", (3293, 3313), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3339, 3389), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 'conv_utils.conv_output_length', (['(4)', '(2)', '"""valid"""', '(1)', '(1)'], {}), "(4, 2, 'valid', 1, 1)\n", (3368, 3389), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3415, 3465), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 'conv_utils.conv_output_length', (['(4)', '(2)', '"""valid"""', '(2)', '(1)'], {}), "(4, 2, 'valid', 2, 1)\n", (3444, 3465), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3491, 3540), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 'conv_utils.conv_output_length', (['(4)', '(2)', '"""full"""', '(1)', '(1)'], {}), "(4, 2, 'full', 1, 1)\n", (3520, 3540), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3566, 3615), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 'conv_utils.conv_output_length', (['(4)', '(2)', '"""full"""', '(2)', '(1)'], {}), "(4, 2, 'full', 2, 1)\n", (3595, 3615), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3641, 3691), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_output_length', 'conv_utils.conv_output_length', (['(5)', '(2)', '"""valid"""', '(2)', '(2)'], {}), "(5, 2, 'valid', 2, 2)\n", (3670, 3691), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3754, 3799), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length', 'conv_utils.conv_input_length', (['(4)', '(2)', '"""same"""', '(1)'], {}), "(4, 2, 'same', 1)\n", (3782, 3799), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3825, 3870), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length', 'conv_utils.conv_input_length', (['(2)', '(2)', '"""same"""', '(2)'], {}), "(2, 2, 'same', 2)\n", (3853, 3870), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3896, 3942), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length', 'conv_utils.conv_input_length', (['(3)', '(2)', '"""valid"""', '(1)'], {}), "(3, 2, 'valid', 1)\n", (3924, 3942), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((3968, 4014), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length', 'conv_utils.conv_input_length', (['(2)', '(2)', '"""valid"""', '(2)'], {}), "(2, 2, 'valid', 2)\n", (3996, 4014), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4040, 4085), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length', 'conv_utils.conv_input_length', (['(4)', '(2)', '"""full"""', '(1)'], {}), "(4, 2, 'full', 1)\n", (4068, 4085), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4111, 4156), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_input_length', 'conv_utils.conv_input_length', (['(3)', '(2)', '"""full"""', '(2)'], {}), "(3, 2, 'full', 2)\n", (4139, 4156), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4222, 4277), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""same"""'], {'stride': '(1)'}), "(4, 2, 'same', stride=1)\n", (4253, 4277), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4303, 4358), 
'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""same"""'], {'stride': '(2)'}), "(4, 2, 'same', stride=2)\n", (4334, 4358), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4384, 4440), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""valid"""'], {'stride': '(1)'}), "(4, 2, 'valid', stride=1)\n", (4415, 4440), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4475, 4531), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""valid"""'], {'stride': '(2)'}), "(4, 2, 'valid', stride=2)\n", (4506, 4531), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4566, 4621), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""full"""'], {'stride': '(1)'}), "(4, 2, 'full', stride=1)\n", (4597, 4621), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4647, 4702), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""full"""'], {'stride': '(2)'}), "(4, 2, 'full', stride=2)\n", (4678, 4702), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4745, 4818), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""same"""'], {'output_padding': '(2)', 'stride': '(1)'}), "(4, 2, 'same', output_padding=2, stride=1)\n", (4776, 4818), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((4874, 4947), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""same"""'], {'output_padding': '(1)', 'stride': '(2)'}), "(4, 2, 'same', output_padding=1, stride=2)\n", (4905, 4947), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5003, 5077), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""valid"""'], {'output_padding': '(2)', 'stride': '(1)'}), "(4, 2, 'valid', output_padding=2, stride=1)\n", (5034, 5077), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5133, 5207), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""valid"""'], {'output_padding': '(1)', 'stride': '(2)'}), "(4, 2, 'valid', output_padding=1, stride=2)\n", (5164, 5207), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5263, 5336), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""full"""'], {'output_padding': '(2)', 'stride': '(1)'}), "(4, 2, 'full', output_padding=2, stride=1)\n", (5294, 5336), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5392, 5465), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""full"""'], {'output_padding': '(1)', 'stride': '(2)'}), "(4, 2, 'full', output_padding=1, stride=2)\n", (5423, 5465), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5521, 5610), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 
'conv_utils.deconv_output_length', (['(4)', '(2)', '"""same"""'], {'output_padding': '(1)', 'stride': '(1)', 'dilation': '(2)'}), "(4, 2, 'same', output_padding=1, stride=1,\n dilation=2)\n", (5552, 5610), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5663, 5753), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""valid"""'], {'output_padding': '(2)', 'stride': '(2)', 'dilation': '(3)'}), "(4, 2, 'valid', output_padding=2, stride=2,\n dilation=3)\n", (5694, 5753), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((5805, 5894), 'tensorflow.python.frozen_keras.utils.conv_utils.deconv_output_length', 'conv_utils.deconv_output_length', (['(4)', '(2)', '"""full"""'], {'output_padding': '(2)', 'stride': '(2)', 'dilation': '(3)'}), "(4, 2, 'full', output_padding=2, stride=2,\n dilation=3)\n", (5836, 5894), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((6335, 6407), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', 'padding'], {}), '(input_shape, kernel_shape, strides, padding)\n', (6362, 6407), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((6728, 6761), 'numpy.reshape', 'np.reshape', (['mask', '(input_shape * 2)'], {}), '(mask, input_shape * 2)\n', (6738, 6761), True, 'import numpy as np\n'), ((7415, 7487), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', 'padding'], {}), '(input_shape, kernel_shape, strides, padding)\n', (7442, 7487), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((7947, 8000), 'itertools.product', 'itertools.product', (['*[[0, d - 1] for d in input_shape]'], {}), '(*[[0, d - 1] for d in input_shape])\n', (7964, 8000), False, 'import itertools\n'), ((8161, 8233), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', 'padding'], {}), '(input_shape, kernel_shape, strides, padding)\n', (8188, 8233), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((8700, 8733), 'numpy.reshape', 'np.reshape', (['mask', '(input_shape * 2)'], {}), '(mask, input_shape * 2)\n', (8710, 8733), True, 'import numpy as np\n'), ((6815, 6887), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', 'padding'], {}), '(input_shape, kernel_shape, strides, padding)\n', (6842, 6887), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((9057, 9129), 'tensorflow.python.frozen_keras.utils.conv_utils.conv_kernel_mask', 'conv_utils.conv_kernel_mask', (['input_shape', 'kernel_shape', 'strides', 'padding'], {}), '(input_shape, kernel_shape, strides, padding)\n', (9084, 9129), False, 'from tensorflow.python.frozen_keras.utils import conv_utils\n'), ((6683, 6703), 'numpy.prod', 'np.prod', (['input_shape'], {}), '(input_shape)\n', (6690, 6703), True, 'import numpy as np\n'), ((8655, 8675), 'numpy.prod', 'np.prod', (['input_shape'], {}), '(input_shape)\n', (8662, 8675), True, 'import numpy as np\n')]
|
import json
from pprint import *
import guiLib
from guiLib import *
class CSGOParser:
"""
A class that parses CS:GO JSON item/loot dumps.
"""
dictionary = None
rarities = None
jitems = None
items = []
verbose = False
def __init__(self, filepath: str, verbose=False):
"""
Initialize ``self`` with JSON data located at ``filepath``.
:param filepath: The location of the JSON datafile.
:param verbose: Should we print everything?
"""
self.verbose = verbose
dictionary = self.parse(filepath)
self.dictionary = dictionary
self.rarities = dictionary["rarities"]
self.qualities = dictionary["qualities"]
self.jitems = dictionary["items"]
printif("Items:", self.verbose)
if self.verbose: pprint(self.jitems)
# go through all JSON objects
for item_id in self.jitems:
jitem = self.jitems[item_id]
item = self.jitem_to_item(jitem)
# if item exists
if item:
self.items.append(item)
print(item, end="\n\n")
def item_weight(self, jitem: dict) -> int:
"""
Get the weight for a JSON item.
:param jitem: A JSON item.
:return: The weight associated with this item.
Large weights are more common, small weights are less common.
"""
if "item_quality" in jitem:
return self.quality_to_weight(jitem)
elif "item_rarity" in jitem:
return self.rarity_to_weight(jitem)
return 0
def item_percent(self, jitem: dict) -> float:
"""
Get the percent for a JSON item.
:param jitem: A JSON item.
:return: The percentage chance to get with this item.
"""
weight = total = None
percent = 0
# higher weight means LOWER chance
if "item_quality" in jitem:
weight = self.quality_to_weight(jitem["item_quality"])
total = self.total_qualities()
            percent = 1 / weight if weight else 0
printif(f"Quality: higher weight means LOWER chance.", self.verbose)
printif(f"{weight} weight, {total} total.", self.verbose)
printif("1 / weight = percent", self.verbose)
printif(f"{1} / {weight} = {percent}", self.verbose)
# higher weight means larger chance
elif "item_rarity" in jitem:
weight = self.rarity_to_weight(jitem["item_rarity"])
total = self.total_rarities()
percent = weight / total
# print(f"Rarity: higher weight means LARGER chance")
# print(f"{weight} weight, {total} total.")
# print("weight / total = percent")
# print(f"{weight} / {total} = {percent}")
return percent
def total_rarities(self) -> int:
"""
Get all rarity weights summed up.
:return: The sum of all weight values for all rarities.
"""
total = 0
for rarity in self.rarities:
if "weight" in self.rarities[rarity]:
printif(f"Rarity {rarity} is {self.rarities[rarity]['weight']}", self.verbose)
total += int(self.rarities[rarity]["weight"])
# print(f"Total rarities: {total}")
return total
def total_qualities(self) -> int:
"""
Get all quality weights summed up.
:return: The sum of all weight values for all qualities.
"""
total = 0
for quality in self.qualities:
if "weight" in self.qualities[quality]:
printif(f"Quality {quality} is {self.qualities[quality]['weight']}", self.verbose)
total += int(self.qualities[quality]["weight"])
# print(f"Total qualities: {total}")
return total
def rarity_to_weight(self, rarity: str) -> int:
"""
Turn an ``item_rarity`` into a weight value.
:param rarity: The string representing a rarity.
:return: The weight associated with the string.
Example:\n
``rarity_to_weight("common") -> 10000000``\n
``rarity_to_weight("mythical") -> 80000``\n
"""
try:
return int(self.rarities[rarity]["weight"])
except Exception:
pass
return 0
def quality_to_weight(self, quality: str) -> int:
"""
Turn an ``item_quality`` into a weight value.
        :param quality: The string representing a quality.
:return: The weight associated with the string.
Example:\n
``quality_to_weight("normal") -> 0``\n
``quality_to_weight("genuine") -> 30``\n
"""
try:
return int(self.qualities[quality]["weight"])
except Exception:
pass
return 0
def jitem_to_item(self, jitem: dict) -> LootItem:
"""
Parses a single CSGO JSON item into a ``LootItem.``\n
Returns ``None`` upon failure.
:param jitem: A CSGO JSON item.
:return: A ``LootItem.``
"""
print(jitem)
if "name" in jitem:
name = jitem["name"]
# return None if JSON item doesn't have these vars
try:
name
except NameError:
return None
percent = self.item_percent(jitem)
item = LootItem(name, percent)
return item
    def parse(self, filepath: str) -> dict:
        """
        Parses a CSGO JSON dump file into a dictionary.
        :param filepath: The JSON file's location.
        :return: the parsed JSON data as a dict.
        """
        with open(filepath) as file:
            json_data = json.load(file)
if self.verbose: pprint(json_data)
return json_data
def test():
"""
Test out the functionality of our ``CSGOParser`` class.
"""
csgo = CSGOParser("_data/items_game_CSGO.json", verbose=True)
if __name__ == '__main__':
test()
|
[
"json.load"
] |
[((5722, 5737), 'json.load', 'json.load', (['file'], {}), '(file)\n', (5731, 5737), False, 'import json\n')]
|
# From ParaView email list. (TODO: Find link.)
# See also
# https://discourse.paraview.org/t/run-paraview-in-pyqt5-widget-python/8243/2
# https://discourse.paraview.org/t/dynamic-paraview-plugins-using-python/1793
# https://public.kitware.com/pipermail/paraview-developers/2012-April/001491.html
# https://markmail.org/message/pga4zilxynswhdef#query:+page:1+mid:f5zuw7izv5e262md+state:results
from paraview.util.vtkAlgorithm import *
from paraview import vtk
from PyQt5 import QtCore, QtGui, QtWidgets
# ------------------------------------
# A simple dialog
# ------------------------------------
class TestDialog(QtWidgets.QDialog):
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
layout = QtWidgets.QVBoxLayout(self)
qbtn = QtWidgets.QPushButton('Quit', self)
# qbtn.clicked.connect(QtCore.QCoreApplication.instance().quit)
qbtn.clicked.connect(self.on_click)
self.setWindowTitle('Quit button')
layout.addWidget(qbtn)
self.setLayout(layout)
self.show()
def on_click(self):
print('Clicked')
self.close()
#------------------------------------------------------------------------------
# A filter example.
#------------------------------------------------------------------------------
@smproxy.filter()
@smproperty.input(name="InputDataset", port_index=0)
@smdomain.datatype(dataTypes=["vtkDataSet"], composite_data_supported=False)
class TestEmbeddingPyQt(VTKPythonAlgorithmBase):
def __init__(self):
VTKPythonAlgorithmBase.__init__(self, nInputPorts=1, nOutputPorts=1, outputType="vtkPolyData")
def FillInputPortInformation(self, port, info):
if port == 0:
info.Set(self.INPUT_REQUIRED_DATA_TYPE(), "vtkDataSet")
return 1
@smproperty.xml("""
<Property
name="NextStep"
command="NextStep"
panel_widget="command_button"/>
""")
def NextStep(self):
print('NextStep')
dialog = TestDialog()
dialog.exec_()
def RequestData(self, request, inInfoVec, outInfoVec):
from vtkmodules.vtkCommonDataModel import vtkDataSet, vtkPolyData
input0 = vtkDataSet.GetData(inInfoVec[0], 0)
output = vtkPolyData.GetData(outInfoVec, 0)
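        # input0 is the upstream vtkDataSet from input port 0; a real filter would write its result into output instead of just printing.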
# do work
print("Pretend work done!")
return 1
|
[
"vtkmodules.vtkCommonDataModel.vtkPolyData.GetData",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QPushButton",
"vtkmodules.vtkCommonDataModel.vtkDataSet.GetData"
] |
[((757, 784), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self'], {}), '(self)\n', (778, 784), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((800, 835), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['"""Quit"""', 'self'], {}), "('Quit', self)\n", (821, 835), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2281, 2316), 'vtkmodules.vtkCommonDataModel.vtkDataSet.GetData', 'vtkDataSet.GetData', (['inInfoVec[0]', '(0)'], {}), '(inInfoVec[0], 0)\n', (2299, 2316), False, 'from vtkmodules.vtkCommonDataModel import vtkDataSet, vtkPolyData\n'), ((2334, 2368), 'vtkmodules.vtkCommonDataModel.vtkPolyData.GetData', 'vtkPolyData.GetData', (['outInfoVec', '(0)'], {}), '(outInfoVec, 0)\n', (2353, 2368), False, 'from vtkmodules.vtkCommonDataModel import vtkDataSet, vtkPolyData\n')]
|
import os
import math
import wandb
import tensorflow as tf
from absl import app, flags, logging
from configs.default import get_default_config
from model import X3D
import dataloader
import utils
flags.DEFINE_string('config', None,
'(Relative) path to config (.yaml) file.')
flags.DEFINE_string('train_file_pattern', None,
'Path to .txt file containing paths to video and integer label for training dataset.')
flags.DEFINE_string('val_file_pattern', None,
'Path to .txt file containing paths to video and integer label for validation dataset.')
flags.DEFINE_string('model_dir', None,
'Path to directory where model info, like checkpoints are (to be) stored.')
flags.DEFINE_string('pretrained_ckpt', None,
'Path to directory where pretrained model checkpoints are stored.')
flags.DEFINE_integer('num_gpus', 1,
'Number of gpus to use for training.', lower_bound=0)
flags.DEFINE_integer('save_checkpoints_step', None,
'Number of training steps to save checkpoints.', lower_bound=0)
flags.DEFINE_bool('mixed_precision', False,
'Whether to use mixed precision during training.')
flags.DEFINE_bool('use_tfrecord', False,
'Whether data should be loaded from tfrecord files.')
flags.DEFINE_bool('debug', False,
'Whether to run in debug mode.')
flags.mark_flags_as_required(['config', 'train_file_pattern', 'model_dir'])
FLAGS = flags.FLAGS
def main(_):
assert '.yaml' in FLAGS.config, 'Please provide path to yaml file.'
cfg = get_default_config()
cfg.merge_from_file(FLAGS.config)
cfg.freeze()
model_dir = FLAGS.model_dir
if not tf.io.gfile.exists(model_dir):
tf.io.gfile.makedirs(model_dir)
# init wandb
if cfg.WANDB.ENABLE:
wandb.tensorboard.patch(root_logdir=model_dir)
wandb.init(
job_type='train',
group=cfg.WANDB.GROUP_NAME,
project=cfg.WANDB.PROJECT_NAME,
sync_tensorboard=cfg.WANDB.TENSORBOARD,
mode=cfg.WANDB.MODE,
config=dict(cfg),
resume=True
)
if FLAGS.debug:
tf.config.run_functions_eagerly(True)
tf.debugging.set_log_device_placement(True)
os.environ['TF_DETERMINISTIC_OPS'] = '1'
tf.random.set_seed(1111)
logging.set_verbosity(logging.DEBUG)
tf.debugging.experimental.enable_dump_debug_info(model_dir,
tensor_debug_mode="FULL_HEALTH", circular_buffer_size=-1)
strategy = utils.get_strategy(FLAGS.num_gpus)
# mixed precision
precision = utils.get_precision(FLAGS.mixed_precision)
policy = tf.keras.mixed_precision.Policy(precision)
tf.keras.mixed_precision.set_global_policy(policy)
def get_dataset(cfg, file_pattern, is_training):
"""Returns a tf.data.Dataset"""
return dataloader.InputReader(
cfg,
is_training,
FLAGS.use_tfrecord,
FLAGS.mixed_precision
)(file_pattern, cfg.TRAIN.BATCH_SIZE if is_training else cfg.TEST.BATCH_SIZE)
def load_model(model, cfg):
"""Compile model with loss function, model optimizers and metrics."""
opt_str = cfg.TRAIN.OPTIMIZER.lower()
if opt_str == 'sgd':
opt = tf.optimizers.SGD(
learning_rate=cfg.TRAIN.WARMUP_LR,
momentum=cfg.TRAIN.MOMENTUM,
nesterov=True)
elif opt_str == 'adam':
opt = tf.optimizers.Adam(
learning_rate=cfg.TRAIN.WARMUP_LR)
else:
raise NotImplementedError(f'{opt_str} not supported')
if FLAGS.mixed_precision:
opt = tf.keras.mixed_precision.LossScaleOptimizer(opt)
model.compile(
optimizer=opt,
loss=tf.keras.losses.SparseCategoricalCrossentropy(),
metrics=[
tf.keras.metrics.SparseCategoricalAccuracy(name='acc'),
tf.keras.metrics.SparseTopKCategoricalAccuracy(
k=5,
name='top_5_acc')])
return model
# learning rate schedule
def lr_schedule(epoch, lr):
"""
Implements the learning rate schedule used in
https://arxiv.org/abs/2004.04730
"""
if epoch > cfg.TRAIN.WARMUP_EPOCHS:
new_lr = cfg.TRAIN.BASE_LR * (
0.5 * (tf.math.cos(tf.constant(math.pi) * (epoch/cfg.TRAIN.EPOCHS)) + 1))
else:
new_lr = cfg.TRAIN.WARMUP_LR + (
epoch * (cfg.TRAIN.BASE_LR - cfg.TRAIN.WARMUP_LR) / cfg.TRAIN.WARMUP_EPOCHS)
return new_lr
with strategy.scope():
model = X3D(cfg)
model = load_model(model, cfg)
# resume training from latest checkpoint, if available
current_epoch = 0
ckpt_path = tf.train.latest_checkpoint(model_dir)
if ckpt_path:
current_epoch = int(os.path.basename(ckpt_path).split('-')[1])
logging.info(f'Found checkpoint {ckpt_path} at epoch {current_epoch}')
model.load_weights(ckpt_path)
elif FLAGS.pretrained_ckpt:
logging.info(f'Loading model from pretrained weights at {FLAGS.pretrained_ckpt}')
if tf.io.gfile.isdir(FLAGS.pretrained_ckpt):
model.load_weights(tf.train.latest_checkpoint(FLAGS.pretrained_ckpt))
else:
model.load_weights(FLAGS.pretrained_ckpt)
model.fit(
get_dataset(cfg, FLAGS.train_file_pattern, True),
verbose=1,
epochs=cfg.TRAIN.EPOCHS,
        initial_epoch=current_epoch,
steps_per_epoch=cfg.TRAIN.DATASET_SIZE//cfg.TRAIN.BATCH_SIZE,
validation_data=get_dataset(cfg, FLAGS.val_file_pattern, False) if FLAGS.val_file_pattern else None,
callbacks=utils.get_callbacks(cfg, lr_schedule, FLAGS))
if __name__ == "__main__":
app.run(main)
|
[
"tensorflow.random.set_seed",
"absl.logging.info",
"tensorflow.io.gfile.makedirs",
"tensorflow.train.latest_checkpoint",
"tensorflow.io.gfile.isdir",
"absl.logging.set_verbosity",
"dataloader.InputReader",
"absl.flags.mark_flags_as_required",
"tensorflow.keras.losses.SparseCategoricalCrossentropy",
"absl.flags.DEFINE_bool",
"tensorflow.keras.mixed_precision.Policy",
"absl.flags.DEFINE_integer",
"tensorflow.keras.mixed_precision.LossScaleOptimizer",
"utils.get_callbacks",
"tensorflow.keras.metrics.SparseTopKCategoricalAccuracy",
"os.path.basename",
"configs.default.get_default_config",
"tensorflow.constant",
"tensorflow.debugging.experimental.enable_dump_debug_info",
"wandb.tensorboard.patch",
"tensorflow.optimizers.Adam",
"tensorflow.debugging.set_log_device_placement",
"tensorflow.io.gfile.exists",
"utils.get_precision",
"tensorflow.config.run_functions_eagerly",
"tensorflow.keras.mixed_precision.set_global_policy",
"absl.flags.DEFINE_string",
"model.X3D",
"absl.app.run",
"utils.get_strategy",
"tensorflow.keras.metrics.SparseCategoricalAccuracy",
"tensorflow.optimizers.SGD"
] |
[((209, 287), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""config"""', 'None', '"""(Relative) path to config (.yaml) file."""'], {}), "('config', None, '(Relative) path to config (.yaml) file.')\n", (228, 287), False, 'from absl import app, flags, logging\n'), ((294, 437), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""train_file_pattern"""', 'None', '"""Path to .txt file containing paths to video and integer label for training dataset."""'], {}), "('train_file_pattern', None,\n 'Path to .txt file containing paths to video and integer label for training dataset.'\n )\n", (313, 437), False, 'from absl import app, flags, logging\n'), ((435, 578), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""val_file_pattern"""', 'None', '"""Path to .txt file containing paths to video and integer label for validation dataset."""'], {}), "('val_file_pattern', None,\n 'Path to .txt file containing paths to video and integer label for validation dataset.'\n )\n", (454, 578), False, 'from absl import app, flags, logging\n'), ((576, 694), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""model_dir"""', 'None', '"""Path to directory where model info, like checkpoints are (to be) stored."""'], {}), "('model_dir', None,\n 'Path to directory where model info, like checkpoints are (to be) stored.')\n", (595, 694), False, 'from absl import app, flags, logging\n'), ((697, 813), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""pretrained_ckpt"""', 'None', '"""Path to directory where pretrained model checkpoints are stored."""'], {}), "('pretrained_ckpt', None,\n 'Path to directory where pretrained model checkpoints are stored.')\n", (716, 813), False, 'from absl import app, flags, logging\n'), ((816, 909), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_gpus"""', '(1)', '"""Number of gpus to use for training."""'], {'lower_bound': '(0)'}), "('num_gpus', 1, 'Number of gpus to use for training.',\n lower_bound=0)\n", (836, 909), False, 'from absl import app, flags, logging\n'), ((912, 1031), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""save_checkpoints_step"""', 'None', '"""Number of training steps to save checkpoints."""'], {'lower_bound': '(0)'}), "('save_checkpoints_step', None,\n 'Number of training steps to save checkpoints.', lower_bound=0)\n", (932, 1031), False, 'from absl import app, flags, logging\n'), ((1034, 1132), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""mixed_precision"""', '(False)', '"""Whether to use mixed precision during training."""'], {}), "('mixed_precision', False,\n 'Whether to use mixed precision during training.')\n", (1051, 1132), False, 'from absl import app, flags, logging\n'), ((1135, 1233), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""use_tfrecord"""', '(False)', '"""Whether data should be loaded from tfrecord files."""'], {}), "('use_tfrecord', False,\n 'Whether data should be loaded from tfrecord files.')\n", (1152, 1233), False, 'from absl import app, flags, logging\n'), ((1236, 1302), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""debug"""', '(False)', '"""Whether to run in debug mode."""'], {}), "('debug', False, 'Whether to run in debug mode.')\n", (1253, 1302), False, 'from absl import app, flags, logging\n'), ((1311, 1386), 'absl.flags.mark_flags_as_required', 'flags.mark_flags_as_required', (["['config', 'train_file_pattern', 'model_dir']"], {}), "(['config', 'train_file_pattern', 'model_dir'])\n", (1339, 1386), False, 'from absl import app, flags, logging\n'), ((1506, 1526), 
'configs.default.get_default_config', 'get_default_config', ([], {}), '()\n', (1524, 1526), False, 'from configs.default import get_default_config\n'), ((2418, 2452), 'utils.get_strategy', 'utils.get_strategy', (['FLAGS.num_gpus'], {}), '(FLAGS.num_gpus)\n', (2436, 2452), False, 'import utils\n'), ((2493, 2535), 'utils.get_precision', 'utils.get_precision', (['FLAGS.mixed_precision'], {}), '(FLAGS.mixed_precision)\n', (2512, 2535), False, 'import utils\n'), ((2548, 2590), 'tensorflow.keras.mixed_precision.Policy', 'tf.keras.mixed_precision.Policy', (['precision'], {}), '(precision)\n', (2579, 2590), True, 'import tensorflow as tf\n'), ((2594, 2644), 'tensorflow.keras.mixed_precision.set_global_policy', 'tf.keras.mixed_precision.set_global_policy', (['policy'], {}), '(policy)\n', (2636, 2644), True, 'import tensorflow as tf\n'), ((5579, 5592), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (5586, 5592), False, 'from absl import app, flags, logging\n'), ((1623, 1652), 'tensorflow.io.gfile.exists', 'tf.io.gfile.exists', (['model_dir'], {}), '(model_dir)\n', (1641, 1652), True, 'import tensorflow as tf\n'), ((1659, 1690), 'tensorflow.io.gfile.makedirs', 'tf.io.gfile.makedirs', (['model_dir'], {}), '(model_dir)\n', (1679, 1690), True, 'import tensorflow as tf\n'), ((1738, 1784), 'wandb.tensorboard.patch', 'wandb.tensorboard.patch', ([], {'root_logdir': 'model_dir'}), '(root_logdir=model_dir)\n', (1761, 1784), False, 'import wandb\n'), ((2067, 2104), 'tensorflow.config.run_functions_eagerly', 'tf.config.run_functions_eagerly', (['(True)'], {}), '(True)\n', (2098, 2104), True, 'import tensorflow as tf\n'), ((2110, 2153), 'tensorflow.debugging.set_log_device_placement', 'tf.debugging.set_log_device_placement', (['(True)'], {}), '(True)\n', (2147, 2153), True, 'import tensorflow as tf\n'), ((2205, 2229), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['(1111)'], {}), '(1111)\n', (2223, 2229), True, 'import tensorflow as tf\n'), ((2235, 2271), 'absl.logging.set_verbosity', 'logging.set_verbosity', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (2256, 2271), False, 'from absl import app, flags, logging\n'), ((2277, 2398), 'tensorflow.debugging.experimental.enable_dump_debug_info', 'tf.debugging.experimental.enable_dump_debug_info', (['model_dir'], {'tensor_debug_mode': '"""FULL_HEALTH"""', 'circular_buffer_size': '(-1)'}), "(model_dir,\n tensor_debug_mode='FULL_HEALTH', circular_buffer_size=-1)\n", (2325, 2398), True, 'import tensorflow as tf\n'), ((4423, 4431), 'model.X3D', 'X3D', (['cfg'], {}), '(cfg)\n', (4426, 4431), False, 'from model import X3D\n'), ((4570, 4607), 'tensorflow.train.latest_checkpoint', 'tf.train.latest_checkpoint', (['model_dir'], {}), '(model_dir)\n', (4596, 4607), True, 'import tensorflow as tf\n'), ((2748, 2836), 'dataloader.InputReader', 'dataloader.InputReader', (['cfg', 'is_training', 'FLAGS.use_tfrecord', 'FLAGS.mixed_precision'], {}), '(cfg, is_training, FLAGS.use_tfrecord, FLAGS.\n mixed_precision)\n', (2770, 2836), False, 'import dataloader\n'), ((3141, 3242), 'tensorflow.optimizers.SGD', 'tf.optimizers.SGD', ([], {'learning_rate': 'cfg.TRAIN.WARMUP_LR', 'momentum': 'cfg.TRAIN.MOMENTUM', 'nesterov': '(True)'}), '(learning_rate=cfg.TRAIN.WARMUP_LR, momentum=cfg.TRAIN.\n MOMENTUM, nesterov=True)\n', (3158, 3242), True, 'import tensorflow as tf\n'), ((3498, 3546), 'tensorflow.keras.mixed_precision.LossScaleOptimizer', 'tf.keras.mixed_precision.LossScaleOptimizer', (['opt'], {}), '(opt)\n', (3541, 3546), True, 'import tensorflow as tf\n'), ((4704, 4774), 
'absl.logging.info', 'logging.info', (['f"""Found checkpoint {ckpt_path} at epoch {current_epoch}"""'], {}), "(f'Found checkpoint {ckpt_path} at epoch {current_epoch}')\n", (4716, 4774), False, 'from absl import app, flags, logging\n'), ((3314, 3367), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', ([], {'learning_rate': 'cfg.TRAIN.WARMUP_LR'}), '(learning_rate=cfg.TRAIN.WARMUP_LR)\n', (3332, 3367), True, 'import tensorflow as tf\n'), ((3611, 3658), 'tensorflow.keras.losses.SparseCategoricalCrossentropy', 'tf.keras.losses.SparseCategoricalCrossentropy', ([], {}), '()\n', (3656, 3658), True, 'import tensorflow as tf\n'), ((4852, 4938), 'absl.logging.info', 'logging.info', (['f"""Loading model from pretrained weights at {FLAGS.pretrained_ckpt}"""'], {}), "(\n f'Loading model from pretrained weights at {FLAGS.pretrained_ckpt}')\n", (4864, 4938), False, 'from absl import app, flags, logging\n'), ((4944, 4984), 'tensorflow.io.gfile.isdir', 'tf.io.gfile.isdir', (['FLAGS.pretrained_ckpt'], {}), '(FLAGS.pretrained_ckpt)\n', (4961, 4984), True, 'import tensorflow as tf\n'), ((5500, 5544), 'utils.get_callbacks', 'utils.get_callbacks', (['cfg', 'lr_schedule', 'FLAGS'], {}), '(cfg, lr_schedule, FLAGS)\n', (5519, 5544), False, 'import utils\n'), ((3692, 3746), 'tensorflow.keras.metrics.SparseCategoricalAccuracy', 'tf.keras.metrics.SparseCategoricalAccuracy', ([], {'name': '"""acc"""'}), "(name='acc')\n", (3734, 3746), True, 'import tensorflow as tf\n'), ((3761, 3830), 'tensorflow.keras.metrics.SparseTopKCategoricalAccuracy', 'tf.keras.metrics.SparseTopKCategoricalAccuracy', ([], {'k': '(5)', 'name': '"""top_5_acc"""'}), "(k=5, name='top_5_acc')\n", (3807, 3830), True, 'import tensorflow as tf\n'), ((5014, 5063), 'tensorflow.train.latest_checkpoint', 'tf.train.latest_checkpoint', (['FLAGS.pretrained_ckpt'], {}), '(FLAGS.pretrained_ckpt)\n', (5040, 5063), True, 'import tensorflow as tf\n'), ((4654, 4681), 'os.path.basename', 'os.path.basename', (['ckpt_path'], {}), '(ckpt_path)\n', (4670, 4681), False, 'import os\n'), ((4169, 4189), 'tensorflow.constant', 'tf.constant', (['math.pi'], {}), '(math.pi)\n', (4180, 4189), True, 'import tensorflow as tf\n')]
|
# VMUI module
# All rights reserved by <NAME> and <NAME>
#
# Program responsible for displaying the file generated by the compiler, the stack values and the output data.
#
# This module is responsible for creating the virtual machine interface and reading the file generated by the compiler.
#
# The goal of the program is to perform a complete analysis of the language proposed
# by the professor, to the point of composing a system: our compiler.
from tkinter import *
from tkinter import ttk
from tkinter import filedialog
from MaquinaVirtual.VM import *
from MaquinaVirtual.Exec import Exec
class VMUI:
    # Declare the root of the interface
app = Tk()
app.resizable(False, False)
    # Create the titles of the areas
framepc = LabelFrame(app, text="Instruções", font=(20))
frametv = LabelFrame(app, text="Código", font=(20))
framestack = LabelFrame(app, text="Pilha", font=(20))
frameoutput = LabelFrame(app, text="Dados", font=(20))
    # Create the views
tv = ttk.Treeview(frametv, columns=('Linha', 'Instrução', 'Atributo 1', 'Atributo 2', 'Comentário'), show='headings',
height=20)
tv2 = ttk.Treeview(framestack, columns=('Endereço', 'Valor'), show='headings', height=15)
tv3 = ttk.Treeview(frameoutput, columns='Dados', show='headings')
    tv4 = ttk.Treeview(framepc, columns=("Endereço", "PC"), show="headings", height=2)
    # Create the compile button
button = Button(app, text="Compilar", state="disabled")
type_comp = -1
def printVMUI(self):
        # Configure the root window
self.app.title("Máquina Virtual")
self.app.geometry("1000x900")
self.app.configure(background="#dde")
        # Configure the areas
self.frametv.grid(row=1, column=0, sticky=W)
self.framestack.grid(row=1, column=1, sticky=NE)
self.framepc.grid(row=1,column=1, sticky=S)
self.frameoutput.grid(row=2, column=0, sticky=W)
        # Create a menu bar
menubar = Menu(self.app)
        # Create the menu options
filemenu = Menu(menubar, tearoff=0)
filemenu.add_command(label="Open",command=lambda:self.lerArquivo(self))
menubar.add_cascade(label="File", menu=filemenu)
        # Configure the button
self.button.grid(row=0, column=1, sticky=NE, padx=10, pady=10)
self.button["command"]=lambda :self.comp(self)
# Cria as colunas da tela "Código"
self.tv.column('Linha', minwidth=0, width=90)
self.tv.column('Instrução', minwidth=0, width=150)
self.tv.column('Atributo 1', minwidth=0, width=100)
self.tv.column('Atributo 2', minwidth=0, width=100)
self.tv.column('Comentário', minwidth=0, width=250)
self.tv.heading('Linha', text='LINHA')
self.tv.heading('Instrução', text='INSTRUÇÃO')
self.tv.heading('Atributo 1', text='ATRIBUTO 1')
self.tv.heading('Atributo 2', text='ATRIBUTO 2')
self.tv.heading('Comentário', text='COMENTÁRIO')
self.tv.grid(row=1, column=0, sticky=W, pady=10, padx=20)
# Cria as colunas da tela "Pilha"
self.tv2.column('Endereço', minwidth=0, width=125)
self.tv2.column('Valor', minwidth=0, width=125)
self.tv2.heading('Endereço', text='Endereço')
self.tv2.heading('Valor', text='Valor')
self.tv2.grid(row=1, column=1, sticky=NE, pady=10, padx=3)
# Cria as colunas da tela "Dados"
self.tv3.column('Dados', minwidth=0, width=150)
self.tv3.heading('Dados', text='Dados')
self.tv3.grid(row=2, column=0, sticky=SW, padx=20, pady=15)
# Cria as colunas da tela "Instruções"
self.tv4.column("Endereço", minwidth=0, width=100)
self.tv4.column("PC", minwidth=0, width=100)
self.tv4.heading("Endereço", text="Endereço")
self.tv4.heading("PC",text="PC")
self.tv4.grid(column= 3, row= 3, sticky=S, padx=9, pady=5)
        # Create the compile-mode selector
frameradio = LabelFrame(self.app, text="Modo de Compilar", font=(20))
frameradio.grid(column=1, row=2, sticky=W)
radioB1 = Radiobutton(frameradio, text="Default", command=lambda : self.printar(self,0))
radioB1.grid(sticky=W)
radioB2 = Radiobutton(frameradio, text='Rápido', command= lambda: self.printar(self,1))
radioB2.grid(sticky=W)
radioB3 = Radiobutton(frameradio, text='Passo-a-Passo', command=lambda: self.printar(self,2))
radioB3.grid(sticky=W)
self.app.config(menu=menubar)
self.app.mainloop()
    # Read the file
def lerArquivo(self):
arquivo = filedialog.askopenfilename(title="Choose a file", filetypes=(("Obj File", "*.obj"),))
VM.replace(VM, arquivo)
VM.fileTolist(VM)
VM.defineEnd(VM)
VM.jmpEnd(VM)
VM.newObjfile(VM, self.tv)
    # Function called when compiling.
    # If type_comp is 1, run the fast mode; if 2, run step-by-step; if 0, do nothing.
def comp(self):
if self.type_comp == 1:
            # Pass the widgets where the information must be printed
Exec.eitas = self.tv2
Exec.eitas2 = self.app
Exec.eitas4 = self.tv4
Exec.exec2(Exec)
VM.printOutput(VM, Exec.saida, self.tv3)
elif self.type_comp == 2:
Exec.eitas = self.tv2
Exec.eitas4=self.tv4
Exec.exec(Exec)
if Exec.flag == 1:
VM.printOutput(VM, Exec.saida, self.tv3)
Exec.flag = 0
else:
pass
    # Set the button style
def printar(self,valor):
if valor == 0:
self.button["state"]="disabled"
self.button["bg"]= "white"
elif valor == 1:
self.button["state"]="normal"
self.button["bg"]="red"
self.type_comp = valor
else:
self.button["state"]="normal"
self.button["bg"]="blue"
self.type_comp = valor
|
[
"tkinter.filedialog.askopenfilename",
"MaquinaVirtual.Exec.Exec.exec2",
"MaquinaVirtual.Exec.Exec.exec",
"tkinter.ttk.Treeview"
] |
[((1032, 1159), 'tkinter.ttk.Treeview', 'ttk.Treeview', (['frametv'], {'columns': "('Linha', 'Instrução', 'Atributo 1', 'Atributo 2', 'Comentário')", 'show': '"""headings"""', 'height': '(20)'}), "(frametv, columns=('Linha', 'Instrução', 'Atributo 1',\n 'Atributo 2', 'Comentário'), show='headings', height=20)\n", (1044, 1159), False, 'from tkinter import ttk\n'), ((1194, 1281), 'tkinter.ttk.Treeview', 'ttk.Treeview', (['framestack'], {'columns': "('Endereço', 'Valor')", 'show': '"""headings"""', 'height': '(15)'}), "(framestack, columns=('Endereço', 'Valor'), show='headings',\n height=15)\n", (1206, 1281), False, 'from tkinter import ttk\n'), ((1289, 1348), 'tkinter.ttk.Treeview', 'ttk.Treeview', (['frameoutput'], {'columns': '"""Dados"""', 'show': '"""headings"""'}), "(frameoutput, columns='Dados', show='headings')\n", (1301, 1348), False, 'from tkinter import ttk\n'), ((1360, 1434), 'tkinter.ttk.Treeview', 'ttk.Treeview', (['framepc'], {'column': "('Endereço', 'PC')", 'show': '"""headings"""', 'heigh': '(2)'}), "(framepc, column=('Endereço', 'PC'), show='headings', heigh=2)\n", (1372, 1434), False, 'from tkinter import ttk\n'), ((4702, 4791), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'title': '"""Choose a file"""', 'filetypes': "(('Obj File', '*.obj'),)"}), "(title='Choose a file', filetypes=(('Obj File',\n '*.obj'),))\n", (4728, 4791), False, 'from tkinter import filedialog\n'), ((5318, 5334), 'MaquinaVirtual.Exec.Exec.exec2', 'Exec.exec2', (['Exec'], {}), '(Exec)\n', (5328, 5334), False, 'from MaquinaVirtual.Exec import Exec\n'), ((5506, 5521), 'MaquinaVirtual.Exec.Exec.exec', 'Exec.exec', (['Exec'], {}), '(Exec)\n', (5515, 5521), False, 'from MaquinaVirtual.Exec import Exec\n')]
|
# Natural Language Toolkit: Semantic Interpretation
#
# Author: <NAME> <<EMAIL>>
#
# Copyright (C) 2001-2022 NLTK Project
# URL: <https://www.nltk.org/>
# For license information, see LICENSE.TXT
from nltk.sem.logic import (
AllExpression,
AndExpression,
ApplicationExpression,
EqualityExpression,
ExistsExpression,
IffExpression,
ImpExpression,
NegatedExpression,
OrExpression,
VariableExpression,
skolem_function,
unique_variable,
)
def skolemize(expression, univ_scope=None, used_variables=None):
"""
Skolemize the expression and convert to conjunctive normal form (CNF)
"""
if univ_scope is None:
univ_scope = set()
if used_variables is None:
used_variables = set()
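    # Universally bound variables are renamed apart; existentials (and negated universals) become Skolem terms over the enclosing universal scope.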
if isinstance(expression, AllExpression):
term = skolemize(
expression.term,
univ_scope | {expression.variable},
used_variables | {expression.variable},
)
return term.replace(
expression.variable,
VariableExpression(unique_variable(ignore=used_variables)),
)
elif isinstance(expression, AndExpression):
return skolemize(expression.first, univ_scope, used_variables) & skolemize(
expression.second, univ_scope, used_variables
)
elif isinstance(expression, OrExpression):
return to_cnf(
skolemize(expression.first, univ_scope, used_variables),
skolemize(expression.second, univ_scope, used_variables),
)
elif isinstance(expression, ImpExpression):
return to_cnf(
skolemize(-expression.first, univ_scope, used_variables),
skolemize(expression.second, univ_scope, used_variables),
)
elif isinstance(expression, IffExpression):
return to_cnf(
skolemize(-expression.first, univ_scope, used_variables),
skolemize(expression.second, univ_scope, used_variables),
) & to_cnf(
skolemize(expression.first, univ_scope, used_variables),
skolemize(-expression.second, univ_scope, used_variables),
)
elif isinstance(expression, EqualityExpression):
return expression
elif isinstance(expression, NegatedExpression):
negated = expression.term
if isinstance(negated, AllExpression):
term = skolemize(
-negated.term, univ_scope, used_variables | {negated.variable}
)
if univ_scope:
return term.replace(negated.variable, skolem_function(univ_scope))
else:
skolem_constant = VariableExpression(
unique_variable(ignore=used_variables)
)
return term.replace(negated.variable, skolem_constant)
elif isinstance(negated, AndExpression):
return to_cnf(
skolemize(-negated.first, univ_scope, used_variables),
skolemize(-negated.second, univ_scope, used_variables),
)
elif isinstance(negated, OrExpression):
return skolemize(-negated.first, univ_scope, used_variables) & skolemize(
-negated.second, univ_scope, used_variables
)
elif isinstance(negated, ImpExpression):
return skolemize(negated.first, univ_scope, used_variables) & skolemize(
-negated.second, univ_scope, used_variables
)
elif isinstance(negated, IffExpression):
return to_cnf(
skolemize(-negated.first, univ_scope, used_variables),
skolemize(-negated.second, univ_scope, used_variables),
) & to_cnf(
skolemize(negated.first, univ_scope, used_variables),
skolemize(negated.second, univ_scope, used_variables),
)
elif isinstance(negated, EqualityExpression):
return expression
elif isinstance(negated, NegatedExpression):
return skolemize(negated.term, univ_scope, used_variables)
elif isinstance(negated, ExistsExpression):
term = skolemize(
-negated.term,
univ_scope | {negated.variable},
used_variables | {negated.variable},
)
return term.replace(
negated.variable,
VariableExpression(unique_variable(ignore=used_variables)),
)
elif isinstance(negated, ApplicationExpression):
return expression
else:
raise Exception("'%s' cannot be skolemized" % expression)
elif isinstance(expression, ExistsExpression):
term = skolemize(
expression.term, univ_scope, used_variables | {expression.variable}
)
if univ_scope:
return term.replace(expression.variable, skolem_function(univ_scope))
else:
skolem_constant = VariableExpression(unique_variable(ignore=used_variables))
return term.replace(expression.variable, skolem_constant)
elif isinstance(expression, ApplicationExpression):
return expression
else:
raise Exception("'%s' cannot be skolemized" % expression)
def to_cnf(first, second):
"""
Convert this split disjunction to conjunctive normal form (CNF)
"""
if isinstance(first, AndExpression):
r_first = to_cnf(first.first, second)
r_second = to_cnf(first.second, second)
return r_first & r_second
elif isinstance(second, AndExpression):
r_first = to_cnf(first, second.first)
r_second = to_cnf(first, second.second)
return r_first & r_second
else:
return first | second
|
[
"nltk.sem.logic.unique_variable",
"nltk.sem.logic.skolem_function"
] |
[((1062, 1100), 'nltk.sem.logic.unique_variable', 'unique_variable', ([], {'ignore': 'used_variables'}), '(ignore=used_variables)\n', (1077, 1100), False, 'from nltk.sem.logic import AllExpression, AndExpression, ApplicationExpression, EqualityExpression, ExistsExpression, IffExpression, ImpExpression, NegatedExpression, OrExpression, VariableExpression, skolem_function, unique_variable\n'), ((2550, 2577), 'nltk.sem.logic.skolem_function', 'skolem_function', (['univ_scope'], {}), '(univ_scope)\n', (2565, 2577), False, 'from nltk.sem.logic import AllExpression, AndExpression, ApplicationExpression, EqualityExpression, ExistsExpression, IffExpression, ImpExpression, NegatedExpression, OrExpression, VariableExpression, skolem_function, unique_variable\n'), ((2671, 2709), 'nltk.sem.logic.unique_variable', 'unique_variable', ([], {'ignore': 'used_variables'}), '(ignore=used_variables)\n', (2686, 2709), False, 'from nltk.sem.logic import AllExpression, AndExpression, ApplicationExpression, EqualityExpression, ExistsExpression, IffExpression, ImpExpression, NegatedExpression, OrExpression, VariableExpression, skolem_function, unique_variable\n'), ((4854, 4881), 'nltk.sem.logic.skolem_function', 'skolem_function', (['univ_scope'], {}), '(univ_scope)\n', (4869, 4881), False, 'from nltk.sem.logic import AllExpression, AndExpression, ApplicationExpression, EqualityExpression, ExistsExpression, IffExpression, ImpExpression, NegatedExpression, OrExpression, VariableExpression, skolem_function, unique_variable\n'), ((4946, 4984), 'nltk.sem.logic.unique_variable', 'unique_variable', ([], {'ignore': 'used_variables'}), '(ignore=used_variables)\n', (4961, 4984), False, 'from nltk.sem.logic import AllExpression, AndExpression, ApplicationExpression, EqualityExpression, ExistsExpression, IffExpression, ImpExpression, NegatedExpression, OrExpression, VariableExpression, skolem_function, unique_variable\n'), ((4385, 4423), 'nltk.sem.logic.unique_variable', 'unique_variable', ([], {'ignore': 'used_variables'}), '(ignore=used_variables)\n', (4400, 4423), False, 'from nltk.sem.logic import AllExpression, AndExpression, ApplicationExpression, EqualityExpression, ExistsExpression, IffExpression, ImpExpression, NegatedExpression, OrExpression, VariableExpression, skolem_function, unique_variable\n')]
|
import numpy as np
from numpy.testing import *
from supreme.register.radial_sum import radial_sum
def test_basic():
x = np.array([[1, 0, 2],
[0, 5, 0],
[3, 0, 4]], dtype=np.double)
R = radial_sum(x)
pi = np.pi
assert_array_equal(R[[135, 45, 225, 315]], [1, 2, 3, 4])
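    # Each corner of the 3x3 input sits 45 degrees off-axis from the centre
    # pixel, so its value lands in the matching angular bin checked above:
    # (0,0)=1 at 135, (0,2)=2 at 45, (2,0)=3 at 225, (2,2)=4 at 315.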
if __name__ == "__main__":
run_module_suite()
|
[
"supreme.register.radial_sum.radial_sum",
"numpy.array"
] |
[((126, 186), 'numpy.array', 'np.array', (['[[1, 0, 2], [0, 5, 0], [3, 0, 4]]'], {'dtype': 'np.double'}), '([[1, 0, 2], [0, 5, 0], [3, 0, 4]], dtype=np.double)\n', (134, 186), True, 'import numpy as np\n'), ((231, 244), 'supreme.register.radial_sum.radial_sum', 'radial_sum', (['x'], {}), '(x)\n', (241, 244), False, 'from supreme.register.radial_sum import radial_sum\n')]
|
import logging
from typing import List
from dispatch.config import DISPATCH_HELP_EMAIL, INCIDENT_RESPONSE_TEAM_EMAIL
from dispatch.database.core import SessionLocal
from dispatch.messaging.strings import (
INCIDENT_FEEDBACK_DAILY_REPORT,
MessageType,
)
from dispatch.plugin import service as plugin_service
from .models import Feedback
log = logging.getLogger(__name__)
def send_incident_feedback_daily_report(
commander_email: str, feedback: List[Feedback], project_id: int, db_session: SessionLocal
):
"""
Sends an incident feedback daily report to all incident commanders who received feedback.
"""
plugin = plugin_service.get_active_instance(
db_session=db_session, project_id=project_id, plugin_type="email"
)
if not plugin:
log.warning("Incident feedback daily report not sent. Email plugin is not enabled.")
return
items = []
for piece in feedback:
participant = piece.participant.individual.name if piece.participant else "Anonymous"
items.append(
{
"name": piece.incident.name,
"title": piece.incident.title,
"rating": piece.rating,
"feedback": piece.feedback,
"participant": participant,
"created_at": piece.created_at,
}
)
name = subject = notification_text = "Incident Feedback Daily Report"
contact_fullname = contact_weblink = DISPATCH_HELP_EMAIL
plugin.instance.send(
commander_email,
notification_text,
INCIDENT_FEEDBACK_DAILY_REPORT,
MessageType.incident_feedback_daily_report,
name=name,
subject=subject,
cc=INCIDENT_RESPONSE_TEAM_EMAIL,
contact_fullname=contact_fullname,
contact_weblink=contact_weblink,
items=items,
)
log.debug(f"Incident feedback daily report sent to {commander_email}.")
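# Hypothetical call sketch; the email address and the variables holding the
# feedback list, project id, and database session are placeholders supplied
# by the caller, not names from this module:
#
#     send_incident_feedback_daily_report(
#         commander_email="[email protected]",
#         feedback=pending_feedback,   # List[Feedback] gathered elsewhere
#         project_id=project.id,
#         db_session=db_session,
#     )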
|
[
"dispatch.plugin.service.get_active_instance",
"logging.getLogger"
] |
[((354, 381), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (371, 381), False, 'import logging\n'), ((645, 751), 'dispatch.plugin.service.get_active_instance', 'plugin_service.get_active_instance', ([], {'db_session': 'db_session', 'project_id': 'project_id', 'plugin_type': '"""email"""'}), "(db_session=db_session, project_id=\n project_id, plugin_type='email')\n", (679, 751), True, 'from dispatch.plugin import service as plugin_service\n')]
|
# Insert Location Types and Group Roles into DB
from django.db import migrations
class Migration(migrations.Migration):
# Migration script to insert default metadata dependencies for Projects, Datasets, Files and Folders
dependencies = [
('api', '0018_add_hubzero_locationtype'),
]
operations = [
# Metadata
migrations.RunSQL("UPDATE rdm_research_groups set name = 'Radiam' where id = 'cb03b55d-873f-4a0e-8208-07f960fa5032';"),
]
|
[
"django.db.migrations.RunSQL"
] |
[((354, 482), 'django.db.migrations.RunSQL', 'migrations.RunSQL', (['"""UPDATE rdm_research_groups set name = \'Radiam\' where id = \'cb03b55d-873f-4a0e-8208-07f960fa5032\';"""'], {}), '(\n "UPDATE rdm_research_groups set name = \'Radiam\' where id = \'cb03b55d-873f-4a0e-8208-07f960fa5032\';"\n )\n', (371, 482), False, 'from django.db import migrations\n')]
|
# --------------
# import the libraries
import numpy as np
import pandas as pd
import seaborn as sns
from sklearn.model_selection import train_test_split
import warnings
warnings.filterwarnings('ignore')
# Code starts here
df = pd.read_csv(path)
print(df.head())
X = df.drop('insuranceclaim',axis=1)
y = df['insuranceclaim']
X_train,X_test,y_train,y_test = train_test_split(X,y,test_size=0.2,random_state=6)
# Code ends here
# --------------
import matplotlib.pyplot as plt
# Code starts here
plt.boxplot(X_train['bmi'])
q_value = X_train['bmi'].quantile(0.95)
print(y_train.value_counts())
# Code ends here
# --------------
# Code starts here
import seaborn as sns
relation = X_train.corr()
print(relation)
sns.pairplot(X_train)
# Code ends here
# --------------
import seaborn as sns
import matplotlib.pyplot as plt
# Code starts here
cols = ['children','sex','region','smoker']
nrows=2
ncols=2
fig, axes = plt.subplots(nrows=2, ncols=2)
for i in range(nrows):
for j in range(ncols):
col = cols[i*2 + j]
sns.countplot(x=X_train[col], hue=y_train, ax=axes[i,j])
# Code ends here
# --------------
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
# parameters for grid search
parameters = {'C':[0.1,0.5,1,5]}
# Code starts here
lr = LogisticRegression()
grid = GridSearchCV(estimator=lr,param_grid=parameters)
grid.fit(X_train,y_train)
y_pred = grid.predict(X_test)
accuracy = accuracy_score(y_test,y_pred)
print(accuracy)
# Code ends here
# --------------
from sklearn.metrics import roc_auc_score
from sklearn import metrics
# Code starts here
score = roc_auc_score(y_test,y_pred)
y_test = y_test.values
y_pred_proba = grid.predict_proba(X_test)
y_pred_proba = y_pred_proba[:,1]
fpr,tpr,thresholds = metrics.roc_curve(y_test,y_pred_proba)
roc_auc = roc_auc_score(y_test,y_pred_proba)
plt.plot(fpr,tpr)
plt.title("Logistic model, auc="+str(roc_auc))
# Code ends here
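# Note: when run as a plain script (outside a notebook environment),
# plt.show() is needed to actually render the figures created above.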
|
[
"sklearn.model_selection.GridSearchCV",
"sklearn.metrics.roc_curve",
"warnings.filterwarnings",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"matplotlib.pyplot.boxplot",
"sklearn.metrics.accuracy_score",
"matplotlib.pyplot.plot",
"sklearn.metrics.roc_auc_score",
"sklearn.linear_model.LogisticRegression",
"seaborn.countplot",
"seaborn.pairplot",
"matplotlib.pyplot.subplots"
] |
[((170, 203), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (193, 203), False, 'import warnings\n'), ((230, 247), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (241, 247), True, 'import pandas as pd\n'), ((361, 414), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(6)'}), '(X, y, test_size=0.2, random_state=6)\n', (377, 414), False, 'from sklearn.model_selection import train_test_split\n'), ((504, 531), 'matplotlib.pyplot.boxplot', 'plt.boxplot', (["X_train['bmi']"], {}), "(X_train['bmi'])\n", (515, 531), True, 'import matplotlib.pyplot as plt\n'), ((727, 748), 'seaborn.pairplot', 'sns.pairplot', (['X_train'], {}), '(X_train)\n', (739, 748), True, 'import seaborn as sns\n'), ((933, 963), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'ncols': '(2)'}), '(nrows=2, ncols=2)\n', (945, 963), True, 'import matplotlib.pyplot as plt\n'), ((1398, 1418), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (1416, 1418), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1426, 1475), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', ([], {'estimator': 'lr', 'param_grid': 'parameters'}), '(estimator=lr, param_grid=parameters)\n', (1438, 1475), False, 'from sklearn.model_selection import GridSearchCV, RandomizedSearchCV\n'), ((1545, 1575), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1559, 1575), False, 'from sklearn.metrics import accuracy_score\n'), ((1728, 1757), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1741, 1757), False, 'from sklearn.metrics import roc_auc_score\n'), ((1885, 1924), 'sklearn.metrics.roc_curve', 'metrics.roc_curve', (['y_test', 'y_pred_proba'], {}), '(y_test, y_pred_proba)\n', (1902, 1924), False, 'from sklearn import metrics\n'), ((1935, 1970), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['y_test', 'y_pred_proba'], {}), '(y_test, y_pred_proba)\n', (1948, 1970), False, 'from sklearn.metrics import roc_auc_score\n'), ((1971, 1989), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (1979, 1989), True, 'import matplotlib.pyplot as plt\n'), ((1051, 1108), 'seaborn.countplot', 'sns.countplot', ([], {'x': 'X_train[col]', 'hue': 'y_train', 'ax': 'axes[i, j]'}), '(x=X_train[col], hue=y_train, ax=axes[i, j])\n', (1064, 1108), True, 'import seaborn as sns\n')]
|
def num_beats_test(threshold=0.7, voltage_array=None):
"""returns number_beats
This function calculates the number of beats in each strip.
The function requires the peakutils package to determine the indexes
of every peak/beat in the voltage array. The second line in the function
calculates the number of beats in the strip.
:param threshold: threshold value for peak voltage
:param voltage_array: array of voltage values
:type threshold: float
:type voltage_array: ndarray, none
:return: number_beats
:rtype: float
"""
import peakutils
import logging
from logging import config
logging.config.fileConfig('logger_config.ini', disable_existing_loggers=False)
indexes = peakutils.indexes(voltage_array, thres=threshold)
number_beats = len(indexes)
logging.info(number_beats)
return number_beats
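# Minimal usage sketch (assumes peakutils is installed and a logger_config.ini
# file is present in the working directory, as the function requires):
#
#     import numpy as np
#     t = np.linspace(0, 4 * np.pi, 400)
#     num_beats_test(threshold=0.7, voltage_array=np.sin(t))   # -> 2 peaks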
|
[
"logging.info",
"peakutils.indexes",
"logging.config.fileConfig"
] |
[((650, 728), 'logging.config.fileConfig', 'logging.config.fileConfig', (['"""logger_config.ini"""'], {'disable_existing_loggers': '(False)'}), "('logger_config.ini', disable_existing_loggers=False)\n", (675, 728), False, 'import logging\n'), ((744, 793), 'peakutils.indexes', 'peakutils.indexes', (['voltage_array'], {'thres': 'threshold'}), '(voltage_array, thres=threshold)\n', (761, 793), False, 'import peakutils\n'), ((830, 856), 'logging.info', 'logging.info', (['number_beats'], {}), '(number_beats)\n', (842, 856), False, 'import logging\n')]
|
from tqdm import tqdm
import json
SYMBOLS = {'(':'#bracket','&':"#amp","%": "#Percnt", "*":"#Ast", ":": "#Colon", "…": "#Period3", ",":"#Comma", "—":"#Dash","/":"#Slash",";":'#Semicolon'}
def load_mrp(mrp):
raw_mrp = json.loads(mrp)
nodes = raw_mrp["nodes"]
if len(nodes) < 3:
return False
dependent = raw_mrp["companion"][1]
dependent_dict = {}
for depend in dependent:
dependent_dict[depend["target"]] = depend
companion_list = raw_mrp["companion"][0]
companion = {}
for idx,comp in enumerate(companion_list):
try:
for key,value in comp.items():
companion[key] = value
if idx in dependent_dict:
companion[key]["dep"] = dependent_dict[idx]["label"]
else:
companion[key]["dep"] = ''
except AttributeError:
continue
single_potentials = []
gold_spans = []
for idx,node in enumerate(nodes):
single_tokens = []
if idx == 0:
continue
try:
if len(node["anchors"]) > 1:
anchors = [f'{anchor["from"]}:{anchor["to"]}' for anchor in node["anchors"]]
gold_span = node["label"]
for anchor in anchors:
if companion[anchor]["lemma"] != '.' and companion[anchor]["lemma"] != '"':
single_tokens.append(companion[anchor])
single_potentials.append(single_tokens)
gold_spans.append(gold_span)
except KeyError:
continue
return (gold_spans,single_potentials)
def single(single_tokens):
if single_tokens[0]["lemma"] in SYMBOLS:
return [SYMBOLS[single_tokens[0]["lemma"]]]
else:
return [single_tokens[0]["lemma"]]
def symbol_filter(single_tokens):
pred = []
for token in single_tokens:
if token["upos"] == "SYM" :
if token["lemma"] in SYMBOLS:
pred.append(SYMBOLS[token["lemma"]])
else:
pred.append(token["lemma"])
return pred
def pos_filter(single_tokens):
pred = []
for token in single_tokens:
if token["upos"] == "NOUN" or \
token["xpos"] == "NNP" or \
token["xpos"] == "RB" and token["upos"] != "ADV" or \
token["xpos"] == "RB" and token["dep"] == "cc" or \
token["xpos"] == "RB" and token["dep"] == "advmod" or\
token["upos"] == "VERB" and token["dep"] != "case" or \
token["upos"] == "ADJ" or \
token["upos"] == "NUM" or \
token["upos"] == "X" or \
"comp" in token["dep"] or\
token["dep"] == "obl":
pred.append(token["lemma"])
return pred
def match_twice(single_tokens):
pred = []
for token in single_tokens:
if token["upos"] == "PRON" and token["xpos"] == "PRP":
return ['#PersPron']
elif token["upos"] == "PROPN":
return [token["lemma"]]
elif token["xpos"] == "RB":
return [token["lemma"]]
elif token["upos"] == "CCONJ" or\
token["dep"] == "fixed":
pred.append(token["lemma"])
if pred:
return pred
else:
return [single_tokens[0]["lemma"]]
def dep_filter(single_tokens,pred):
for token in single_tokens:
if "dep" in token:
if token["dep"] == "fixed":
pred.append(token["lemma"])
return pred
def run(mrp):
    loaded = load_mrp(mrp)
    if loaded:
        gold_spans, single_potentials = loaded
    else:
        return
preds_spans = []
match = 0
for single_tokens in single_potentials:
if len(single_tokens) == 1:
pred = single(single_tokens)
else:
pred = symbol_filter(single_tokens)
if not pred:
pred = pos_filter(single_tokens)
# pred = dep_filter(single_tokens,pred)
if not pred:
pred = match_twice(single_tokens)
pred = '_'.join(pred)
preds_spans.append(pred)
for idx,pred in enumerate(preds_spans):
if gold_spans[idx].lower() == pred.lower():
match += 1
# elif gold_spans[idx] == "#Dash":
# print('ok')
# exit(0)
else:
print(gold_spans[idx].lower(),pred.lower())
return (len(gold_spans),match)
if __name__ == '__main__':
mrp = '/Users/jyq/Desktop/研一/7conll/mrp/2020/split/ptg/span_anchors/ptg_train_4000.aug.mrp'
with open(mrp) as fi:
mrp_lines = [line.strip() for line in fi.readlines()]
all = 0
match = 0
a = 0
for mrp_line in tqdm(mrp_lines):
        a += 1
# # print(a)
        result = run(mrp_line)
        if result:
            gold_num, match_num = result
        else:
            continue
all += gold_num
match += match_num
#debug
# gold_num,match_num = run(mrp_lines[3981])
print(match/all)
|
[
"tqdm.tqdm",
"json.loads"
] |
[((224, 239), 'json.loads', 'json.loads', (['mrp'], {}), '(mrp)\n', (234, 239), False, 'import json\n'), ((4712, 4727), 'tqdm.tqdm', 'tqdm', (['mrp_lines'], {}), '(mrp_lines)\n', (4716, 4727), False, 'from tqdm import tqdm\n')]
|
from pathlib import Path
import base64
with (Path(__file__).parent / 'icon.png').open('rb') as f:
MAINICON = base64.b64encode(f.read())
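# MAINICON holds the icon as base64 text; the raw PNG bytes can be recovered
# with base64.b64decode(MAINICON) when an image object is needed.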
|
[
"pathlib.Path"
] |
[((47, 61), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (51, 61), False, 'from pathlib import Path\n')]
|
# Generated by Django 3.1.6 on 2021-04-18 00:58
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('groovesweeperapp', '0002_auto_20210417_2331'),
]
operations = [
migrations.AlterModelManagers(
name='songmodel',
managers=[
],
),
]
|
[
"django.db.migrations.AlterModelManagers"
] |
[((236, 296), 'django.db.migrations.AlterModelManagers', 'migrations.AlterModelManagers', ([], {'name': '"""songmodel"""', 'managers': '[]'}), "(name='songmodel', managers=[])\n", (265, 296), False, 'from django.db import migrations\n')]
|
"""
Django settings for CrossTab project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
AUTH_USER_MODEL = 'app.ExtendedUser'
SOCIAL_AUTH_USER_MODEL = 'app.ExtendedUser'
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'app',
'social.apps.django_app.default'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
AUTHENTICATION_BACKENDS = (
'social.backends.facebook.FacebookOAuth2',
'django.contrib.auth.backends.ModelBackend'
)
TEMPLATE_CONTEXT_PROCESSORS = (
'social.apps.django_app.context_processors.backends',
'social.apps.django_app.context_processors.login_redirect',
'django.contrib.messages.context_processors.messages',
'django.contrib.auth.context_processors.auth'
)
SOCIAL_AUTH_FACEBOOK_KEY = '1534164213501831'
SOCIAL_AUTH_FACEBOOK_SECRET = '800e635231eae35da754443d5c497f2a'
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
ROOT_URLCONF = 'CrossTab.urls'
LOGIN_URL = '/login/'
WSGI_APPLICATION = 'CrossTab.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
CONTENT_TYPES = ['text/csv', 'application/octet-stream']
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(BASE_DIR, 'logs', 'full.log'),
'formatter': 'verbose'
},
},
'loggers': {
'django': {
'handlers': ['file'],
'propagate': True,
'level': 'DEBUG',
},
'app': {
'handlers': ['file'],
'level': 'DEBUG',
},
}
}
|
[
"os.path.dirname",
"os.path.join"
] |
[((2805, 2836), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (2817, 2836), False, 'import os\n'), ((356, 381), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (371, 381), False, 'import os\n'), ((2298, 2334), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (2310, 2334), False, 'import os\n'), ((3391, 3433), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""logs"""', '"""full.log"""'], {}), "(BASE_DIR, 'logs', 'full.log')\n", (3403, 3433), False, 'import os\n')]
|
import os
from dotenv import load_dotenv
# Initialize environment variables
load_dotenv('.env')
PATH_TO_ORCA = os.environ.get('PATH_TO_ORCA')
SPREADSHEET_ID = os.environ.get('SPREADSHEET_ID')
LIST_OF_BENCHS = ['geek_bench4', 'sling_shot_extreme', 'antutu7',
'battery_test']
GOOGLE_CREDENTIAL_FILE = os.environ.get('GOOGLE_CREDENTIAL_FILE')
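# The .env file this module expects would look something like the following
# (values are illustrative placeholders):
#
#     PATH_TO_ORCA=/usr/local/bin/orca
#     SPREADSHEET_ID=1AbCdEfGhIjKlMnOpQrStUvWxYz
#     GOOGLE_CREDENTIAL_FILE=credentials.json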
|
[
"dotenv.load_dotenv",
"os.environ.get"
] |
[((78, 97), 'dotenv.load_dotenv', 'load_dotenv', (['""".env"""'], {}), "('.env')\n", (89, 97), False, 'from dotenv import load_dotenv\n'), ((115, 145), 'os.environ.get', 'os.environ.get', (['"""PATH_TO_ORCA"""'], {}), "('PATH_TO_ORCA')\n", (129, 145), False, 'import os\n'), ((163, 195), 'os.environ.get', 'os.environ.get', (['"""SPREADSHEET_ID"""'], {}), "('SPREADSHEET_ID')\n", (177, 195), False, 'import os\n'), ((321, 361), 'os.environ.get', 'os.environ.get', (['"""GOOGLE_CREDENTIAL_FILE"""'], {}), "('GOOGLE_CREDENTIAL_FILE')\n", (335, 361), False, 'import os\n')]
|
#!/usr/bin/env python3
""" Launch and organize servers for the python tests. """
import logging
import os
import shutil
import subprocess
import threading
import queue
import psutil
# ### Private constants
_KIVALOO_TEST_DIR = "/tmp/kivaloo-test/"
# - this is relative to TEST_DIR
# - this may be subjected to rm -rf
_DISK_LBS = "lbs-disk"
# These filenames are relative to this directory
_BIN_LBS = "../../lbs/lbs"
_BIN_KVLDS = "../../kvlds/kvlds"
# ### Public constants
LBS_BLOCKSIZE = 512
LBS_SOCK = os.path.join(_KIVALOO_TEST_DIR, "kivaloo-lbs-sock")
KVLDS_SOCK = os.path.join(_KIVALOO_TEST_DIR, "kivaloo-kvlds-sock")
def _enqueue_output(output, queue_toadd):
""" Reads data from a file descriptor and queues it. Usually launched
in a separate thread to provide non-blocking output.
"""
for line in iter(output.readline, b''):
queue_toadd.put(line)
output.close()
class StartError(RuntimeError):
""" We failed to start a server. """
pass
class Server(object):
""" Base class for interacting with a server. """
# Constants (will be set by subclasses)
cmd = []
pidfile = None
sock = None
# Variables
proc = None
def __init__(self):
if not os.path.exists(_KIVALOO_TEST_DIR):
os.mkdir(_KIVALOO_TEST_DIR)
# Variables to support non-blocking stderr from the server
self.stderr_queue = queue.Queue()
self.stderr_thread = None
def _start(self):
""" Start the server, or find an existing server. Should be called
automatically by the subclass.
"""
# cls refers to the derived class. Concretely, this gives us:
# - one cls.proc shared between all Server_lbs objects
# - one cls.proc shared between all Server_kvlds objects
cls = type(self)
if cls.proc:
logging.info("Server %s, pid %i: exists; reusing", self.cmd[0],
self.get_pid_from_file())
return
proc_unowned = self._search_for_process()
if proc_unowned:
logging.info("Terminating old process: %s", proc_unowned)
proc_unowned.terminate()
if os.path.exists(cls.pidfile):
os.remove(cls.pidfile)
# Clean up previous files
if self.sock and os.path.exists(self.sock):
os.remove(self.sock)
# Initialize server and start gathering stderr
cls.proc = subprocess.Popen(self.cmd, stderr=subprocess.PIPE)
self.stderr_thread = threading.Thread(target=_enqueue_output,
args=(cls.proc.stderr,
self.stderr_queue))
self.stderr_thread.start()
# Check for server fail
ret = cls.proc.wait()
        if ret != 0:
msg = "Error when running:\n%s\n\texitcode: %i, stderr:\n%s" % (
" ".join(self.cmd), ret, self.get_stderr())
# We don't have a running server
cls.proc = None
raise StartError(msg)
# Get server's daemon-forked pid
logging.info("Server %s, pid %i: started", self.cmd[0],
self.get_pid_from_file())
def get_stderr(self):
""" Get stderr from the server. Does not block. """
if self.stderr_queue.qsize():
stderr = ""
while self.stderr_queue.qsize():
stderr += self.stderr_queue.get_nowait().decode()
else:
stderr = None
return stderr
@classmethod
def get_pid_from_file(cls):
""" Get the server's daemon-forked pid. """
if not os.path.exists(cls.pidfile):
return None
with open(cls.pidfile) as filep:
return int(filep.read())
@classmethod
def _search_for_process(cls):
""" Try to find an existing server process. """
# Check existing pidfile
pid = cls.get_pid_from_file()
if pid:
proc = psutil.Process(pid)
if proc.cmdline() == cls.cmd:
return proc
# Look for the process
for proc in psutil.process_iter():
if proc.cmdline() == cls.cmd:
return proc
return None
@classmethod
def shutdown(cls):
""" Shut down the server. """
# The pid of self.proc is the pre-forked server's pid, so we get the
# pid of the daemonized server.
proc_unowned = cls._search_for_process()
if proc_unowned is not None:
proc_unowned.terminate()
ret = proc_unowned.wait()
# Did the server exit correctly?
if ret is not None and ret != 0:
raise Exception("Failed to shut down properly.")
logging.info("Server %s exited", cls.cmd[0])
if os.path.exists(cls.pidfile):
os.remove(cls.pidfile)
# One way or another, the previous server is unusable.
cls.proc = None
class Server_lbs(Server):
""" Interact with an lbs server. """
# Constant for Server_lbs
disk = os.path.join(_KIVALOO_TEST_DIR, _DISK_LBS)
# Constants for Server
sock = LBS_SOCK
cmd = ("%s -s %s -d %s -b %i" % (_BIN_LBS, sock, disk,
LBS_BLOCKSIZE)).split()
pidfile = sock + ".pid"
# Variable shared between all Server_lbs objects
proc = None
def __init__(self):
super().__init__()
if not os.path.exists(self.disk):
os.mkdir(self.disk)
self._start()
def reset(self):
""" Delete the lbs data and start the server again. """
self.shutdown()
shutil.rmtree(self.disk)
os.mkdir(self.disk)
self._start()
class Server_kvlds(Server):
""" Interact with a kvlds server. """
# Constant for Server_kvlds
sock_lbs = LBS_SOCK
# Constants for Server
sock = os.path.join(_KIVALOO_TEST_DIR, KVLDS_SOCK)
cmd = ("%s -s %s -l %s" % (_BIN_KVLDS, sock, sock_lbs)).split()
pidfile = sock + ".pid"
# Variable shared between all Server_kvlds objects
proc = None
def __init__(self):
super().__init__()
self._start()
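# Usage sketch (assumes the lbs and kvlds binaries have been built at the
# relative paths configured above):
#
#     lbs = Server_lbs()        # starts (or reuses) the lbs daemon
#     kvlds = Server_kvlds()    # kvlds connects to lbs through LBS_SOCK
#     ...                       # exercise the servers via KVLDS_SOCK
#     Server_kvlds.shutdown()
#     Server_lbs.shutdown()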
|
[
"threading.Thread",
"subprocess.Popen",
"psutil.process_iter",
"os.mkdir",
"os.remove",
"psutil.Process",
"os.path.exists",
"logging.info",
"shutil.rmtree",
"os.path.join",
"queue.Queue"
] |
[((510, 561), 'os.path.join', 'os.path.join', (['_KIVALOO_TEST_DIR', '"""kivaloo-lbs-sock"""'], {}), "(_KIVALOO_TEST_DIR, 'kivaloo-lbs-sock')\n", (522, 561), False, 'import os\n'), ((575, 628), 'os.path.join', 'os.path.join', (['_KIVALOO_TEST_DIR', '"""kivaloo-kvlds-sock"""'], {}), "(_KIVALOO_TEST_DIR, 'kivaloo-kvlds-sock')\n", (587, 628), False, 'import os\n'), ((5134, 5176), 'os.path.join', 'os.path.join', (['_KIVALOO_TEST_DIR', '_DISK_LBS'], {}), '(_KIVALOO_TEST_DIR, _DISK_LBS)\n', (5146, 5176), False, 'import os\n'), ((5949, 5992), 'os.path.join', 'os.path.join', (['_KIVALOO_TEST_DIR', 'KVLDS_SOCK'], {}), '(_KIVALOO_TEST_DIR, KVLDS_SOCK)\n', (5961, 5992), False, 'import os\n'), ((1407, 1420), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (1418, 1420), False, 'import queue\n'), ((2462, 2512), 'subprocess.Popen', 'subprocess.Popen', (['self.cmd'], {'stderr': 'subprocess.PIPE'}), '(self.cmd, stderr=subprocess.PIPE)\n', (2478, 2512), False, 'import subprocess\n'), ((2542, 2630), 'threading.Thread', 'threading.Thread', ([], {'target': '_enqueue_output', 'args': '(cls.proc.stderr, self.stderr_queue)'}), '(target=_enqueue_output, args=(cls.proc.stderr, self.\n stderr_queue))\n', (2558, 2630), False, 'import threading\n'), ((4171, 4192), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (4190, 4192), False, 'import psutil\n'), ((5708, 5732), 'shutil.rmtree', 'shutil.rmtree', (['self.disk'], {}), '(self.disk)\n', (5721, 5732), False, 'import shutil\n'), ((5741, 5760), 'os.mkdir', 'os.mkdir', (['self.disk'], {}), '(self.disk)\n', (5749, 5760), False, 'import os\n'), ((1236, 1269), 'os.path.exists', 'os.path.exists', (['_KIVALOO_TEST_DIR'], {}), '(_KIVALOO_TEST_DIR)\n', (1250, 1269), False, 'import os\n'), ((1283, 1310), 'os.mkdir', 'os.mkdir', (['_KIVALOO_TEST_DIR'], {}), '(_KIVALOO_TEST_DIR)\n', (1291, 1310), False, 'import os\n'), ((2089, 2146), 'logging.info', 'logging.info', (['"""Terminating old process: %s"""', 'proc_unowned'], {}), "('Terminating old process: %s', proc_unowned)\n", (2101, 2146), False, 'import logging\n'), ((2199, 2226), 'os.path.exists', 'os.path.exists', (['cls.pidfile'], {}), '(cls.pidfile)\n', (2213, 2226), False, 'import os\n'), ((2327, 2352), 'os.path.exists', 'os.path.exists', (['self.sock'], {}), '(self.sock)\n', (2341, 2352), False, 'import os\n'), ((2366, 2386), 'os.remove', 'os.remove', (['self.sock'], {}), '(self.sock)\n', (2375, 2386), False, 'import os\n'), ((3684, 3711), 'os.path.exists', 'os.path.exists', (['cls.pidfile'], {}), '(cls.pidfile)\n', (3698, 3711), False, 'import os\n'), ((4029, 4048), 'psutil.Process', 'psutil.Process', (['pid'], {}), '(pid)\n', (4043, 4048), False, 'import psutil\n'), ((4808, 4852), 'logging.info', 'logging.info', (['"""Server %s exited"""', 'cls.cmd[0]'], {}), "('Server %s exited', cls.cmd[0])\n", (4820, 4852), False, 'import logging\n'), ((4868, 4895), 'os.path.exists', 'os.path.exists', (['cls.pidfile'], {}), '(cls.pidfile)\n', (4882, 4895), False, 'import os\n'), ((5508, 5533), 'os.path.exists', 'os.path.exists', (['self.disk'], {}), '(self.disk)\n', (5522, 5533), False, 'import os\n'), ((5547, 5566), 'os.mkdir', 'os.mkdir', (['self.disk'], {}), '(self.disk)\n', (5555, 5566), False, 'import os\n'), ((2244, 2266), 'os.remove', 'os.remove', (['cls.pidfile'], {}), '(cls.pidfile)\n', (2253, 2266), False, 'import os\n'), ((4913, 4935), 'os.remove', 'os.remove', (['cls.pidfile'], {}), '(cls.pidfile)\n', (4922, 4935), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-06-12 11:19
from __future__ import unicode_literals
import bluebottle.utils.fields
from decimal import Decimal
from django.db import migrations, models
import django.db.models.deletion
import djmoney.models.fields
class Migration(migrations.Migration):
dependencies = [
('funding', '0008_auto_20190612_0941'),
]
operations = [
migrations.AlterModelOptions(
name='reward',
options={'ordering': ['-activity__created', 'amount'], 'verbose_name': 'Gift', 'verbose_name_plural': 'Gifts'},
),
migrations.AddField(
model_name='donation',
name='fundraiser',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='donations', to='funding.Fundraiser'),
),
migrations.AddField(
model_name='donation',
name='reward',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='donations', to='funding.Reward'),
),
migrations.AlterField(
model_name='budgetline',
name='activity',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='budgetlines', to='funding.Funding'),
),
]
|
[
"django.db.models.ForeignKey",
"django.db.migrations.AlterModelOptions"
] |
[((422, 586), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""reward"""', 'options': "{'ordering': ['-activity__created', 'amount'], 'verbose_name': 'Gift',\n 'verbose_name_plural': 'Gifts'}"}), "(name='reward', options={'ordering': [\n '-activity__created', 'amount'], 'verbose_name': 'Gift',\n 'verbose_name_plural': 'Gifts'})\n", (450, 586), False, 'from django.db import migrations, models\n'), ((727, 855), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""donations"""', 'to': '"""funding.Fundraiser"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='donations', to='funding.Fundraiser')\n", (744, 855), False, 'from django.db import migrations, models\n'), ((973, 1097), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""donations"""', 'to': '"""funding.Reward"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='donations', to='funding.Reward')\n", (990, 1097), False, 'from django.db import migrations, models\n'), ((1221, 1338), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""budgetlines"""', 'to': '"""funding.Funding"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='budgetlines', to='funding.Funding')\n", (1238, 1338), False, 'from django.db import migrations, models\n')]
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...vss_client import VssClient
from . import models
class ServiceHooksClient(VssClient):
"""ServiceHooks
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(ServiceHooksClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = None
def get_consumer_action(self, consumer_id, consumer_action_id, publisher_id=None):
"""GetConsumerAction.
:param str consumer_id:
:param str consumer_action_id:
:param str publisher_id:
:rtype: :class:`<ConsumerAction> <service-hooks.v4_0.models.ConsumerAction>`
"""
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
if consumer_action_id is not None:
route_values['consumerActionId'] = self._serialize.url('consumer_action_id', consumer_action_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ConsumerAction', response)
def list_consumer_actions(self, consumer_id, publisher_id=None):
"""ListConsumerActions.
:param str consumer_id:
:param str publisher_id:
:rtype: [ConsumerAction]
"""
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',
version='4.0',
route_values=route_values,
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[ConsumerAction]', response)
def get_consumer(self, consumer_id, publisher_id=None):
"""GetConsumer.
:param str consumer_id:
:param str publisher_id:
:rtype: :class:`<Consumer> <service-hooks.v4_0.models.Consumer>`
"""
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Consumer', response)
def list_consumers(self, publisher_id=None):
"""ListConsumers.
:param str publisher_id:
:rtype: [Consumer]
"""
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',
version='4.0',
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[Consumer]', response)
def get_event_type(self, publisher_id, event_type_id):
"""GetEventType.
:param str publisher_id:
:param str event_type_id:
:rtype: :class:`<EventTypeDescriptor> <service-hooks.v4_0.models.EventTypeDescriptor>`
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
if event_type_id is not None:
route_values['eventTypeId'] = self._serialize.url('event_type_id', event_type_id, 'str')
response = self._send(http_method='GET',
location_id='db4777cd-8e08-4a84-8ba3-c974ea033718',
version='4.0',
route_values=route_values)
return self._deserialize('EventTypeDescriptor', response)
def list_event_types(self, publisher_id):
"""ListEventTypes.
:param str publisher_id:
:rtype: [EventTypeDescriptor]
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='db4777cd-8e08-4a84-8ba3-c974ea033718',
version='4.0',
route_values=route_values,
returns_collection=True)
return self._deserialize('[EventTypeDescriptor]', response)
def publish_external_event(self, publisher_id, channel_id=None):
"""PublishExternalEvent.
:param str publisher_id:
:param str channel_id:
:rtype: [PublisherEvent]
"""
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
if channel_id is not None:
query_parameters['channelId'] = self._serialize.query('channel_id', channel_id, 'str')
response = self._send(http_method='POST',
location_id='e0e0a1c9-beeb-4fb7-a8c8-b18e3161a50e',
version='4.0',
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[PublisherEvent]', response)
def get_notification(self, subscription_id, notification_id):
"""GetNotification.
:param str subscription_id:
:param int notification_id:
:rtype: :class:`<Notification> <service-hooks.v4_0.models.Notification>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
if notification_id is not None:
route_values['notificationId'] = self._serialize.url('notification_id', notification_id, 'int')
response = self._send(http_method='GET',
location_id='0c62d343-21b0-4732-997b-017fde84dc28',
version='4.0',
route_values=route_values)
return self._deserialize('Notification', response)
def get_notifications(self, subscription_id, max_results=None, status=None, result=None):
"""GetNotifications.
:param str subscription_id:
:param int max_results:
:param str status:
:param str result:
:rtype: [Notification]
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
query_parameters = {}
if max_results is not None:
query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')
if status is not None:
query_parameters['status'] = self._serialize.query('status', status, 'str')
if result is not None:
query_parameters['result'] = self._serialize.query('result', result, 'str')
response = self._send(http_method='GET',
location_id='0c62d343-21b0-4732-997b-017fde84dc28',
version='4.0',
route_values=route_values,
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[Notification]', response)
def query_notifications(self, query):
"""QueryNotifications.
:param :class:`<NotificationsQuery> <service-hooks.v4_0.models.NotificationsQuery>` query:
:rtype: :class:`<NotificationsQuery> <service-hooks.v4_0.models.NotificationsQuery>`
"""
content = self._serialize.body(query, 'NotificationsQuery')
response = self._send(http_method='POST',
location_id='1a57562f-160a-4b5c-9185-905e95b39d36',
version='4.0',
content=content)
return self._deserialize('NotificationsQuery', response)
def query_input_values(self, input_values_query, publisher_id):
"""QueryInputValues.
:param :class:`<InputValuesQuery> <service-hooks.v4_0.models.InputValuesQuery>` input_values_query:
:param str publisher_id:
:rtype: :class:`<InputValuesQuery> <service-hooks.v4_0.models.InputValuesQuery>`
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
content = self._serialize.body(input_values_query, 'InputValuesQuery')
response = self._send(http_method='POST',
location_id='d815d352-a566-4dc1-a3e3-fd245acf688c',
version='4.0',
route_values=route_values,
content=content)
return self._deserialize('InputValuesQuery', response)
def get_publisher(self, publisher_id):
"""GetPublisher.
:param str publisher_id:
:rtype: :class:`<Publisher> <service-hooks.v4_0.models.Publisher>`
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',
version='4.0',
route_values=route_values)
return self._deserialize('Publisher', response)
def list_publishers(self):
"""ListPublishers.
:rtype: [Publisher]
"""
response = self._send(http_method='GET',
location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',
version='4.0',
returns_collection=True)
return self._deserialize('[Publisher]', response)
def query_publishers(self, query):
"""QueryPublishers.
:param :class:`<PublishersQuery> <service-hooks.v4_0.models.PublishersQuery>` query:
:rtype: :class:`<PublishersQuery> <service-hooks.v4_0.models.PublishersQuery>`
"""
content = self._serialize.body(query, 'PublishersQuery')
response = self._send(http_method='POST',
location_id='99b44a8a-65a8-4670-8f3e-e7f7842cce64',
version='4.0',
content=content)
return self._deserialize('PublishersQuery', response)
def create_subscription(self, subscription):
"""CreateSubscription.
:param :class:`<Subscription> <service-hooks.v4_0.models.Subscription>` subscription:
:rtype: :class:`<Subscription> <service-hooks.v4_0.models.Subscription>`
"""
content = self._serialize.body(subscription, 'Subscription')
response = self._send(http_method='POST',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='4.0',
content=content)
return self._deserialize('Subscription', response)
def delete_subscription(self, subscription_id):
"""DeleteSubscription.
:param str subscription_id:
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
self._send(http_method='DELETE',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='4.0',
route_values=route_values)
def get_subscription(self, subscription_id):
"""GetSubscription.
:param str subscription_id:
:rtype: :class:`<Subscription> <service-hooks.v4_0.models.Subscription>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
response = self._send(http_method='GET',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='4.0',
route_values=route_values)
return self._deserialize('Subscription', response)
def list_subscriptions(self, publisher_id=None, event_type=None, consumer_id=None, consumer_action_id=None):
"""ListSubscriptions.
:param str publisher_id:
:param str event_type:
:param str consumer_id:
:param str consumer_action_id:
:rtype: [Subscription]
"""
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
if event_type is not None:
query_parameters['eventType'] = self._serialize.query('event_type', event_type, 'str')
if consumer_id is not None:
query_parameters['consumerId'] = self._serialize.query('consumer_id', consumer_id, 'str')
if consumer_action_id is not None:
query_parameters['consumerActionId'] = self._serialize.query('consumer_action_id', consumer_action_id, 'str')
response = self._send(http_method='GET',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='4.0',
query_parameters=query_parameters,
returns_collection=True)
return self._deserialize('[Subscription]', response)
def replace_subscription(self, subscription, subscription_id=None):
"""ReplaceSubscription.
:param :class:`<Subscription> <service-hooks.v4_0.models.Subscription>` subscription:
:param str subscription_id:
:rtype: :class:`<Subscription> <service-hooks.v4_0.models.Subscription>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
content = self._serialize.body(subscription, 'Subscription')
response = self._send(http_method='PUT',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='4.0',
route_values=route_values,
content=content)
return self._deserialize('Subscription', response)
def create_subscriptions_query(self, query):
"""CreateSubscriptionsQuery.
:param :class:`<SubscriptionsQuery> <service-hooks.v4_0.models.SubscriptionsQuery>` query:
:rtype: :class:`<SubscriptionsQuery> <service-hooks.v4_0.models.SubscriptionsQuery>`
"""
content = self._serialize.body(query, 'SubscriptionsQuery')
response = self._send(http_method='POST',
location_id='c7c3c1cf-9e05-4c0d-a425-a0f922c2c6ed',
version='4.0',
content=content)
return self._deserialize('SubscriptionsQuery', response)
def create_test_notification(self, test_notification, use_real_data=None):
"""CreateTestNotification.
:param :class:`<Notification> <service-hooks.v4_0.models.Notification>` test_notification:
:param bool use_real_data:
:rtype: :class:`<Notification> <service-hooks.v4_0.models.Notification>`
"""
query_parameters = {}
if use_real_data is not None:
query_parameters['useRealData'] = self._serialize.query('use_real_data', use_real_data, 'bool')
content = self._serialize.body(test_notification, 'Notification')
response = self._send(http_method='POST',
location_id='1139462c-7e27-4524-a997-31b9b73551fe',
version='4.0',
query_parameters=query_parameters,
content=content)
return self._deserialize('Notification', response)
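# Usage sketch; the account URL and token variable are illustrative
# placeholders, and BasicAuthentication is one of msrest's credential types:
#
#     from msrest.authentication import BasicAuthentication
#     creds = BasicAuthentication('', personal_access_token)
#     client = ServiceHooksClient(base_url='https://myaccount.visualstudio.com', creds=creds)
#     for publisher in client.list_publishers():
#         print(publisher.id)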
|
[
"msrest.Serializer",
"msrest.Deserializer"
] |
[((1054, 1079), 'msrest.Serializer', 'Serializer', (['client_models'], {}), '(client_models)\n', (1064, 1079), False, 'from msrest import Serializer, Deserializer\n'), ((1108, 1135), 'msrest.Deserializer', 'Deserializer', (['client_models'], {}), '(client_models)\n', (1120, 1135), False, 'from msrest import Serializer, Deserializer\n')]
|
from __future__ import print_function, unicode_literals
import errno
import os
import posixpath
import re
import sys
import shutil
from abc import ABCMeta, abstractmethod
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
import requests
from flask import safe_join
from .constants import (
STYLE_URLS_SOURCE, STYLE_URLS_RES, STYLE_ASSET_URLS_RE,
STYLE_ASSET_URLS_SUB_FORMAT)
from .vendor.six import add_metaclass
@add_metaclass(ABCMeta)
class ReadmeAssetManager(object):
"""
Manages the style and font assets rendered with Readme pages.
Set cache_path to None to disable caching.
"""
def __init__(self, cache_path, style_urls=None, quiet=None):
super(ReadmeAssetManager, self).__init__()
self.cache_path = cache_path
self.style_urls = list(style_urls) if style_urls else []
self.styles = []
self.quiet = quiet
def _strip_url_params(self, url):
return url.rsplit('?', 1)[0].rsplit('#', 1)[0]
def clear(self):
"""
Clears the asset cache.
"""
if self.cache_path and os.path.exists(self.cache_path):
shutil.rmtree(self.cache_path)
def cache_filename(self, url):
"""
Gets a suitable relative filename for the specified URL.
"""
# FUTURE: Use url exactly instead of flattening it here
url = posixpath.basename(url)
return self._strip_url_params(url)
@abstractmethod
def retrieve_styles(self, asset_url_path):
"""
Get style URLs from the source HTML page and specified cached asset
URL path.
"""
pass
class GitHubAssetManager(ReadmeAssetManager):
"""
Reads the styles used for rendering Readme pages.
Set cache_path to None to disable caching.
"""
def __init__(self, cache_path, style_urls=None, quiet=None):
super(GitHubAssetManager, self).__init__(cache_path, style_urls, quiet)
def _get_style_urls(self, asset_url_path):
"""
Gets the specified resource and parses all style URLs and their
assets in the form of the specified patterns.
"""
# Check cache
if self.cache_path:
cached = self._get_cached_style_urls(asset_url_path)
# Skip fetching styles if there's any already cached
if cached:
return cached
# Find style URLs
r = requests.get(STYLE_URLS_SOURCE)
if not 200 <= r.status_code < 300:
print('Warning: retrieving styles gave status code',
r.status_code, file=sys.stderr)
urls = []
for style_urls_re in STYLE_URLS_RES:
urls.extend(re.findall(style_urls_re, r.text))
if not urls:
print('Warning: no styles found - see https://github.com/joeyespo/'
'grip/issues/265', file=sys.stderr)
# Cache the styles and their assets
if self.cache_path:
is_cached = self._cache_contents(urls, asset_url_path)
if is_cached:
urls = self._get_cached_style_urls(asset_url_path)
return urls
def _get_cached_style_urls(self, asset_url_path):
"""
Gets the URLs of the cached styles.
"""
try:
cached_styles = os.listdir(self.cache_path)
except IOError as ex:
if ex.errno != errno.ENOENT and ex.errno != errno.ESRCH:
raise
return []
except OSError:
return []
return [posixpath.join(asset_url_path, style)
for style in cached_styles
if style.endswith('.css')]
def _cache_contents(self, style_urls, asset_url_path):
"""
Fetches the given URLs and caches their contents
and their assets in the given directory.
"""
files = {}
asset_urls = []
for style_url in style_urls:
if not self.quiet:
print(' * Downloading style', style_url, file=sys.stderr)
r = requests.get(style_url)
if not 200 <= r.status_code < 300:
print(' -> Warning: Style request responded with',
r.status_code, file=sys.stderr)
files = None
continue
asset_content = r.text
# Find assets and replace their base URLs with the cache directory
for url in re.findall(STYLE_ASSET_URLS_RE, asset_content):
asset_urls.append(urljoin(style_url, url))
contents = re.sub(
STYLE_ASSET_URLS_RE,
STYLE_ASSET_URLS_SUB_FORMAT.format(asset_url_path.rstrip('/')),
asset_content)
# Prepare cache
if files is not None:
filename = self.cache_filename(style_url)
files[filename] = contents.encode('utf-8')
for asset_url in asset_urls:
if not self.quiet:
print(' * Downloading asset', asset_url, file=sys.stderr)
# Retrieve binary file and show message
r = requests.get(asset_url, stream=True)
if not 200 <= r.status_code < 300:
print(' -> Warning: Asset request responded with',
r.status_code, file=sys.stderr)
files = None
continue
# Prepare cache
if files is not None:
filename = self.cache_filename(asset_url)
files[filename] = r.raw.read(decode_content=True)
# Skip caching if something went wrong to try again next time
if not files:
return False
# Cache files if all downloads were successful
cache = {}
for relname in files:
cache[safe_join(self.cache_path, relname)] = files[relname]
if not os.path.exists(self.cache_path):
os.makedirs(self.cache_path)
for filename in cache:
with open(filename, 'wb') as f:
f.write(cache[filename])
if not self.quiet:
print(
' * Cached all downloads in', self.cache_path, file=sys.stderr)
return True
def retrieve_styles(self, asset_url_path):
"""
Get style URLs from the source HTML page and specified cached
asset base URL.
"""
if not asset_url_path.endswith('/'):
asset_url_path += '/'
self.style_urls.extend(self._get_style_urls(asset_url_path))
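# Usage sketch; the cache directory and asset URL path below are illustrative:
#
#     mgr = GitHubAssetManager(cache_path='/tmp/grip-cache', quiet=True)
#     mgr.retrieve_styles('/__/grip/asset/')
#     print(mgr.style_urls)   # stylesheet URLs, rewritten to the asset path when cached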
|
[
"urllib.parse.urljoin",
"os.makedirs",
"flask.safe_join",
"os.path.exists",
"posixpath.join",
"re.findall",
"requests.get",
"posixpath.basename",
"shutil.rmtree",
"os.listdir"
] |
[((1411, 1434), 'posixpath.basename', 'posixpath.basename', (['url'], {}), '(url)\n', (1429, 1434), False, 'import posixpath\n'), ((2458, 2489), 'requests.get', 'requests.get', (['STYLE_URLS_SOURCE'], {}), '(STYLE_URLS_SOURCE)\n', (2470, 2489), False, 'import requests\n'), ((1132, 1163), 'os.path.exists', 'os.path.exists', (['self.cache_path'], {}), '(self.cache_path)\n', (1146, 1163), False, 'import os\n'), ((1177, 1207), 'shutil.rmtree', 'shutil.rmtree', (['self.cache_path'], {}), '(self.cache_path)\n', (1190, 1207), False, 'import shutil\n'), ((3343, 3370), 'os.listdir', 'os.listdir', (['self.cache_path'], {}), '(self.cache_path)\n', (3353, 3370), False, 'import os\n'), ((3576, 3613), 'posixpath.join', 'posixpath.join', (['asset_url_path', 'style'], {}), '(asset_url_path, style)\n', (3590, 3613), False, 'import posixpath\n'), ((4092, 4115), 'requests.get', 'requests.get', (['style_url'], {}), '(style_url)\n', (4104, 4115), False, 'import requests\n'), ((4475, 4521), 're.findall', 're.findall', (['STYLE_ASSET_URLS_RE', 'asset_content'], {}), '(STYLE_ASSET_URLS_RE, asset_content)\n', (4485, 4521), False, 'import re\n'), ((5151, 5187), 'requests.get', 'requests.get', (['asset_url'], {'stream': '(True)'}), '(asset_url, stream=True)\n', (5163, 5187), False, 'import requests\n'), ((5906, 5937), 'os.path.exists', 'os.path.exists', (['self.cache_path'], {}), '(self.cache_path)\n', (5920, 5937), False, 'import os\n'), ((5951, 5979), 'os.makedirs', 'os.makedirs', (['self.cache_path'], {}), '(self.cache_path)\n', (5962, 5979), False, 'import os\n'), ((2735, 2768), 're.findall', 're.findall', (['style_urls_re', 'r.text'], {}), '(style_urls_re, r.text)\n', (2745, 2768), False, 'import re\n'), ((5837, 5872), 'flask.safe_join', 'safe_join', (['self.cache_path', 'relname'], {}), '(self.cache_path, relname)\n', (5846, 5872), False, 'from flask import safe_join\n'), ((4557, 4580), 'urllib.parse.urljoin', 'urljoin', (['style_url', 'url'], {}), '(style_url, url)\n', (4564, 4580), False, 'from urllib.parse import urljoin\n')]
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botbuilder.core import MessageFactory, TurnContext
from botbuilder.schema import ChannelAccount
from .dialog_bot import DialogBot
class RichCardsBot(DialogBot):
"""
RichCardsBot prompts a user to select a Rich Card and then returns the card
that matches the user's selection.
"""
def __init__(self, conversation_state, user_state, dialog):
super().__init__(conversation_state, user_state, dialog)
async def on_members_added_activity(
self, members_added: ChannelAccount, turn_context: TurnContext
):
for member in members_added:
if member.id != turn_context.activity.recipient.id:
reply = MessageFactory.text(
"Welcome to CardBot. "
+ "This bot will show you different types of Rich Cards. "
+ "Please type anything to get started."
)
await turn_context.send_activity(reply)
|
[
"botbuilder.core.MessageFactory.text"
] |
[((773, 924), 'botbuilder.core.MessageFactory.text', 'MessageFactory.text', (["('Welcome to CardBot. ' +\n 'This bot will show you different types of Rich Cards. ' +\n 'Please type anything to get started.')"], {}), "('Welcome to CardBot. ' +\n 'This bot will show you different types of Rich Cards. ' +\n 'Please type anything to get started.')\n", (792, 924), False, 'from botbuilder.core import MessageFactory, TurnContext\n')]
|
"""
xml - utilities for XML parsing
===============================
This module is not intended for end users. It implements the abstract classes
for all XML parsers, :py:class:`XML` and :py:class:`IndexedXML`, and some utility functions.
Dependencies
------------
This module requires :py:mod:`lxml` and :py:mod:`numpy`.
--------------------------------------------------------------------------------
"""
# Copyright 2012 <NAME>, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import socket
from traceback import format_exc
import warnings
from collections import OrderedDict, namedtuple
from itertools import islice
from lxml import etree
import numpy as np
from .auxiliary import FileReader, PyteomicsError, basestring, _file_obj, HierarchicalOffsetIndex
from .auxiliary import unitint, unitfloat, unitstr, cvstr
from .auxiliary import _keepstate_method as _keepstate
from .auxiliary import BinaryDataArrayTransformer
from .auxiliary import TaskMappingMixin, IndexedReaderMixin, IndexSavingMixin
try: # Python 2.7
from urllib2 import urlopen, URLError
except ImportError: # Python 3.x
from urllib.request import urlopen, URLError
def _local_name(element):
"""Strip namespace from the XML element's name"""
tag = element.tag
if tag and tag[0] == '{':
return tag.rpartition('}')[2]
return tag
def xsd_parser(schema_url):
"""Parse an XSD file from the specified URL into a schema dictionary
that can be used by :class:`XML` parsers to automatically cast data to
the appropriate type.
Parameters
----------
schema_url : str
The URL to retrieve the schema from
Returns
-------
dict
"""
ret = {}
if not (schema_url.startswith('http://') or
schema_url.startswith('https://') or
schema_url.startswith('file://')):
schema_url = 'file://' + schema_url
schema_file = urlopen(schema_url)
p = etree.XMLParser(remove_comments=True)
schema_tree = etree.parse(schema_file, parser=p)
types = {'ints': {'int', 'long', 'nonNegativeInteger', 'positiveInt',
'integer', 'unsignedInt'},
'floats': {'float', 'double'},
'bools': {'boolean'},
'intlists': {'listOfIntegers'},
'floatlists': {'listOfFloats'},
'charlists': {'listOfChars', 'listOfCharsOrAny'}}
for k, val in types.items():
tuples = set()
for elem in schema_tree.iter():
if _local_name(elem) == 'attribute' and elem.attrib.get(
'type', '').split(':')[-1] in val:
anc = elem.getparent()
anc_name = _local_name(anc)
while not (
(anc_name == 'complexType' and 'name' in anc.attrib) or anc_name == 'element'):
                anc = anc.getparent()
                if anc is None:
                    break
                anc_name = _local_name(anc)
else:
if anc_name == 'complexType':
elnames = [x.attrib['name'] for x in
schema_tree.iter()
if x.attrib.get('type', '').split(':')[-1] == anc.attrib['name']]
else:
elnames = (anc.attrib['name'],)
for elname in elnames:
tuples.add(
(elname, elem.attrib['name']))
ret[k] = tuples
ret['lists'] = set(elem.attrib['name'] for elem in schema_tree.xpath(
'//*[local-name()="element"]') if 'name' in elem.attrib and
elem.attrib.get('maxOccurs', '1') != '1')
return ret
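# Illustrative sketch of how the returned mapping is used (hypothetical local
# schema URL): schema = xsd_parser('file:///tmp/example.xsd') yields, e.g.,
# schema['ints'] as a set of (element, attribute) pairs declared as integers,
# and schema['lists'] as the set of element names that may occur more than once.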
class XMLValueConverter(object):
# Adapted from http://stackoverflow.com/questions/2764269/parsing-an-xsduration-datatype-into-a-python-datetime-timedelta-object
_duration_parser = re.compile(
(r'(?P<sign>-?)P(?:(?P<years>\d+\.?\d*)Y)?(?:(?P<months>\d+\.?\d*)M)?(?:(?P<days>\d+\.?\d*)D)?(?:T(?:(?P<hours>\d+\.?\d*)H)?(?:(?P<minutes>\d+\.?\d*)M)?(?:(?P<seconds>\d+\.?\d*)S)?)?'))
@classmethod
def duration_str_to_float(cls, s):
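        # e.g. 'PT1H30M15.5S' -> unitfloat(~90.26, 'minute'); strings that do not
        # start with 'P' fall through to plain unitfloat/unitstr conversion below.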
# Not a duration, so pass along
if not s.startswith('P'):
try:
return unitfloat(s, 'duration')
except ValueError:
return unitstr(s, 'duration')
match = cls._duration_parser.search(s)
if match:
matchdict = match.groupdict()
hours = float(matchdict.get('hours', 0) or 0)
minutes = float(matchdict.get('minutes', 0) or 0)
seconds = float(matchdict.get('seconds', 0) or 0)
minutes += hours * 60.
minutes += (seconds / 60.)
return unitfloat(minutes, 'minute')
else:
return unitstr(s, 'duration')
@classmethod
def str_to_bool(cls, s):
if s.lower() in {'true', '1', 'y'}:
return True
if s.lower() in {'false', '0', 'n'}:
return False
raise PyteomicsError('Cannot convert string to bool: ' + s)
@classmethod
def str_to_num(cls, s, numtype):
return numtype(s) if s else None
@classmethod
def to(cls, t):
def convert_from(s):
return cls.str_to_num(s, t)
return convert_from
@classmethod
def converters(cls):
return {
'ints': cls.to(unitint), 'floats': cls.to(unitfloat), 'bools': cls.str_to_bool,
'intlists': lambda x: np.fromstring(x.replace('\n', ' '), dtype=int, sep=' '),
'floatlists': lambda x: np.fromstring(x.replace('\n', ' '), sep=' '),
'charlists': list,
'duration': cls.duration_str_to_float
}
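    # e.g. converters()['intlists']('1 2 3') -> array([1, 2, 3]) via np.fromstring.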
class _XMLParam(namedtuple("XMLParam", ("name", "value", "type"))):
'''A holder for semantic parameters used in several common XML formats
Attributes
----------
name: :class:`~.cvstr`
The name of the attribute, carrying the accession and unit information
value: :class:`~.unitfloat`, :class:`~.unitint` or :class:`~.unitstr`
The value of the parameter
type: :class:`str`
The parameter's local XML tag name.
'''
__slots__ = ()
def is_empty(self):
value = self.value
return value == "" or value is None
class XML(FileReader):
"""Base class for all format-specific XML parsers. The instances can be used
as context managers and as iterators.
"""
# Configurable data
file_format = 'XML'
_root_element = None
_default_schema = {}
_read_schema = False
_default_version = 0
_default_iter_tag = None
_default_iter_path = None
_structures_to_flatten = []
_schema_location_param = 'schemaLocation'
_default_id_attr = 'id'
_huge_tree = False
_retrieve_refs_enabled = None # only some subclasses implement this
_iterative = True
# Configurable plugin logic
_converters = XMLValueConverter.converters()
_element_handlers = {}
# Must be implemented by subclasses
def _get_info_smart(self, element, **kwargs):
raise NotImplementedError
def __init__(self, source, read_schema=None, iterative=None, build_id_cache=False, **kwargs):
"""Create an XML parser object.
Parameters
----------
source : str or file
File name or file-like object corresponding to an XML file.
read_schema : bool, optional
Defines whether schema file referenced in the file header
should be used to extract information about value conversion.
Default is :py:const:`False`.
iterative : bool, optional
Defines whether an :py:class:`ElementTree` object should be
constructed and stored on the instance or if iterative parsing
should be used instead. Iterative parsing keeps the memory usage
low for large XML files. Default is :py:const:`True`.
build_id_cache : bool, optional
Defines whether a dictionary mapping IDs to XML tree elements
should be built and stored on the instance. It is used in
:py:meth:`XML.get_by_id`, e.g. when using
:py:class:`pyteomics.mzid.MzIdentML` with ``retrieve_refs=True``.
huge_tree : bool, optional
This option is passed to the `lxml` parser and defines whether
security checks for XML tree depth and node size should be disabled.
Default is :py:const:`False`.
Enable this option for trusted files to avoid XMLSyntaxError exceptions
(e.g. `XMLSyntaxError: xmlSAX2Characters: huge text node`).
"""
super(XML, self).__init__(source, mode='rb', parser_func=self.iterfind, pass_file=False,
args=(self._default_iter_path or self._default_iter_tag,), kwargs=kwargs)
if iterative is None:
iterative = self._iterative
if iterative:
self._tree = None
else:
self.build_tree()
if build_id_cache:
self.build_id_cache()
else:
self._id_dict = None
self.version_info = self._get_version_info()
if read_schema is not None:
self._read_schema = read_schema
self.schema_info = self._get_schema_info(read_schema)
self._converters_items = self._converters.items()
self._huge_tree = kwargs.get('huge_tree', self._huge_tree)
self._retrieve_refs_enabled = kwargs.get('retrieve_refs')
def __reduce_ex__(self, protocol):
return self.__class__, (
self._source_init, self._read_schema, self._tree is None,
False,
), self.__getstate__()
def __getstate__(self):
state = super(XML, self).__getstate__()
state['_huge_tree'] = self._huge_tree
state['_retrieve_refs_enabled'] = self._retrieve_refs_enabled
state['_id_dict'] = self._id_dict
return state
def __setstate__(self, state):
super(XML, self).__setstate__(state)
self._huge_tree = state['_huge_tree']
self._retrieve_refs_enabled = state['_retrieve_refs_enabled']
self._id_dict = state['_id_dict']
@_keepstate
def _get_version_info(self):
"""
Provide version information about the XML file.
Returns
-------
out : tuple
A (version, schema URL) tuple, both elements are strings or None.
"""
for _, elem in etree.iterparse(
self._source, events=('start',), remove_comments=True, huge_tree=self._huge_tree):
if _local_name(elem) == self._root_element:
return (elem.attrib.get('version'),
elem.attrib.get(('{{{}}}'.format(elem.nsmap['xsi'])
if 'xsi' in elem.nsmap else '') + self._schema_location_param))
@_keepstate
def _get_schema_info(self, read_schema=True):
"""Stores defaults for the schema, tries to retrieve the schema for
other versions. Keys are: 'floats', 'ints', 'bools', 'lists',
'intlists', 'floatlists', 'charlists'."""
if not read_schema:
return self._default_schema
version, schema = self.version_info
if version == self._default_version:
return self._default_schema
ret = {}
try:
if not schema:
schema_url = ''
raise PyteomicsError(
'Schema information not found in {}.'.format(self.name))
schema_url = schema.split()[-1]
ret = xsd_parser(schema_url)
except Exception as e:
if isinstance(e, (URLError, socket.error, socket.timeout)):
warnings.warn("Can't get the {0.file_format} schema for version "
"`{1}` from <{2}> at the moment.\n"
"Using defaults for {0._default_version}.\n"
"You can disable reading the schema by specifying "
"`read_schema=False`.".format(self, version, schema_url))
else:
warnings.warn("Unknown {0.file_format} version `{1}`.\n"
"Attempt to use schema "
"information from <{2}> failed.\n"
"Exception information:\n{3}\n"
"Falling back to defaults for {0._default_version}\n"
"NOTE: This is just a warning, probably from a badly-"
"generated XML file.\nYou will still most probably get "
"decent results.\nLook here for suppressing warnings:\n"
"http://docs.python.org/library/warnings.html#"
"temporarily-suppressing-warnings\n"
"You can also disable reading the schema by specifying "
"`read_schema=False`.\n"
"If you think this shouldn't have happened, please "
"report this to\n"
"http://github.com/levitsky/pyteomics/issues\n"
"".format(self, version, schema_url, format_exc()))
ret = self._default_schema
return ret
def _handle_param(self, element, **kwargs):
"""Unpacks cvParam and userParam tags into key-value pairs"""
types = {'int': unitint, 'float': unitfloat, 'string': unitstr}
attribs = element.attrib
unit_info = None
unit_accesssion = None
if 'unitCvRef' in attribs or 'unitName' in attribs:
unit_accesssion = attribs.get('unitAccession')
unit_name = attribs.get('unitName', unit_accesssion)
unit_info = unit_name
accession = attribs.get('accession')
value = attribs.get('value', '')
try:
if attribs.get('type') in types:
value = types[attribs['type']](value, unit_info)
else:
value = unitfloat(value, unit_info)
except ValueError:
value = unitstr(value, unit_info)
# return {cvstr(attribs['name'], accession, unit_accesssion): value}
return _XMLParam(cvstr(attribs['name'], accession, unit_accesssion), value, _local_name(element))
def _find_immediate_params(self, element, **kwargs):
return element.xpath(
'./*[local-name()="cvParam" or local-name()="userParam" or local-name()="UserParam"]')
def _insert_param(self, info_dict, param):
key = param.name
if key in info_dict:
if isinstance(info_dict[key], list):
info_dict[key].append(param.value)
else:
info_dict[key] = [info_dict[key], param.value]
else:
info_dict[key] = param.value
def _promote_empty_parameter_to_name(self, info, params):
empty_values = []
not_empty_values = []
for param in params:
if param.is_empty():
empty_values.append(param)
else:
not_empty_values.append(param)
if len(empty_values) == 1 and 'name' not in info:
info['name'] = empty_values[0].name
return info, not_empty_values
return info, params
def _get_info(self, element, **kwargs):
"""Extract info from element's attributes, possibly recursive.
<cvParam> and <userParam> elements are treated in a special way."""
try:
name = kwargs.pop('ename')
except KeyError:
name = _local_name(element)
schema_info = self.schema_info
if name in {'cvParam', 'userParam', 'UserParam'}:
return self._handle_param(element, **kwargs)
info = dict(element.attrib)
# process subelements
params = []
if kwargs.get('recursive'):
for child in element.iterchildren():
cname = _local_name(child)
if cname in {'cvParam', 'userParam', 'UserParam'}:
newinfo = self._handle_param(child, **kwargs)
params.append(newinfo)
else:
if cname not in schema_info['lists']:
info[cname] = self._get_info_smart(child, ename=cname, **kwargs)
else:
info.setdefault(cname, []).append(
self._get_info_smart(child, ename=cname, **kwargs))
else:
# handle the case where we do not want to unpack all children, but
# *Param tags are considered part of the current entity, semantically
for child in self._find_immediate_params(element, **kwargs):
params.append(self._handle_param(child, **kwargs))
handler = self._element_handlers.get(name)
if handler is not None:
info, params = handler(self, info, params)
for param in params:
self._insert_param(info, param)
# process element text
if element.text:
stext = element.text.strip()
if stext:
if info:
info[name] = stext
else:
return stext
# convert types
try:
for k, v in info.items():
for t, a in self._converters_items:
if t in schema_info and (name, k) in schema_info[t]:
info[k] = a(v)
except ValueError as e:
message = 'Error when converting types: {}'.format(e.args)
if not self._read_schema:
message += '\nTry reading the file with read_schema=True'
raise PyteomicsError(message)
# resolve refs
if kwargs.get('retrieve_refs', self._retrieve_refs_enabled):
self._retrieve_refs(info, **kwargs)
# flatten the excessive nesting
for k, v in dict(info).items():
if k in self._structures_to_flatten:
if isinstance(v, list):
for vi in v:
info.update(vi)
else:
info.update(v)
del info[k]
# another simplification
for k, v in dict(info).items():
if isinstance(v, dict) and 'name' in v and len(v) == 1:
info[k] = v['name']
if len(info) == 2 and 'name' in info and (
'value' in info or 'values' in info):
name = info.pop('name')
info = {name: info.popitem()[1]}
return info
@_keepstate
def build_tree(self):
"""Build and store the :py:class:`ElementTree` instance
for the underlying file"""
p = etree.XMLParser(remove_comments=True, huge_tree=True)
self._tree = etree.parse(self._source, parser=p)
def clear_tree(self):
"""Remove the saved :py:class:`ElementTree`."""
self._tree = None
def _retrieve_refs(self, info, **kwargs):
"""Retrieves and embeds the data for each attribute in `info` that
ends in _ref. Removes the id attribute from `info`.
This implementation is a stub and must be implemented for each specific
        subclass. It is only called if :attr:`retrieve_refs` is :py:const:`True`."""
raise NotImplementedError(
("_retrieve_refs is not implemented for {}. "
"Do not use `retrieve_refs=True`.").format(
self.__class__.__name__))
def iterfind(self, path, **kwargs):
"""Parse the XML and yield info on elements with specified local
name or by specified "XPath".
Parameters
----------
path : str
Element name or XPath-like expression. The path is very close to
full XPath syntax, but local names should be used for all elements in the path.
They will be substituted with local-name() checks, up to the (first) predicate.
The path can be absolute or "free". Please don't specify namespaces.
**kwargs : passed to :py:meth:`self._get_info_smart`.
Returns
-------
out : iterator
"""
return Iterfind(self, path, **kwargs)
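    # Path semantics, as implemented by _iterfind_impl below: e.g. 'spectrum' is a
    # "free" path matched anywhere in the document, '/root/child' is anchored at
    # the root element, and a trailing XPath predicate such as
    # 'spectrum[@index="1"]' is applied to each matched element.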
@_keepstate
def _iterfind_impl(self, path, **kwargs):
"""Parse the XML and yield info on elements with specified local
name or by specified "XPath".
Parameters
----------
path : str
Element name or XPath-like expression. The path is very close to
full XPath syntax, but local names should be used for all elements in the path.
They will be substituted with local-name() checks, up to the (first) predicate.
The path can be absolute or "free". Please don't specify namespaces.
**kwargs : passed to :py:meth:`self._get_info_smart`.
Returns
-------
out : iterator
"""
try:
path, tail = re.match(pattern_path, path).groups()
except AttributeError:
raise PyteomicsError('Invalid path: ' + path)
if path[:2] == '//' or path[0] != '/':
absolute = False
if path[:2] == '//':
path = path[2:]
if path[0] == '/' or '//' in path:
raise PyteomicsError("Too many /'s in a row.")
else:
absolute = True
path = path[1:]
nodes = path.rstrip('/').split('/')
if not nodes:
raise PyteomicsError('Invalid path: ' + path)
if not self._tree:
if tail:
if tail[0] == '[':
tail = '(.)' + tail
else:
raise PyteomicsError('Cannot parse path tail: ' + tail)
xpath = etree.XPath(tail)
localname = nodes[0]
found = False
for ev, elem in etree.iterparse(self, events=('start', 'end'), remove_comments=True, huge_tree=self._huge_tree):
name_lc = _local_name(elem)
if ev == 'start':
if name_lc == localname or localname == '*':
found += 1
else:
if name_lc == localname or localname == '*':
if (absolute and elem.getparent() is None) or not absolute:
for child in get_rel_path(elem, nodes[1:]):
if tail:
for elem in xpath(child):
info = self._get_info_smart(elem, **kwargs)
yield info
else:
info = self._get_info_smart(child, **kwargs)
yield info
if not localname == '*':
found -= 1
if not found:
elem.clear()
else:
xpath = ('/' if absolute else '//') + '/'.join(
                '*[local-name()="{}"]'.format(node) if node != '*' else '*' for node in nodes) + tail
for elem in self._tree.xpath(xpath):
info = self._get_info_smart(elem, **kwargs)
yield info
@_keepstate
def build_id_cache(self):
"""Construct a cache for each element in the document, indexed by id
attribute"""
stack = 0
id_dict = {}
for event, elem in etree.iterparse(self._source, events=('start', 'end'),
remove_comments=True, huge_tree=self._huge_tree):
if event == 'start':
if 'id' in elem.attrib:
stack += 1
else:
if 'id' in elem.attrib:
stack -= 1
id_dict[elem.attrib['id']] = elem
elif stack == 0:
elem.clear()
self._id_dict = id_dict
def clear_id_cache(self):
"""Clear the element ID cache"""
self._id_dict = {}
def _find_by_id_no_reset(self, elem_id, id_key=None):
"""
An almost exact copy of :meth:`get_by_id` with the difference that it does
not reset the file reader's position before iterative parsing.
Parameters
----------
elem_id : str
The element id to query for
Returns
-------
lxml.Element
"""
found = False
if id_key is None:
id_key = self._default_id_attr
for event, elem in etree.iterparse(
self._source, events=('start', 'end'), remove_comments=True, huge_tree=self._huge_tree):
if event == 'start':
if elem.attrib.get(id_key) == elem_id:
found = True
else:
if elem.attrib.get(id_key) == elem_id:
return elem
if not found:
elem.clear()
raise KeyError(elem_id)
@_keepstate
def get_by_id(self, elem_id, **kwargs):
"""Parse the file and return the element with `id` attribute equal
to `elem_id`. Returns :py:const:`None` if no such element is found.
Parameters
----------
elem_id : str
The value of the `id` attribute to match.
Returns
-------
out : :py:class:`dict` or :py:const:`None`
"""
if not self._id_dict:
elem = self._find_by_id_no_reset(elem_id)
else:
elem = self._id_dict[elem_id]
return self._get_info_smart(elem, **kwargs)
# XPath emulator tools
pattern_path = re.compile(r'([\w/*]*)(.*)')
def get_rel_path(element, names):
if not names:
yield element
else:
for child in element.iterchildren():
if names[0] == '*' or _local_name(child) == names[0]:
if len(names) == 1:
yield child
else:
for gchild in get_rel_path(child, names[1:]):
yield gchild
def xpath(tree, path, ns=None):
"""Return the results of XPath query with added namespaces.
Assumes the ns declaration is on the root element or absent.
Parameters
----------
tree : ElementTree
path : str
ns : str or None, optional
"""
if hasattr(tree, 'getroot'):
root = tree.getroot()
else:
root = tree
while root.getparent() is not None:
root = root.getparent()
ns = root.nsmap.get(ns)
def repl(m):
s = m.group(1)
if not ns: return s
if not s: return 'd:'
return '/d:'
new_path = re.sub(r'(\/|^)(?![\*\/])', repl, path)
n_s = ({'d': ns} if ns else None)
return tree.xpath(new_path, namespaces=n_s)
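# e.g. xpath(tree, '/run/spectrum') rewrites the path to '/d:run/d:spectrum',
# with the document's default (or requested) namespace bound to the prefix 'd'.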
def _make_version_info(cls):
def version_info(source):
return cls(source).version_info
version_info.__doc__ = """
Provide version information about the {0.file_format} file.
.. note:: This function is provided for backward compatibility only.
It simply creates an :py:class:`{0.__name__}` instance
and returns its :py:data:`!version_info` attribute.
Parameters
----------
source : str or file
File name or file-like object.
Returns
-------
out : tuple
A (version, schema URL) tuple, both elements are strings or None.
""".format(cls)
return version_info
class ByteCountingXMLScanner(_file_obj):
"""
Carry out the construction of a byte offset index for `source` XML file
for each type of tag in :attr:`indexed_tags`.
    Inherits from :py:class:`pyteomics.auxiliary._file_obj` to support the object-oriented
:py:func:`_keep_state` interface.
"""
entities = {
'quot': '"',
'amp': '&',
'apos': "'",
'lt': '<',
'gt': '>',
}
xml_entity_pattern = re.compile(r"&({});".format('|'.join(entities.keys())))
def __init__(self, source, indexed_tags, block_size=1000000):
"""
Parameters
----------
indexed_tags : iterable of bytes
The XML tags (without namespaces) to build indices for.
block_size : int, optional
            The size of each chunk or "block" of the file to hold in memory as a
partitioned string at any given time. Defaults to `1000000`.
"""
super(ByteCountingXMLScanner, self).__init__(source, 'rb')
self.indexed_tags = ensure_bytes(indexed_tags)
self.block_size = block_size
def _chunk_iterator(self):
"""
Read a file in large blocks and chunk up each block into parts
resembling XML tags, yielding each chunk.
Assumes the file is opened in binary mode.
"""
f = self.file
read_size = self.block_size
delim = b'<'
buff = f.read(read_size)
started_with_delim = buff.startswith(delim)
parts = buff.split(delim)
tail = parts[-1]
front = parts[:-1]
i = 0
for part in front:
i += 1
if part == b"":
continue
if i == 1:
if started_with_delim:
yield delim + part
else:
yield part
else:
yield delim + part
running = True
while running:
buff = f.read(read_size)
if not buff:
running = False
buff = tail
else:
buff = tail + buff
parts = buff.split(delim)
tail = parts[-1]
front = parts[:-1]
for part in front:
yield delim + part
def _generate_offsets(self):
"""
Iterate over the lines of an XML file where each line contains exactly one tag,
tracking the byte count for each line. When a line contains a tag whose name matches
        a name in :attr:`indexed_tags`, yield the byte offset, the tag type, and its attributes.
Yields
------
offset : int
The byte offset of a matched tag's opening line
tag_type : bytes
The type of tag matched
attr_dict : dict
The attributes on the matched tag
"""
i = 0
packed = b"|".join(self.indexed_tags)
pattern = re.compile((r"^\s*<(%s)\s" % packed.decode()).encode())
attrs = re.compile(br"(\S+)=[\"']([^\"']*)[\"']")
for line in self._chunk_iterator():
match = pattern.match(line)
if match:
yield i, match.group(1), dict(attrs.findall(line))
i += len(line)
def _entity_sub_cb(self, match):
ent = match.group(1)
return self.entities[ent]
def replace_entities(self, key):
'''Replace XML entities in a string with their character representation
Uses the minimal mapping of XML entities pre-defined for all XML documents and
does not attempt to deal with external DTD defined entities. This mapping is found
in :attr:`entities`.
Parameters
----------
key : str
The string to substitute
Returns
-------
str
'''
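        # e.g. replace_entities('Fish &amp; Chips') -> 'Fish & Chips'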
return self.xml_entity_pattern.sub(self._entity_sub_cb, key)
@_keepstate
def build_byte_index(self, lookup_id_key_mapping=None):
"""
Builds a byte offset index for one or more types of tags.
Parameters
----------
lookup_id_key_mapping : Mapping, optional
A mapping from tag name to the attribute to look up the identity
for each entity of that type to be extracted. Defaults to 'id' for
each type of tag.
Returns
-------
defaultdict(dict)
Mapping from tag type to dict from identifier to byte offset
"""
if lookup_id_key_mapping is None:
lookup_id_key_mapping = {}
lookup_id_key_mapping = {ensure_bytes_single(key): ensure_bytes_single(value)
for key, value in lookup_id_key_mapping.items()}
for name in self.indexed_tags:
bname = ensure_bytes_single(name)
lookup_id_key_mapping.setdefault(bname, 'id')
lookup_id_key_mapping[bname] = ensure_bytes_single(lookup_id_key_mapping[bname])
indices = HierarchicalOffsetIndex()
g = self._generate_offsets()
for offset, offset_type, attrs in g:
k = attrs[lookup_id_key_mapping[offset_type]].decode('utf-8')
if '&' in k:
k = self.replace_entities(k)
indices[offset_type.decode('utf-8')][k] = offset
return indices
@classmethod
def scan(cls, source, indexed_tags):
inst = cls(source, indexed_tags)
return inst.build_byte_index()
class TagSpecificXMLByteIndex(object):
"""
Encapsulates the construction and querying of a byte offset index
for a set of XML tags.
This type mimics an immutable Mapping.
Attributes
----------
indexed_tags : iterable of bytes
The tag names to index, not including a namespace
offsets : defaultdict(OrderedDict(str, int))
The hierarchy of byte offsets organized ``{"tag_type": {"id": byte_offset}}``
indexed_tag_keys: dict(str, str)
A mapping from tag name to unique identifier attribute
Parameters
----------
    indexed_tags: iterable of bytes
The tag names to include in the index
"""
_default_indexed_tags = []
_default_keys = {}
_scanner_class = ByteCountingXMLScanner
def __init__(self, source, indexed_tags=None, keys=None):
if keys is None:
keys = self._default_keys.copy()
if indexed_tags is None:
indexed_tags = self._default_indexed_tags
self.indexed_tags = indexed_tags
self.indexed_tag_keys = keys
self.source = source
self.offsets = HierarchicalOffsetIndex()
self.build_index()
def __getstate__(self):
state = {}
state['indexed_tags'] = self.indexed_tags
state['indexed_tag_keys'] = self.indexed_tag_keys
state['offsets'] = self.offsets
return state
def __setstate__(self, state):
self.indexed_tags = state['indexed_tags']
self.indexed_tag_keys = state['indexed_tag_keys']
self.offsets = state['offsets']
def __getitem__(self, key):
return self.offsets[key]
def build_index(self):
"""
        Perform the byte offset index building for :py:attr:`source`.
Returns
-------
offsets: defaultdict
            The hierarchical offset index, also stored in :py:attr:`offsets`
"""
scanner = self._scanner_class(self.source, self.indexed_tags)
self.offsets = scanner.build_byte_index(self.indexed_tag_keys)
return self.offsets
def items(self):
return self.offsets.items()
def keys(self):
return self.offsets.keys()
def __iter__(self):
return iter(self.keys())
def __len__(self):
return sum(len(group) for key, group in self.items())
@classmethod
def build(cls, source, indexed_tags=None, keys=None):
indexer = cls(source, indexed_tags, keys)
return indexer.offsets
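# Illustrative usage sketch (hypothetical file name and tag):
#     offsets = TagSpecificXMLByteIndex.build('run.xml', indexed_tags=[b'spectrum'])
#     offsets['spectrum']['some-id']  # byte offset of that element's opening tag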
def ensure_bytes_single(string):
if isinstance(string, bytes):
return string
try:
return string.encode('utf-8')
except (AttributeError, UnicodeEncodeError):
raise PyteomicsError('{!r} could not be encoded'.format(string))
def ensure_bytes(strings):
if isinstance(strings, basestring):
strings = [strings]
return [ensure_bytes_single(string) for string in strings]
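# e.g. ensure_bytes('spectrum') -> [b'spectrum']; ensure_bytes([b'scan']) -> [b'scan']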
def _flatten_map(hierarchical_map):
all_records = []
for key, records in hierarchical_map.items():
all_records.extend(records.items())
all_records.sort(key=lambda x: x[1])
return OrderedDict(all_records)
class IndexedXML(IndexedReaderMixin, XML):
"""Subclass of :py:class:`XML` which uses an index of byte offsets for some
elements for quick random access.
"""
_indexed_tags = set()
_indexed_tag_keys = {}
_use_index = True
def __init__(self, source, read_schema=False, iterative=True, build_id_cache=False,
use_index=None, *args, **kwargs):
"""Create an indexed XML parser object.
Parameters
----------
source : str or file
File name or file-like object corresponding to an XML file.
read_schema : bool, optional
Defines whether schema file referenced in the file header
should be used to extract information about value conversion.
Default is :py:const:`False`.
iterative : bool, optional
Defines whether an :py:class:`ElementTree` object should be
constructed and stored on the instance or if iterative parsing
should be used instead. Iterative parsing keeps the memory usage
low for large XML files. Default is :py:const:`True`.
use_index : bool, optional
Defines whether an index of byte offsets needs to be created for
elements listed in `indexed_tags`.
This is useful for random access to spectra in mzML or elements of mzIdentML files,
or for iterative parsing of mzIdentML with ``retrieve_refs=True``.
If :py:const:`True`, `build_id_cache` is ignored.
If :py:const:`False`, the object acts exactly like :py:class:`XML`.
Default is :py:const:`True`.
indexed_tags : container of bytes, optional
If `use_index` is :py:const:`True`, elements listed in this parameter
will be indexed. Empty set by default.
"""
tags = kwargs.get('indexed_tags')
tag_index_keys = kwargs.get('indexed_tag_keys')
if tags is not None:
self._indexed_tags = tags
if tag_index_keys is not None:
self._indexed_tag_keys = tag_index_keys
if use_index is not None:
self._use_index = use_index
if use_index:
build_id_cache = False
if self._default_iter_path and self._default_iter_path != self._default_iter_tag:
warnings.warn('_default_iter_path differs from _default_iter_tag and index is enabled. '
'_default_iter_tag will be used in the index, mind the consequences.')
super(IndexedXML, self).__init__(source, read_schema, iterative, build_id_cache, *args, **kwargs)
self._offset_index = None
self._build_index()
@property
def default_index(self):
return self._offset_index[self._default_iter_tag]
def __reduce_ex__(self, protocol):
reconstructor, args, state = XML.__reduce_ex__(self, protocol)
args = args + (False, )
return reconstructor, args, state
def __getstate__(self):
state = super(IndexedXML, self).__getstate__()
state['_indexed_tags'] = self._indexed_tags
state['_indexed_tag_keys'] = self._indexed_tag_keys
state['_use_index'] = self._use_index
state['_offset_index'] = self._offset_index
return state
def __setstate__(self, state):
super(IndexedXML, self).__setstate__(state)
self._indexed_tags = state['_indexed_tags']
self._indexed_tag_keys = state['_indexed_tag_keys']
self._use_index = state['_use_index']
self._offset_index = state['_offset_index']
@_keepstate
def _build_index(self):
"""
        Build up a `dict` of `dict` of offsets for elements. Calls
        :meth:`TagSpecificXMLByteIndex.build` on :attr:`_source` and assigns the return value to :attr:`_offset_index`
"""
if not self._indexed_tags or not self._use_index:
return
self._offset_index = TagSpecificXMLByteIndex.build(
self._source, self._indexed_tags, self._indexed_tag_keys)
@_keepstate
def _find_by_id_reset(self, elem_id, id_key=None):
return self._find_by_id_no_reset(elem_id, id_key=id_key)
@_keepstate
def get_by_id(self, elem_id, id_key=None, element_type=None, **kwargs):
"""
Retrieve the requested entity by its id. If the entity
is a spectrum described in the offset index, it will be retrieved
by immediately seeking to the starting position of the entry, otherwise
falling back to parsing from the start of the file.
Parameters
----------
elem_id : str
The id value of the entity to retrieve.
id_key : str, optional
The name of the XML attribute to use for lookup.
Defaults to :py:attr:`self._default_id_attr`.
Returns
-------
dict
"""
try:
index = self._offset_index
if element_type is None:
offset, element_type = index.find_no_type(elem_id)
else:
offset = index.find(elem_id, element_type)
self._source.seek(offset)
if id_key is None:
id_key = self._indexed_tag_keys.get(element_type)
elem = self._find_by_id_no_reset(elem_id, id_key=id_key)
except (KeyError, AttributeError, etree.LxmlError):
elem = self._find_by_id_reset(elem_id, id_key=id_key)
data = self._get_info_smart(elem, **kwargs)
return data
def __contains__(self, key):
return key in self._offset_index[self._default_iter_tag]
def __len__(self):
return len(self._offset_index[self._default_iter_tag])
def iterfind(self, path, **kwargs):
"""Parse the XML and yield info on elements with specified local
name or by specified "XPath".
Parameters
----------
path : str
Element name or XPath-like expression. The path is very close to
full XPath syntax, but local names should be used for all elements in the path.
They will be substituted with local-name() checks, up to the (first) predicate.
The path can be absolute or "free". Please don't specify namespaces.
**kwargs : passed to :py:meth:`self._get_info_smart`.
Returns
-------
out : iterator
"""
if path in self._indexed_tags and self._use_index:
return IndexedIterfind(self, path, **kwargs)
return Iterfind(self, path, **kwargs)
class MultiProcessingXML(IndexedXML, TaskMappingMixin):
"""XML reader that feeds indexes to external processes
for parallel parsing and analysis of XML entries."""
def _task_map_iterator(self):
"""Returns the :class:`Iteratable` to use when dealing work items onto the input IPC
queue used by :meth:`map`
Returns
-------
:class:`Iteratable`
"""
return iter(self._offset_index[self._default_iter_tag])
class IndexSavingXML(IndexSavingMixin, IndexedXML):
"""An extension to the IndexedXML type which
adds facilities to read and write the byte offset
index externally.
"""
_index_class = HierarchicalOffsetIndex
def _read_byte_offsets(self):
"""Read the byte offset index JSON file at :attr:`_byte_offset_filename`
and populate :attr:`_offset_index`
"""
with open(self._byte_offset_filename, 'r') as f:
index = self._index_class.load(f)
if index.schema_version is None:
raise TypeError("Legacy Offset Index!")
self._offset_index = index
class ArrayConversionMixin(BinaryDataArrayTransformer):
_dtype_dict = {}
_array_keys = ['m/z array', 'intensity array']
def __init__(self, *args, **kwargs):
self._dtype_dict = {None: None}
dtype = kwargs.pop('dtype', None)
if isinstance(dtype, dict):
self._dtype_dict.update(dtype)
elif dtype:
self._dtype_dict = {k: dtype for k in self._array_keys}
self._dtype_dict[None] = dtype
super(ArrayConversionMixin, self).__init__(*args, **kwargs)
def __getstate__(self):
state = super(ArrayConversionMixin, self).__getstate__()
state['_dtype_dict'] = self._dtype_dict
return state
def __setstate__(self, state):
super(ArrayConversionMixin, self).__setstate__(state)
self._dtype_dict = state['_dtype_dict']
def _convert_array(self, k, array):
dtype = self._dtype_dict.get(k)
if dtype is not None:
return array.astype(dtype)
return array
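    # e.g. dtype={'m/z array': np.float32} downcasts only that array, while a
    # scalar dtype=np.float32 is applied to every key in _array_keys.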
def _finalize_record_conversion(self, array, record):
key = record.key
return self._convert_array(key, array)
class Iterfind(object):
def __init__(self, parser, tag_name, **kwargs):
self.parser = parser
self.tag_name = tag_name
self.config = kwargs
self._iterator = None
def __repr__(self):
template = "{self.__class__.__name__}({self.tag_name!r}{config})"
if self.config:
config = ", " + repr(self.config)
else:
config = ''
return template.format(self=self, config=config)
def __iter__(self):
return self
def _make_iterator(self):
return self.parser._iterfind_impl(self.tag_name, **self.config)
def __next__(self):
if self._iterator is None:
self._iterator = self._make_iterator()
return next(self._iterator)
def next(self):
return self.__next__()
@property
def is_indexed(self):
return False
def reset(self):
self._iterator = None
self.parser.reset()
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
self.reset()
    def map(self, *args, **kwargs):
raise NotImplementedError("This query isn't indexed, it cannot be mapped with multiprocessing")
def _get_by_index(self, idx):
self.reset()
value = next(islice(self, idx, idx + 1))
return value
def _get_by_slice(self, slc):
self.reset()
value = list(islice(self, slc.start, slc.stop, slc.step))
return value
def __getitem__(self, i):
if isinstance(i, slice):
return self._get_by_slice(i)
return self._get_by_index(i)
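    # Note: indexing and slicing here re-parse from the start of the file on each
    # call; IndexedIterfind below resolves positions through the byte-offset index.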
class IndexedIterfind(TaskMappingMixin, Iterfind):
def __init__(self, parser, tag_name, **kwargs):
TaskMappingMixin.__init__(self, **kwargs)
Iterfind.__init__(self, parser, tag_name, **kwargs)
def _task_map_iterator(self):
"""Returns the :class:`Iteratable` to use when dealing work items onto the input IPC
queue used by :meth:`map`
Returns
-------
:class:`Iteratable`
"""
return iter(self._index)
@property
def _offset_index(self):
return self._index
@property
def _index(self):
return self.parser.index[self.tag_name]
def _get_reader_for_worker_spec(self):
return self.parser
def _yield_from_index(self):
for key in self._task_map_iterator():
yield self.parser.get_by_id(key, **self.config)
def _make_iterator(self):
if self.is_indexed:
return self._yield_from_index()
warnings.warn("Non-indexed iterator created from %r" % (self, ))
return super(IndexedIterfind, self)._make_iterator()
@property
def is_indexed(self):
if hasattr(self.parser, 'index'):
if self.parser.index is not None:
index = self.parser.index
if isinstance(index, HierarchicalOffsetIndex):
return bool(self.tag_name in index and index[self.tag_name])
return False
def _get_by_index(self, idx):
index = self._index
key = index.from_index(idx)
return self.parser.get_by_id(key)
def _get_by_slice(self, slc):
index = self._index
keys = index.from_slice(slc)
return self.parser.get_by_ids(keys)
def __len__(self):
index = self._index
return len(index)
|
[
"urllib.request.urlopen",
"lxml.etree.XPath",
"re.match",
"lxml.etree.XMLParser",
"lxml.etree.iterparse",
"collections.namedtuple",
"itertools.islice",
"traceback.format_exc",
"lxml.etree.parse",
"warnings.warn",
"re.sub",
"collections.OrderedDict",
"re.compile"
] |
[((6292, 6341), 'collections.namedtuple', 'namedtuple', (['"""XMLParam"""', "('name', 'value', 'type')"], {}), "('XMLParam', ('name', 'value', 'type'))\n", (6302, 6341), False, 'from collections import OrderedDict, namedtuple\n'), ((26140, 26168), 're.compile', 're.compile', (['"""([\\\\w/*]*)(.*)"""'], {}), "('([\\\\w/*]*)(.*)')\n", (26150, 26168), False, 'import re\n'), ((2433, 2452), 'urllib.request.urlopen', 'urlopen', (['schema_url'], {}), '(schema_url)\n', (2440, 2452), False, 'from urllib.request import urlopen, URLError\n'), ((2461, 2498), 'lxml.etree.XMLParser', 'etree.XMLParser', ([], {'remove_comments': '(True)'}), '(remove_comments=True)\n', (2476, 2498), False, 'from lxml import etree\n'), ((2517, 2551), 'lxml.etree.parse', 'etree.parse', (['schema_file'], {'parser': 'p'}), '(schema_file, parser=p)\n', (2528, 2551), False, 'from lxml import etree\n'), ((4428, 4649), 're.compile', 're.compile', (['"""(?P<sign>-?)P(?:(?P<years>\\\\d+\\\\.?\\\\d*)Y)?(?:(?P<months>\\\\d+\\\\.?\\\\d*)M)?(?:(?P<days>\\\\d+\\\\.?\\\\d*)D)?(?:T(?:(?P<hours>\\\\d+\\\\.?\\\\d*)H)?(?:(?P<minutes>\\\\d+\\\\.?\\\\d*)M)?(?:(?P<seconds>\\\\d+\\\\.?\\\\d*)S)?)?"""'], {}), "(\n '(?P<sign>-?)P(?:(?P<years>\\\\d+\\\\.?\\\\d*)Y)?(?:(?P<months>\\\\d+\\\\.?\\\\d*)M)?(?:(?P<days>\\\\d+\\\\.?\\\\d*)D)?(?:T(?:(?P<hours>\\\\d+\\\\.?\\\\d*)H)?(?:(?P<minutes>\\\\d+\\\\.?\\\\d*)M)?(?:(?P<seconds>\\\\d+\\\\.?\\\\d*)S)?)?'\n )\n", (4438, 4648), False, 'import re\n'), ((27169, 27210), 're.sub', 're.sub', (['"""(\\\\/|^)(?![\\\\*\\\\/])"""', 'repl', 'path'], {}), "('(\\\\/|^)(?![\\\\*\\\\/])', repl, path)\n", (27175, 27210), False, 'import re\n'), ((36475, 36499), 'collections.OrderedDict', 'OrderedDict', (['all_records'], {}), '(all_records)\n', (36486, 36499), False, 'from collections import OrderedDict, namedtuple\n'), ((11042, 11143), 'lxml.etree.iterparse', 'etree.iterparse', (['self._source'], {'events': "('start',)", 'remove_comments': '(True)', 'huge_tree': 'self._huge_tree'}), "(self._source, events=('start',), remove_comments=True,\n huge_tree=self._huge_tree)\n", (11057, 11143), False, 'from lxml import etree\n'), ((19216, 19269), 'lxml.etree.XMLParser', 'etree.XMLParser', ([], {'remove_comments': '(True)', 'huge_tree': '(True)'}), '(remove_comments=True, huge_tree=True)\n', (19231, 19269), False, 'from lxml import etree\n'), ((19291, 19326), 'lxml.etree.parse', 'etree.parse', (['self._source'], {'parser': 'p'}), '(self._source, parser=p)\n', (19302, 19326), False, 'from lxml import etree\n'), ((23966, 24073), 'lxml.etree.iterparse', 'etree.iterparse', (['self._source'], {'events': "('start', 'end')", 'remove_comments': '(True)', 'huge_tree': 'self._huge_tree'}), "(self._source, events=('start', 'end'), remove_comments=True,\n huge_tree=self._huge_tree)\n", (23981, 24073), False, 'from lxml import etree\n'), ((25042, 25149), 'lxml.etree.iterparse', 'etree.iterparse', (['self._source'], {'events': "('start', 'end')", 'remove_comments': '(True)', 'huge_tree': 'self._huge_tree'}), "(self._source, events=('start', 'end'), remove_comments=True,\n huge_tree=self._huge_tree)\n", (25057, 25149), False, 'from lxml import etree\n'), ((30966, 31013), 're.compile', 're.compile', (['b\'(\\\\S+)=[\\\\"\\\']([^\\\\"\\\']*)[\\\\"\\\']\''], {}), '(b\'(\\\\S+)=[\\\\"\\\']([^\\\\"\\\']*)[\\\\"\\\']\')\n', (30976, 31013), False, 'import re\n'), ((47872, 47935), 'warnings.warn', 'warnings.warn', (["('Non-indexed iterator created from %r' % (self,))"], {}), "('Non-indexed iterator created from %r' % (self,))\n", (47885, 47935), False, 'import warnings\n'), ((22357, 22456), 'lxml.etree.iterparse', 'etree.iterparse', (['self'], {'events': "('start', 'end')", 'remove_comments': '(True)', 'huge_tree': 'self._huge_tree'}), "(self, events=('start', 'end'), remove_comments=True,\n huge_tree=self._huge_tree)\n", (22372, 22456), False, 'from lxml import etree\n'), ((46576, 46602), 'itertools.islice', 'islice', (['self', 'idx', '(idx + 1)'], {}), '(self, idx, idx + 1)\n', (46582, 46602), False, 'from itertools import islice\n'), ((46702, 46745), 'itertools.islice', 'islice', (['self', 'slc.start', 'slc.stop', 'slc.step'], {}), '(self, slc.start, slc.stop, slc.step)\n', (46708, 46745), False, 'from itertools import islice\n'), ((22252, 22269), 'lxml.etree.XPath', 'etree.XPath', (['tail'], {}), '(tail)\n', (22263, 22269), False, 'from lxml import etree\n'), ((38836, 39002), 'warnings.warn', 'warnings.warn', (['"""_default_iter_path differs from _default_iter_tag and index is enabled. _default_iter_tag will be used in the index, mind the consequences."""'], {}), "(\n '_default_iter_path differs from _default_iter_tag and index is enabled. _default_iter_tag will be used in the index, mind the consequences.'\n )\n", (38849, 39002), False, 'import warnings\n'), ((21426, 21454), 're.match', 're.match', (['pattern_path', 'path'], {}), '(pattern_path, path)\n', (21434, 21454), False, 'import re\n'), ((13659, 13671), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (13669, 13671), False, 'from traceback import format_exc\n')]
|
from __future__ import print_function
import torch.nn.init as init
import argparse
import time
import os
import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.utils.data
from torch.autograd import Variable
import matplotlib.pyplot as plt
import seaborn as sns
import warnings
warnings.filterwarnings("ignore")
from adamPre import AdamPre
from mogdata import generate_data_SingleBatch, loglikelihood
# TODO: Needed while running on a headless server. Change the plotting backend accordingly.
plt.switch_backend('agg')
parser = argparse.ArgumentParser()
# Information regarding data input
parser.add_argument('--batchSize', type=int, default=512, help='input batch size')
parser.add_argument('--modes', type=int, default=8, help='total number of gaussian modes to consider')
parser.add_argument('--radius', type=int, default=1, help='radius of circle with MoG')
parser.add_argument('--sigma', type=float, default=0.01, help='variance of gaussians, default=0.01')
# Information regarding network
parser.add_argument('--ngf', type=int, default=128)
parser.add_argument('--ndf', type=int, default=128)
parser.add_argument('--nz', type=int, default=2, help='size of the latent z vector')
parser.add_argument('--ngpu', type=int, default=1, help='number of GPUs to use')
parser.add_argument('--netG', default='', help="path to netG (to continue training)")
parser.add_argument('--netD', default='', help="path to netD (to continue training)")
# Training/Optimizer information
parser.add_argument('--niter', type=int, default=50000, help='number of epochs to train for')
parser.add_argument('--lr', type=float, default=1e-3, help='learning rate, default=0.001')
parser.add_argument('--beta1', type=float, default=0.9, help='beta1 for adam. default=0.5')
parser.add_argument('--pdhgGLookAhead', action='store_true', help='enables generator lookahead')
parser.add_argument('--pdhgDLookAhead', action='store_true', help='enables discriminator lookahead')
parser.add_argument('--GLRatio', type=float, default=1.0, help='scaling factor for lr of generator')
parser.add_argument('--DLRatio', type=float, default=1.0, help='scaling factor for lr of discriminator')
# Miscellaneous information
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--outf', default='.', help='folder to output images and model checkpoints')
parser.add_argument('--manualSeed', type=int, help='manual seed')
parser.add_argument('--deviceID', type=int, help='deviceID', default=0)
parser.add_argument('--verbose', action='store_true', help='displays additional information')
# Options for visualization
parser.add_argument('--viz_every', type=int, default=10000, help='plotting visualization every few iteration')
parser.add_argument('--n_batches_viz', type=int, default=10, help='number of samples used for visualization')
parser.add_argument('--markerSize', type=float, help='marker size for the scatter plots')
parser.add_argument('--plotRealData', action='store_true', help='saves real samples')
parser.add_argument('--plotLoss', action='store_true', help='Enables plotting of loss function')
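# Both networks below are small two-hidden-layer Tanh MLPs over 2-D points,
# matching the 2-D mixture-of-Gaussians toy data produced by mogdata.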
class _netG(nn.Module):
def __init__(self,ngpu,nz,ngf):
super(_netG, self).__init__()
self.ngpu = ngpu
self.main = nn.Sequential(
# input is Z, going into a convolution
nn.Linear(nz, ngf),
nn.Tanh(),
nn.Linear(ngf, ngf),
nn.Tanh(),
nn.Linear(ngf, 2),
)
def forward(self, input):
if self.ngpu > 1 and isinstance(input.data, torch.cuda.FloatTensor):
output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
else:
output = self.main(input)
return output
class _netD(nn.Module):
def __init__(self, ngpu, ndf):
super(_netD, self).__init__()
self.ngpu = ngpu
self.main = nn.Sequential(
nn.Linear(2, ndf),
nn.Tanh(),
nn.Linear(ndf, ndf),
nn.Tanh(),
nn.Linear(ndf, 1),
nn.Sigmoid()
)
def forward(self, input):
if self.ngpu > 1 and isinstance(input.data, torch.cuda.FloatTensor):
output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
else:
output = self.main(input)
return output.view(-1, 1)
def main():
opt = parser.parse_args()
print(opt)
try:
os.makedirs(opt.outf)
except OSError:
pass
if opt.manualSeed is None:
opt.manualSeed = random.randint(1, 10000)
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
np.random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
if opt.cuda:
torch.cuda.manual_seed_all(opt.manualSeed)
torch.backends.cudnn.enabled = False
print("torch.backends.cudnn.enabled is: ", torch.backends.cudnn.enabled)
cudnn.benchmark = True
if torch.cuda.is_available():
ngpu = int(opt.ngpu)
if not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
else:
if int(opt.ngpu) > 0:
print("WARNING: CUDA not available, cannot use --ngpu =", opt.ngpu)
ngpu = 0
# Initializing Generator and Discriminator Network
nz = int(opt.nz)
ngf = int(opt.ngf)
ndf = int(opt.ndf)
netG = _netG(ngpu,nz,ngf)
netG.apply(weights_init)
if opt.netG != '':
netG.load_state_dict(torch.load(opt.netG))
print(netG)
netD = _netD(ngpu,ndf)
netD.apply(weights_init)
if opt.netD != '':
netD.load_state_dict(torch.load(opt.netD))
print(netD)
criterion = nn.BCELoss()
input = torch.FloatTensor(opt.batchSize, 2)
noise = torch.FloatTensor(opt.batchSize, nz)
fixed_noise = torch.FloatTensor(opt.batchSize * opt.n_batches_viz, nz).normal_(0, 1)
label = torch.FloatTensor(opt.batchSize)
real_label = 1
fake_label = 0
if opt.cuda:
netD.cuda()
netG.cuda()
criterion.cuda()
input, label = input.cuda(), label.cuda()
noise, fixed_noise = noise.cuda(), fixed_noise.cuda()
input = Variable(input)
label = Variable(label)
noise = Variable(noise)
fixed_noise = Variable(fixed_noise)
# Flag for disabling prediction step in the first iterate
firstTime = True
# setup optimizer
optimizerD = AdamPre(netD.parameters(), lr=opt.lr/opt.DLRatio, betas=(opt.beta1, 0.999), name='optD')
optimizerG = AdamPre(netG.parameters(), lr=opt.lr/opt.GLRatio, betas=(opt.beta1, 0.999), name='optG')
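    # AdamPre exposes stepLookAhead()/restoreStepLookAhead(): each player's update
    # below can be computed against a one-step predicted copy of its opponent's
    # weights (the prediction trick, enabled via --pdhgGLookAhead / --pdhgDLookAhead).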
fs = []
np_samples = []
np_samples_real = []
for i in range(opt.niter):
if opt.verbose:
c1 = time.clock()
############################
# (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
###########################
# sampling input batch
real_cpu = generate_data_SingleBatch(num_mode=opt.modes, radius=opt.radius, center=(0, 0), sigma=opt.sigma,
batchSize=opt.batchSize)
batch_size = real_cpu.size(0)
input.data.resize_(real_cpu.size()).copy_(real_cpu)
label.data.resize_(batch_size).fill_(real_label)
netD.zero_grad()
output = netD(input)
errD_real = criterion(output, label)
errD_real.backward()
D_x = output.data.mean()
# Update the generator weights with prediction
# We avoid update during the first iteration
if not firstTime and opt.pdhgGLookAhead:
optimizerG.stepLookAhead()
# train with fake
noise.data.resize_(batch_size, nz)
noise.data.normal_(0, 1)
label.data.resize_(batch_size)
label.data.fill_(fake_label)
fake = netG(noise)
output = netD(fake.detach())
errD_fake = criterion(output, label)
errD_fake.backward()
D_G_z1 = output.data.mean()
errD = errD_real + errD_fake
optimizerD.step()
# restore the previous (non-predicted) weights of Generator
if not firstTime and opt.pdhgGLookAhead:
optimizerG.restoreStepLookAhead()
# Set the flag to false after the first iter
firstTime = False
############################
# (2) Update G network: maximize -log(1 - D(G(z)))
###########################
# Update discriminator weights with prediction; restore after the generator update.
if opt.pdhgDLookAhead:
optimizerD.stepLookAhead()
# Unlike DCGAN code, we use original loss for generator. Hence we fill fake labels.
label.data.fill_(fake_label)
netG.zero_grad()
fake = netG(noise)
output = netD(fake)
errG = -criterion(output, label)
errG.backward()
D_G_z2 = output.data.mean()
optimizerG.step()
# restore back discriminator weights
if opt.pdhgDLookAhead:
optimizerD.restoreStepLookAhead()
if opt.plotLoss:
f = [errD.data[0], errG.data[0]]
fs.append(f)
print('[%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f'
% (i, opt.niter, errD.data[0], errG.data[0], D_x, D_G_z1, D_G_z2))
if opt.verbose:
print("itr=", i, "clock time elapsed=", time.clock() - c1)
if i % opt.viz_every == 0 or i == opt.niter - 1:
# save checkpoints
torch.save(netG.state_dict(), '{0}/netG_epoch_{1}.pth'.format(opt.outf, i))
torch.save(netD.state_dict(), '{0}/netD_epoch_{1}.pth'.format(opt.outf, i))
tmp_cpu = ((netG(fixed_noise)).data).cpu().numpy()
np_samples.append(tmp_cpu)
fig = plt.figure(figsize=(5, 5))
if opt.markerSize:
plt.scatter(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none', s=opt.markerSize)
else:
plt.scatter(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none')
plt.axis('off')
plt.savefig('%s/MoG_Fake_withP_%03d.pdf' % (opt.outf, i))
plt.close()
if opt.plotRealData:
real_cpu_temp = generate_data_SingleBatch(num_mode=opt.modes, radius=opt.radius, center=(0, 0), sigma=opt.sigma,
batchSize=opt.batchSize * opt.n_batches_viz)
tmp_cpu = real_cpu_temp.numpy()
np_samples_real.append(tmp_cpu)
fig = plt.figure(figsize=(5, 5))
if opt.markerSize:
plt.scatter(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none', s=opt.markerSize) # green is ground truth
else:
plt.scatter(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none') # green is ground truth
plt.axis('off')
plt.savefig('%s/MoG_Real.pdf' % (opt.outf))
plt.close()
# Final KDE plot for paper. It also plots log likelihood
xmax = 1.3
nLevels = 20
np_samples_ = np_samples[::1]
cols = len(np_samples_)
bg_color = sns.color_palette('Greens', n_colors=256)[0]
plt.figure(figsize=(2*cols, 2))
for i, samps in enumerate(np_samples_):
if i == 0:
ax = plt.subplot(1,cols,1)
else:
plt.subplot(1,cols,i+1, sharex=ax, sharey=ax)
ax2 = sns.kdeplot(samps[:, 0], samps[:, 1], shade=True, cmap='Greens', n_levels=nLevels, clip=[[-xmax,xmax]]*2)
ax2.set_facecolor(bg_color)
plt.xticks([]); plt.yticks([])
plt.title('step %d'%(i*opt.viz_every))
plt.gcf().tight_layout()
plt.savefig('{0}/all.png'.format(opt.outf))
if opt.plotLoss:
plt.figure()
fs = np.array(fs)
plt.plot(fs)
plt.legend(('Discriminator loss', 'Generator loss'))
plt.savefig('{0}/losses.pdf'.format(opt.outf))
plt.close('all')
# custom weights initialization called on netG and netD
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Linear') != -1:
init.orthogonal(m.weight)
init.constant(m.bias, 0.1)
if __name__ == '__main__':
main()
|
[
"matplotlib.pyplot.title",
"numpy.random.seed",
"argparse.ArgumentParser",
"seaborn.kdeplot",
"matplotlib.pyplot.figure",
"torch.nn.BCELoss",
"random.randint",
"matplotlib.pyplot.close",
"torch.load",
"matplotlib.pyplot.yticks",
"torch.FloatTensor",
"torch.nn.init.orthogonal",
"time.clock",
"random.seed",
"torch.nn.Linear",
"matplotlib.pyplot.xticks",
"mogdata.generate_data_SingleBatch",
"torch.nn.Tanh",
"torch.manual_seed",
"torch.autograd.Variable",
"matplotlib.pyplot.legend",
"torch.cuda.is_available",
"torch.nn.init.constant",
"matplotlib.pyplot.gcf",
"torch.nn.Sigmoid",
"matplotlib.pyplot.switch_backend",
"matplotlib.pyplot.subplot",
"os.makedirs",
"matplotlib.pyplot.plot",
"warnings.filterwarnings",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.axis",
"torch.cuda.manual_seed_all",
"numpy.array",
"seaborn.color_palette",
"matplotlib.pyplot.savefig"
] |
[((365, 398), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (388, 398), False, 'import warnings\n'), ((558, 583), 'matplotlib.pyplot.switch_backend', 'plt.switch_backend', (['"""agg"""'], {}), "('agg')\n", (576, 583), True, 'import matplotlib.pyplot as plt\n'), ((594, 619), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (617, 619), False, 'import argparse\n'), ((4657, 4684), 'random.seed', 'random.seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (4668, 4684), False, 'import random\n'), ((4689, 4719), 'numpy.random.seed', 'np.random.seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (4703, 4719), True, 'import numpy as np\n'), ((4724, 4757), 'torch.manual_seed', 'torch.manual_seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (4741, 4757), False, 'import torch\n'), ((4989, 5014), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5012, 5014), False, 'import torch\n'), ((5737, 5749), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (5747, 5749), True, 'import torch.nn as nn\n'), ((5763, 5798), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(2)'], {}), '(opt.batchSize, 2)\n', (5780, 5798), False, 'import torch\n'), ((5811, 5847), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', 'nz'], {}), '(opt.batchSize, nz)\n', (5828, 5847), False, 'import torch\n'), ((5950, 5982), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize'], {}), '(opt.batchSize)\n', (5967, 5982), False, 'import torch\n'), ((6229, 6244), 'torch.autograd.Variable', 'Variable', (['input'], {}), '(input)\n', (6237, 6244), False, 'from torch.autograd import Variable\n'), ((6257, 6272), 'torch.autograd.Variable', 'Variable', (['label'], {}), '(label)\n', (6265, 6272), False, 'from torch.autograd import Variable\n'), ((6285, 6300), 'torch.autograd.Variable', 'Variable', (['noise'], {}), '(noise)\n', (6293, 6300), False, 'from torch.autograd import Variable\n'), ((6319, 6340), 'torch.autograd.Variable', 'Variable', (['fixed_noise'], {}), '(fixed_noise)\n', (6327, 6340), False, 'from torch.autograd import Variable\n'), ((11482, 11515), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(2 * cols, 2)'}), '(figsize=(2 * cols, 2))\n', (11492, 11515), True, 'import matplotlib.pyplot as plt\n'), ((12219, 12235), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (12228, 12235), True, 'import matplotlib.pyplot as plt\n'), ((4472, 4493), 'os.makedirs', 'os.makedirs', (['opt.outf'], {}), '(opt.outf)\n', (4483, 4493), False, 'import os\n'), ((4584, 4608), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (4598, 4608), False, 'import random\n'), ((4784, 4826), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (4810, 4826), False, 'import torch\n'), ((7027, 7152), 'mogdata.generate_data_SingleBatch', 'generate_data_SingleBatch', ([], {'num_mode': 'opt.modes', 'radius': 'opt.radius', 'center': '(0, 0)', 'sigma': 'opt.sigma', 'batchSize': 'opt.batchSize'}), '(num_mode=opt.modes, radius=opt.radius, center=(0,\n 0), sigma=opt.sigma, batchSize=opt.batchSize)\n', (7052, 7152), False, 'from mogdata import generate_data_SingleBatch, loglikelihood\n'), ((10582, 10727), 'mogdata.generate_data_SingleBatch', 'generate_data_SingleBatch', ([], {'num_mode': 'opt.modes', 'radius': 'opt.radius', 'center': '(0, 0)', 'sigma': 'opt.sigma', 'batchSize': '(opt.batchSize * opt.n_batches_viz)'}), '(num_mode=opt.modes, radius=opt.radius, center=(0,\n 0), sigma=opt.sigma, batchSize=opt.batchSize * opt.n_batches_viz)\n', (10607, 10727), False, 'from mogdata import generate_data_SingleBatch, loglikelihood\n'), ((10869, 10895), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (10879, 10895), True, 'import matplotlib.pyplot as plt\n'), ((11172, 11187), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (11180, 11187), True, 'import matplotlib.pyplot as plt\n'), ((11196, 11237), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('%s/MoG_Real.pdf' % opt.outf)"], {}), "('%s/MoG_Real.pdf' % opt.outf)\n", (11207, 11237), True, 'import matplotlib.pyplot as plt\n'), ((11248, 11259), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11257, 11259), True, 'import matplotlib.pyplot as plt\n'), ((11433, 11474), 'seaborn.color_palette', 'sns.color_palette', (['"""Greens"""'], {'n_colors': '(256)'}), "('Greens', n_colors=256)\n", (11450, 11474), True, 'import seaborn as sns\n'), ((11702, 11815), 'seaborn.kdeplot', 'sns.kdeplot', (['samps[:, 0]', 'samps[:, 1]'], {'shade': '(True)', 'cmap': '"""Greens"""', 'n_levels': 'nLevels', 'clip': '([[-xmax, xmax]] * 2)'}), "(samps[:, 0], samps[:, 1], shade=True, cmap='Greens', n_levels=\n nLevels, clip=[[-xmax, xmax]] * 2)\n", (11713, 11815), True, 'import seaborn as sns\n'), ((11852, 11866), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (11862, 11866), True, 'import matplotlib.pyplot as plt\n'), ((11868, 11882), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (11878, 11882), True, 'import matplotlib.pyplot as plt\n'), ((11891, 11933), 'matplotlib.pyplot.title', 'plt.title', (["('step %d' % (i * opt.viz_every))"], {}), "('step %d' % (i * opt.viz_every))\n", (11900, 11933), True, 'import matplotlib.pyplot as plt\n'), ((12038, 12050), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (12048, 12050), True, 'import matplotlib.pyplot as plt\n'), ((12064, 12076), 'numpy.array', 'np.array', (['fs'], {}), '(fs)\n', (12072, 12076), True, 'import numpy as np\n'), ((12085, 12097), 'matplotlib.pyplot.plot', 'plt.plot', (['fs'], {}), '(fs)\n', (12093, 12097), True, 'import matplotlib.pyplot as plt\n'), ((12106, 12158), 'matplotlib.pyplot.legend', 'plt.legend', (["('Discriminator loss', 'Generator loss')"], {}), "(('Discriminator loss', 'Generator loss'))\n", (12116, 12158), True, 'import matplotlib.pyplot as plt\n'), ((12399, 12424), 'torch.nn.init.orthogonal', 'init.orthogonal', (['m.weight'], {}), '(m.weight)\n', (12414, 12424), True, 'import torch.nn.init as init\n'), ((12433, 12459), 'torch.nn.init.constant', 'init.constant', (['m.bias', '(0.1)'], {}), '(m.bias, 0.1)\n', (12446, 12459), True, 'import torch.nn.init as init\n'), ((3378, 3396), 'torch.nn.Linear', 'nn.Linear', (['nz', 'ngf'], {}), '(nz, ngf)\n', (3387, 3396), True, 'import torch.nn as nn\n'), ((3410, 3419), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (3417, 3419), True, 'import torch.nn as nn\n'), ((3433, 3452), 'torch.nn.Linear', 'nn.Linear', (['ngf', 'ngf'], {}), '(ngf, ngf)\n', (3442, 3452), True, 'import torch.nn as nn\n'), ((3466, 3475), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (3473, 3475), True, 'import torch.nn as nn\n'), ((3489, 3506), 'torch.nn.Linear', 'nn.Linear', (['ngf', '(2)'], {}), '(ngf, 2)\n', (3498, 3506), True, 'import torch.nn as nn\n'), ((3954, 3971), 'torch.nn.Linear', 'nn.Linear', (['(2)', 'ndf'], {}), '(2, ndf)\n', (3963, 3971), True, 'import torch.nn as nn\n'), ((3985, 3994), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (3992, 3994), True, 'import torch.nn as nn\n'), ((4008, 4027), 'torch.nn.Linear', 'nn.Linear', (['ndf', 'ndf'], {}), '(ndf, ndf)\n', (4017, 4027), True, 'import torch.nn as nn\n'), ((4041, 4050), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (4048, 4050), True, 'import torch.nn as nn\n'), ((4064, 4081), 'torch.nn.Linear', 'nn.Linear', (['ndf', '(1)'], {}), '(ndf, 1)\n', (4073, 4081), True, 'import torch.nn as nn\n'), ((4095, 4107), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (4105, 4107), True, 'import torch.nn as nn\n'), ((5535, 5555), 'torch.load', 'torch.load', (['opt.netG'], {}), '(opt.netG)\n', (5545, 5555), False, 'import torch\n'), ((5682, 5702), 'torch.load', 'torch.load', (['opt.netD'], {}), '(opt.netD)\n', (5692, 5702), False, 'import torch\n'), ((5866, 5922), 'torch.FloatTensor', 'torch.FloatTensor', (['(opt.batchSize * opt.n_batches_viz)', 'nz'], {}), '(opt.batchSize * opt.n_batches_viz, nz)\n', (5883, 5922), False, 'import torch\n'), ((6799, 6811), 'time.clock', 'time.clock', ([], {}), '()\n', (6809, 6811), False, 'import time\n'), ((10121, 10147), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (10131, 10147), True, 'import matplotlib.pyplot as plt\n'), ((10414, 10429), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10422, 10429), True, 'import matplotlib.pyplot as plt\n'), ((10446, 10503), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('%s/MoG_Fake_withP_%03d.pdf' % (opt.outf, i))"], {}), "('%s/MoG_Fake_withP_%03d.pdf' % (opt.outf, i))\n", (10457, 10503), True, 'import matplotlib.pyplot as plt\n'), ((10520, 10531), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10529, 10531), True, 'import matplotlib.pyplot as plt\n'), ((10935, 11024), 'matplotlib.pyplot.scatter', 'plt.scatter', (['tmp_cpu[:, 0]', 'tmp_cpu[:, 1]'], {'c': '"""g"""', 'edgecolor': '"""none"""', 's': 'opt.markerSize'}), "(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none', s=opt.\n markerSize)\n", (10946, 11024), True, 'import matplotlib.pyplot as plt\n'), ((11071, 11137), 'matplotlib.pyplot.scatter', 'plt.scatter', (['tmp_cpu[:, 0]', 'tmp_cpu[:, 1]'], {'c': '"""g"""', 'edgecolor': '"""none"""'}), "(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none')\n", (11082, 11137), True, 'import matplotlib.pyplot as plt\n'), ((11594, 11617), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', 'cols', '(1)'], {}), '(1, cols, 1)\n', (11605, 11617), True, 'import matplotlib.pyplot as plt\n'), ((11642, 11691), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', 'cols', '(i + 1)'], {'sharex': 'ax', 'sharey': 'ax'}), '(1, cols, i + 1, sharex=ax, sharey=ax)\n', (11653, 11691), True, 'import matplotlib.pyplot as plt\n'), ((11935, 11944), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (11942, 11944), True, 'import matplotlib.pyplot as plt\n'), ((10203, 10292), 'matplotlib.pyplot.scatter', 'plt.scatter', (['tmp_cpu[:, 0]', 'tmp_cpu[:, 1]'], {'c': '"""g"""', 'edgecolor': '"""none"""', 's': 'opt.markerSize'}), "(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none', s=opt.\n markerSize)\n", (10214, 10292), True, 'import matplotlib.pyplot as plt\n'), ((10330, 10396), 'matplotlib.pyplot.scatter', 'plt.scatter', (['tmp_cpu[:, 0]', 'tmp_cpu[:, 1]'], {'c': '"""g"""', 'edgecolor': '"""none"""'}), "(tmp_cpu[:, 0], tmp_cpu[:, 1], c='g', edgecolor='none')\n", (10341, 10396), True, 'import matplotlib.pyplot as plt\n'), ((9686, 9698), 'time.clock', 'time.clock', ([], {}), '()\n', (9696, 9698), False, 'import time\n')]
|
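The extraction tuples above close out a GAN-on-mixture-of-Gaussians training script; the torch.nn.init calls near the end are the usual module-wise initializer pattern. A minimal sketch of that pattern, assuming the initializer is applied with Module.apply (the function name weights_init is illustrative; init.orthogonal/init.constant are the legacy spellings recorded in the tuples, current PyTorch prefers the trailing-underscore variants):

import torch.nn as nn
import torch.nn.init as init

def weights_init(m):
    # Orthogonal weights and a small constant bias for every Linear layer.
    if isinstance(m, nn.Linear):
        init.orthogonal(m.weight)
        init.constant(m.bias, 0.1)

# netG.apply(weights_init)  # hypothetical generator instance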
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import pulumi
import pulumi.runtime
class GetOrganizationResult(object):
"""
A collection of values returned by getOrganization.
"""
def __init__(__self__, create_time=None, directory_customer_id=None, domain=None, lifecycle_state=None, name=None, id=None):
        # basestring is Python 2-only; check against str for Python 3
        if create_time and not isinstance(create_time, str):
            raise TypeError('Expected argument create_time to be a str')
__self__.create_time = create_time
"""
Timestamp when the Organization was created. A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z".
"""
        if directory_customer_id and not isinstance(directory_customer_id, str):
            raise TypeError('Expected argument directory_customer_id to be a str')
__self__.directory_customer_id = directory_customer_id
"""
The Google for Work customer ID of the Organization.
"""
        if domain and not isinstance(domain, str):
            raise TypeError('Expected argument domain to be a str')
__self__.domain = domain
        if lifecycle_state and not isinstance(lifecycle_state, str):
            raise TypeError('Expected argument lifecycle_state to be a str')
__self__.lifecycle_state = lifecycle_state
"""
The Organization's current lifecycle state.
"""
        if name and not isinstance(name, str):
            raise TypeError('Expected argument name to be a str')
__self__.name = name
"""
The resource name of the Organization in the form `organizations/{organization_id}`.
"""
        if id and not isinstance(id, str):
            raise TypeError('Expected argument id to be a str')
__self__.id = id
"""
id is the provider-assigned unique ID for this managed resource.
"""
def get_organization(domain=None, organization=None):
"""
Use this data source to get information about a Google Cloud Organization.
```hcl
data "google_organization" "org" {
domain = "example.com"
}
resource "google_folder" "sales" {
display_name = "Sales"
parent = "${data.google_organization.org.name}"
}
```
"""
__args__ = dict()
__args__['domain'] = domain
__args__['organization'] = organization
__ret__ = pulumi.runtime.invoke('gcp:organizations/getOrganization:getOrganization', __args__)
return GetOrganizationResult(
create_time=__ret__.get('createTime'),
directory_customer_id=__ret__.get('directoryCustomerId'),
domain=__ret__.get('domain'),
lifecycle_state=__ret__.get('lifecycleState'),
name=__ret__.get('name'),
id=__ret__.get('id'))
|
[
"pulumi.runtime.invoke"
] |
[((2633, 2721), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""gcp:organizations/getOrganization:getOrganization"""', '__args__'], {}), "('gcp:organizations/getOrganization:getOrganization',\n __args__)\n", (2654, 2721), False, 'import pulumi\n')]
|
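A minimal usage sketch for the data source above, assuming the generated module ships inside the pulumi_gcp package and runs inside a Pulumi program with a configured gcp provider (the import path and domain are illustrative):

from pulumi_gcp.organizations import get_organization  # assumed module path

org = get_organization(domain="example.com")
# Fields mirror GetOrganizationResult above.
print(org.name, org.directory_customer_id, org.lifecycle_state)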
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG for Google BigQuery Sensors.
"""
import os
from datetime import datetime
from airflow import models
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator,
BigQueryExecuteQueryOperator,
)
from airflow.providers.google.cloud.sensors.bigquery import (
BigQueryTableExistenceSensor, BigQueryTablePartitionExistenceSensor,
)
from airflow.utils.dates import days_ago
PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
DATASET_NAME = os.environ.get("GCP_BIGQUERY_DATASET_NAME", "test_sensors_dataset")
TABLE_NAME = "partitioned_table"
INSERT_DATE = datetime.now().strftime("%Y-%m-%d")
PARTITION_NAME = "{{ ds_nodash }}"
INSERT_ROWS_QUERY = \
f"INSERT {DATASET_NAME}.{TABLE_NAME} VALUES " \
"(42, '{{ ds }}')"
SCHEMA = [
{"name": "value", "type": "INTEGER", "mode": "REQUIRED"},
{"name": "ds", "type": "DATE", "mode": "NULLABLE"},
]
dag_id = "example_bigquery_sensors"
with models.DAG(
dag_id,
schedule_interval=None, # Override to match your needs
start_date=days_ago(1),
tags=["example"],
user_defined_macros={"DATASET": DATASET_NAME, "TABLE": TABLE_NAME},
default_args={"project_id": PROJECT_ID}
) as dag_with_locations:
create_dataset = BigQueryCreateEmptyDatasetOperator(
task_id="create-dataset", dataset_id=DATASET_NAME, project_id=PROJECT_ID
)
create_table = BigQueryCreateEmptyTableOperator(
task_id="create_table",
dataset_id=DATASET_NAME,
table_id=TABLE_NAME,
schema_fields=SCHEMA,
time_partitioning={
"type": "DAY",
"field": "ds",
}
)
# [START howto_sensor_bigquery_table]
check_table_exists = BigQueryTableExistenceSensor(
task_id="check_table_exists", project_id=PROJECT_ID, dataset_id=DATASET_NAME, table_id=TABLE_NAME
)
# [END howto_sensor_bigquery_table]
execute_insert_query = BigQueryExecuteQueryOperator(
task_id="execute_insert_query", sql=INSERT_ROWS_QUERY, use_legacy_sql=False
)
# [START howto_sensor_bigquery_table_partition]
check_table_partition_exists = BigQueryTablePartitionExistenceSensor(
task_id="check_table_partition_exists", project_id=PROJECT_ID, dataset_id=DATASET_NAME,
table_id=TABLE_NAME, partition_id=PARTITION_NAME
)
# [END howto_sensor_bigquery_table_partition]
delete_dataset = BigQueryDeleteDatasetOperator(
task_id="delete_dataset", dataset_id=DATASET_NAME, delete_contents=True
)
create_dataset >> create_table
create_table >> check_table_exists
create_table >> execute_insert_query
execute_insert_query >> check_table_partition_exists
check_table_exists >> delete_dataset
check_table_partition_exists >> delete_dataset
|
[
"airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor",
"airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator",
"os.environ.get",
"airflow.utils.dates.days_ago",
"airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor",
"airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator",
"airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator",
"airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator",
"datetime.datetime.now"
] |
[((1310, 1361), 'os.environ.get', 'os.environ.get', (['"""GCP_PROJECT_ID"""', '"""example-project"""'], {}), "('GCP_PROJECT_ID', 'example-project')\n", (1324, 1361), False, 'import os\n'), ((1377, 1444), 'os.environ.get', 'os.environ.get', (['"""GCP_BIGQUERY_DATASET_NAME"""', '"""test_sensors_dataset"""'], {}), "('GCP_BIGQUERY_DATASET_NAME', 'test_sensors_dataset')\n", (1391, 1444), False, 'import os\n'), ((2134, 2247), 'airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator', 'BigQueryCreateEmptyDatasetOperator', ([], {'task_id': '"""create-dataset"""', 'dataset_id': 'DATASET_NAME', 'project_id': 'PROJECT_ID'}), "(task_id='create-dataset', dataset_id=\n DATASET_NAME, project_id=PROJECT_ID)\n", (2168, 2247), False, 'from airflow.providers.google.cloud.operators.bigquery import BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, BigQueryExecuteQueryOperator\n'), ((2277, 2460), 'airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator', 'BigQueryCreateEmptyTableOperator', ([], {'task_id': '"""create_table"""', 'dataset_id': 'DATASET_NAME', 'table_id': 'TABLE_NAME', 'schema_fields': 'SCHEMA', 'time_partitioning': "{'type': 'DAY', 'field': 'ds'}"}), "(task_id='create_table', dataset_id=\n DATASET_NAME, table_id=TABLE_NAME, schema_fields=SCHEMA,\n time_partitioning={'type': 'DAY', 'field': 'ds'})\n", (2309, 2460), False, 'from airflow.providers.google.cloud.operators.bigquery import BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, BigQueryExecuteQueryOperator\n'), ((2600, 2732), 'airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor', 'BigQueryTableExistenceSensor', ([], {'task_id': '"""check_table_exists"""', 'project_id': 'PROJECT_ID', 'dataset_id': 'DATASET_NAME', 'table_id': 'TABLE_NAME'}), "(task_id='check_table_exists', project_id=\n PROJECT_ID, dataset_id=DATASET_NAME, table_id=TABLE_NAME)\n", (2628, 2732), False, 'from airflow.providers.google.cloud.sensors.bigquery import BigQueryTableExistenceSensor, BigQueryTablePartitionExistenceSensor\n'), ((2810, 2920), 'airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator', 'BigQueryExecuteQueryOperator', ([], {'task_id': '"""execute_insert_query"""', 'sql': 'INSERT_ROWS_QUERY', 'use_legacy_sql': '(False)'}), "(task_id='execute_insert_query', sql=\n INSERT_ROWS_QUERY, use_legacy_sql=False)\n", (2838, 2920), False, 'from airflow.providers.google.cloud.operators.bigquery import BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, BigQueryExecuteQueryOperator\n'), ((3018, 3203), 'airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor', 'BigQueryTablePartitionExistenceSensor', ([], {'task_id': '"""check_table_partition_exists"""', 'project_id': 'PROJECT_ID', 'dataset_id': 'DATASET_NAME', 'table_id': 'TABLE_NAME', 'partition_id': 'PARTITION_NAME'}), "(task_id=\n 'check_table_partition_exists', project_id=PROJECT_ID, dataset_id=\n DATASET_NAME, table_id=TABLE_NAME, partition_id=PARTITION_NAME)\n", (3055, 3203), False, 'from airflow.providers.google.cloud.sensors.bigquery import BigQueryTableExistenceSensor, BigQueryTablePartitionExistenceSensor\n'), ((3288, 3395), 'airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator', 'BigQueryDeleteDatasetOperator', ([], {'task_id': '"""delete_dataset"""', 'dataset_id': 'DATASET_NAME', 'delete_contents': 
'(True)'}), "(task_id='delete_dataset', dataset_id=\n DATASET_NAME, delete_contents=True)\n", (3317, 3395), False, 'from airflow.providers.google.cloud.operators.bigquery import BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, BigQueryExecuteQueryOperator\n'), ((1493, 1507), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1505, 1507), False, 'from datetime import datetime\n'), ((1937, 1948), 'airflow.utils.dates.days_ago', 'days_ago', (['(1)'], {}), '(1)\n', (1945, 1948), False, 'from airflow.utils.dates import days_ago\n')]
|
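Sensors in the DAG above inherit the standard BaseSensorOperator knobs, which is usually where tuning happens; a sketch of the same existence check configured for long waits (the values are illustrative, and the operator would sit inside the with models.DAG(...) block):

    check_table_exists_cheap = BigQueryTableExistenceSensor(
        task_id="check_table_exists_cheap",
        project_id=PROJECT_ID,
        dataset_id=DATASET_NAME,
        table_id=TABLE_NAME,
        poke_interval=60,   # seconds between pokes
        timeout=60 * 60,    # give up after an hour
        mode="reschedule",  # free the worker slot between pokes
    )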
# Generated by Django 3.1a1 on 2020-07-07 07:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import strops.utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('references', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Parameter',
fields=[
('id', models.AutoField(help_text='Primary key for Base class.', primary_key=True, serialize=False)),
('last_modified', models.DateTimeField(auto_now=True, help_text='Date the class was last modified')),
('tag', models.CharField(blank=True, help_text='User defined tag for easy searches', max_length=200, null=True)),
('name', models.CharField(help_text='Descriptive name of the variable', max_length=256)),
('symbol', strops.utils.fields.SympyField(encoder='expression', help_text='The mathematical symbol (Sympy syntax)')),
('value', models.JSONField(help_text='Value or descriptive information.')),
('reference', models.ForeignKey(help_text='Publication specifying the parameter.', on_delete=django.db.models.deletion.CASCADE, to='references.publication')),
('user', models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('name', 'reference')},
},
),
]
|
[
"django.db.migrations.swappable_dependency",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.JSONField",
"django.db.models.AutoField",
"django.db.models.DateTimeField"
] |
[((314, 371), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (345, 371), False, 'from django.db import migrations, models\n'), ((505, 601), 'django.db.models.AutoField', 'models.AutoField', ([], {'help_text': '"""Primary key for Base class."""', 'primary_key': '(True)', 'serialize': '(False)'}), "(help_text='Primary key for Base class.', primary_key=True,\n serialize=False)\n", (521, 601), False, 'from django.db import migrations, models\n'), ((634, 720), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'help_text': '"""Date the class was last modified"""'}), "(auto_now=True, help_text=\n 'Date the class was last modified')\n", (654, 720), False, 'from django.db import migrations, models\n'), ((742, 849), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""User defined tag for easy searches"""', 'max_length': '(200)', 'null': '(True)'}), "(blank=True, help_text='User defined tag for easy searches',\n max_length=200, null=True)\n", (758, 849), False, 'from django.db import migrations, models\n'), ((873, 951), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Descriptive name of the variable"""', 'max_length': '(256)'}), "(help_text='Descriptive name of the variable', max_length=256)\n", (889, 951), False, 'from django.db import migrations, models\n'), ((1114, 1177), 'django.db.models.JSONField', 'models.JSONField', ([], {'help_text': '"""Value or descriptive information."""'}), "(help_text='Value or descriptive information.')\n", (1130, 1177), False, 'from django.db import migrations, models\n'), ((1210, 1356), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'help_text': '"""Publication specifying the parameter."""', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""references.publication"""'}), "(help_text='Publication specifying the parameter.',\n on_delete=django.db.models.deletion.CASCADE, to='references.publication')\n", (1227, 1356), False, 'from django.db import migrations, models\n'), ((1380, 1615), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'help_text': '"""User who updated this object. Set on save by connection to database. Anonymous if not found."""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, help_text=\n 'User who updated this object. Set on save by connection to database. Anonymous if not found.'\n , null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n", (1397, 1615), False, 'from django.db import migrations, models\n')]
|
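A sketch of the Parameter model this initial migration corresponds to, reconstructed from the field list (the shared base class that supplies id, last_modified, tag and user is implied by the help texts and is omitted here):

from django.db import models
from strops.utils.fields import SympyField

class Parameter(models.Model):
    name = models.CharField(max_length=256, help_text="Descriptive name of the variable")
    symbol = SympyField(encoder="expression", help_text="The mathematical symbol (Sympy syntax)")
    value = models.JSONField(help_text="Value or descriptive information.")
    reference = models.ForeignKey("references.Publication", on_delete=models.CASCADE,
                                  help_text="Publication specifying the parameter.")

    class Meta:
        unique_together = (("name", "reference"),)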
import functools
import datetime
def validate_id(func):
@functools.wraps(func)
def wrapper(_, id):
if type(id) != str:
            raise TypeError(
                f"\n Expecting an id of type string but got {type(id)}\n")
result = func(_, id)
return result
return wrapper
def validate_dates(func):
@functools.wraps(func)
def wrapper(_, _from, _to):
        # Check if the date format is in the order YYYY-MM-DD
date_format = '%Y-%m-%d'
try:
_from = datetime.datetime.strptime(_from, date_format)
_to = datetime.datetime.strptime(_to, date_format)
except ValueError:
            raise ValueError("Incorrect date format, should be YYYY-MM-DD")
if _to < _from:
raise Exception(
"The start date cannot be greater than the end date")
result = func(_, _from, _to)
return result
return wrapper
def validate_date_id(func):
@functools.wraps(func)
def wrapper(_, _from, _to, customer):
if type(customer) != str:
            raise TypeError(
                f"\n Expecting an id of type string but got {type(customer)}\n")
        # Check if the date format is in the order YYYY-MM-DD
date_format = '%Y-%m-%d'
try:
_from = datetime.datetime.strptime(_from, date_format)
_to = datetime.datetime.strptime(_to, date_format)
except ValueError:
            raise ValueError("Incorrect date format, should be YYYY-MM-DD")
if _to < _from:
raise Exception(
"The start date cannot be greater than the end date")
result = func(_, _from, _to, customer)
return result
return wrapper
|
[
"datetime.datetime.strptime",
"functools.wraps"
] |
[((63, 84), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (78, 84), False, 'import functools\n'), ((345, 366), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (360, 366), False, 'import functools\n'), ((981, 1002), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (996, 1002), False, 'import functools\n'), ((529, 575), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['_from', 'date_format'], {}), '(_from, date_format)\n', (555, 575), False, 'import datetime\n'), ((594, 638), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['_to', 'date_format'], {}), '(_to, date_format)\n', (620, 638), False, 'import datetime\n'), ((1320, 1366), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['_from', 'date_format'], {}), '(_from, date_format)\n', (1346, 1366), False, 'import datetime\n'), ((1385, 1429), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['_to', 'date_format'], {}), '(_to, date_format)\n', (1411, 1429), False, 'import datetime\n')]
|
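A usage sketch for the decorators above, assuming they wrap instance methods (the leading parameter _ is self; the class and method names are illustrative):

class CustomerReport:
    @validate_id
    def get_customer(self, id):
        return {"id": id}

    @validate_dates
    def get_range(self, _from, _to):
        return _from, _to

report = CustomerReport()
report.get_customer("cus_123")                 # ok
report.get_range("2020-01-01", "2020-02-01")   # ok; wrapper passes parsed datetimes through
# report.get_range("2020-02-01", "2020-01-01")  # raises: start date after end date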
import os
import click
import tempfile
import shutil
import logging
import stitch_common as sc
@click.command()
@click.argument('root_dir')
def get_video_lengths(root_dir):
for root, subdirs, files in os.walk(root_dir):
logging.info('**** Processing folder: {}'.format(root))
mp4s = [fi for fi in files if fi.lower().endswith('.mp4') and not fi.startswith('._')]
file_text = ''
for vid in mp4s:
file_path = "{}/{}".format(root, vid)
logging.info('Grabbing details from {}'.format(file_path))
mp4_details = sc.get_video_details(file_path)
if len(mp4_details) == 0:
logging.error('**** bad mp4 detected: {} ****'.format(file_path))
else:
logging.info('MP4 details: {}'.format(str(mp4_details)))
logging.info('Finished folder.\n')
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO)
get_video_lengths()
|
[
"click.argument",
"logging.basicConfig",
"os.walk",
"click.command",
"logging.info",
"stitch_common.get_video_details"
] |
[((97, 112), 'click.command', 'click.command', ([], {}), '()\n', (110, 112), False, 'import click\n'), ((114, 140), 'click.argument', 'click.argument', (['"""root_dir"""'], {}), "('root_dir')\n", (128, 140), False, 'import click\n'), ((206, 223), 'os.walk', 'os.walk', (['root_dir'], {}), '(root_dir)\n', (213, 223), False, 'import os\n'), ((898, 990), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s:%(levelname)s:%(message)s"""', 'level': 'logging.INFO'}), "(format='%(asctime)s:%(levelname)s:%(message)s', level=\n logging.INFO)\n", (917, 990), False, 'import logging\n'), ((831, 865), 'logging.info', 'logging.info', (['"""Finished folder.\n"""'], {}), "('Finished folder.\\n')\n", (843, 865), False, 'import logging\n'), ((580, 611), 'stitch_common.get_video_details', 'sc.get_video_details', (['file_path'], {}), '(file_path)\n', (600, 611), True, 'import stitch_common as sc\n')]
|
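Click commands like this one can be exercised in-process with click.testing.CliRunner; a minimal sketch (the directory path is illustrative):

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(get_video_lengths, ["/path/to/videos"])
assert result.exit_code == 0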
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import pytest
import cudf
import cugraph
from cugraph.internals import GraphBasedDimRedCallback
from cugraph.tests import utils
from sklearn.manifold import trustworthiness
import scipy.io
from pathlib import PurePath
# Temporarily suppress warnings till networkX fixes deprecation warnings
# (Using or importing the ABCs from 'collections' instead of from
# 'collections.abc' is deprecated, and in 3.8 it will stop working) for
# python 3.7. Also, these import fa2 and import networkx need to be
# relocated in the third-party group once this gets fixed.
def cugraph_call(cu_M, max_iter, pos_list, outbound_attraction_distribution,
lin_log_mode, prevent_overlapping, edge_weight_influence,
jitter_tolerance, barnes_hut_theta, barnes_hut_optimize,
scaling_ratio, strong_gravity_mode, gravity, callback=None):
G = cugraph.Graph()
G.from_cudf_edgelist(
cu_M, source="0", destination="1", edge_attr="2", renumber=False
)
# cugraph Force Atlas 2 Call
t1 = time.time()
pos = cugraph.force_atlas2(
G,
max_iter=max_iter,
pos_list=pos_list,
outbound_attraction_distribution=outbound_attraction_distribution,
lin_log_mode=lin_log_mode,
prevent_overlapping=prevent_overlapping,
edge_weight_influence=edge_weight_influence,
jitter_tolerance=jitter_tolerance,
barnes_hut_optimize=barnes_hut_optimize,
barnes_hut_theta=barnes_hut_theta,
scaling_ratio=scaling_ratio,
strong_gravity_mode=strong_gravity_mode,
gravity=gravity,
callback=callback)
t2 = time.time() - t1
print("Cugraph Time : " + str(t2))
return pos
DATASETS = [
(PurePath(utils.RAPIDS_DATASET_ROOT_DIR)/f,)+(d,) for (f, d) in [
("karate.csv", 0.70),
("polbooks.csv", 0.75),
("dolphins.csv", 0.66),
("netscience.csv", 0.66)]
]
MAX_ITERATIONS = [500]
BARNES_HUT_OPTIMIZE = [False, True]
class TestCallback(GraphBasedDimRedCallback):
def __init__(self):
super(TestCallback, self).__init__()
self.on_preprocess_end_called_count = 0
self.on_epoch_end_called_count = 0
self.on_train_end_called_count = 0
def on_preprocess_end(self, positions):
self.on_preprocess_end_called_count += 1
def on_epoch_end(self, positions):
self.on_epoch_end_called_count += 1
def on_train_end(self, positions):
self.on_train_end_called_count += 1
@pytest.mark.parametrize('graph_file, score', DATASETS)
@pytest.mark.parametrize('max_iter', MAX_ITERATIONS)
@pytest.mark.parametrize('barnes_hut_optimize', BARNES_HUT_OPTIMIZE)
def test_force_atlas2(graph_file, score, max_iter,
barnes_hut_optimize):
cu_M = utils.read_csv_file(graph_file)
test_callback = TestCallback()
cu_pos = cugraph_call(cu_M,
max_iter=max_iter,
pos_list=None,
outbound_attraction_distribution=True,
lin_log_mode=False,
prevent_overlapping=False,
edge_weight_influence=1.0,
jitter_tolerance=1.0,
barnes_hut_optimize=False,
barnes_hut_theta=0.5,
scaling_ratio=2.0,
strong_gravity_mode=False,
gravity=1.0,
callback=test_callback)
"""
Trustworthiness score can be used for Force Atlas 2 as the algorithm
optimizes modularity. The final layout will result in
different communities being drawn out. We consider here the n x n
adjacency matrix of the graph as an embedding of the nodes in high
dimension. The results of force atlas 2 corresponds to the layout in
a 2d space. Here we check that nodes belonging to the same community
or neighbors are close to each other in the final embedding.
    Thresholds are based on the best score that is achieved after 500
iterations on a given graph.
"""
matrix_file = graph_file.with_suffix(".mtx")
M = scipy.io.mmread(matrix_file)
M = M.todense()
cu_trust = trustworthiness(M, cu_pos[["x", "y"]].to_pandas())
print(cu_trust, score)
assert cu_trust > score
# verify `on_preprocess_end` was only called once
assert test_callback.on_preprocess_end_called_count == 1
# verify `on_epoch_end` was called on each iteration
assert test_callback.on_epoch_end_called_count == max_iter
# verify `on_train_end` was only called once
assert test_callback.on_train_end_called_count == 1
@pytest.mark.parametrize('graph_file, score', DATASETS[:-1])
@pytest.mark.parametrize('max_iter', MAX_ITERATIONS)
@pytest.mark.parametrize('barnes_hut_optimize', BARNES_HUT_OPTIMIZE)
def test_force_atlas2_multi_column_pos_list(graph_file, score, max_iter,
barnes_hut_optimize):
cu_M = utils.read_csv_file(graph_file)
test_callback = TestCallback()
pos = cugraph_call(cu_M,
max_iter=max_iter,
pos_list=None,
outbound_attraction_distribution=True,
lin_log_mode=False,
prevent_overlapping=False,
edge_weight_influence=1.0,
jitter_tolerance=1.0,
barnes_hut_optimize=False,
barnes_hut_theta=0.5,
scaling_ratio=2.0,
strong_gravity_mode=False,
gravity=1.0,
callback=test_callback)
cu_M.rename(columns={'0': 'src_0', '1': 'dst_0'}, inplace=True)
cu_M['src_1'] = cu_M['src_0'] + 1000
cu_M['dst_1'] = cu_M['dst_0'] + 1000
G = cugraph.Graph()
G.from_cudf_edgelist(
cu_M, source=["src_0", "src_1"],
destination=["dst_0", "dst_1"],
edge_attr="2"
)
pos_list = cudf.DataFrame()
pos_list['vertex_0'] = pos['vertex']
pos_list['vertex_1'] = pos_list['vertex_0'] + 1000
pos_list['x'] = pos['x']
pos_list['y'] = pos['y']
cu_pos = cugraph.force_atlas2(
G,
max_iter=max_iter,
pos_list=pos_list,
outbound_attraction_distribution=True,
lin_log_mode=False,
prevent_overlapping=False,
edge_weight_influence=1.0,
jitter_tolerance=1.0,
barnes_hut_optimize=False,
barnes_hut_theta=0.5,
scaling_ratio=2.0,
strong_gravity_mode=False,
gravity=1.0,
callback=test_callback)
cu_pos = cu_pos.sort_values('0_vertex')
matrix_file = graph_file.with_suffix(".mtx")
M = scipy.io.mmread(matrix_file)
M = M.todense()
cu_trust = trustworthiness(M, cu_pos[["x", "y"]].to_pandas())
print(cu_trust, score)
assert cu_trust > score
|
[
"cudf.DataFrame",
"cugraph.Graph",
"cugraph.tests.utils.read_csv_file",
"time.time",
"cugraph.force_atlas2",
"pathlib.PurePath",
"pytest.mark.parametrize"
] |
[((3164, 3218), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_file, score"""', 'DATASETS'], {}), "('graph_file, score', DATASETS)\n", (3187, 3218), False, 'import pytest\n'), ((3220, 3271), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""max_iter"""', 'MAX_ITERATIONS'], {}), "('max_iter', MAX_ITERATIONS)\n", (3243, 3271), False, 'import pytest\n'), ((3273, 3340), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""barnes_hut_optimize"""', 'BARNES_HUT_OPTIMIZE'], {}), "('barnes_hut_optimize', BARNES_HUT_OPTIMIZE)\n", (3296, 3340), False, 'import pytest\n'), ((5393, 5452), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""graph_file, score"""', 'DATASETS[:-1]'], {}), "('graph_file, score', DATASETS[:-1])\n", (5416, 5452), False, 'import pytest\n'), ((5454, 5505), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""max_iter"""', 'MAX_ITERATIONS'], {}), "('max_iter', MAX_ITERATIONS)\n", (5477, 5505), False, 'import pytest\n'), ((5507, 5574), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""barnes_hut_optimize"""', 'BARNES_HUT_OPTIMIZE'], {}), "('barnes_hut_optimize', BARNES_HUT_OPTIMIZE)\n", (5530, 5574), False, 'import pytest\n'), ((1478, 1493), 'cugraph.Graph', 'cugraph.Graph', ([], {}), '()\n', (1491, 1493), False, 'import cugraph\n'), ((1642, 1653), 'time.time', 'time.time', ([], {}), '()\n', (1651, 1653), False, 'import time\n'), ((1664, 2152), 'cugraph.force_atlas2', 'cugraph.force_atlas2', (['G'], {'max_iter': 'max_iter', 'pos_list': 'pos_list', 'outbound_attraction_distribution': 'outbound_attraction_distribution', 'lin_log_mode': 'lin_log_mode', 'prevent_overlapping': 'prevent_overlapping', 'edge_weight_influence': 'edge_weight_influence', 'jitter_tolerance': 'jitter_tolerance', 'barnes_hut_optimize': 'barnes_hut_optimize', 'barnes_hut_theta': 'barnes_hut_theta', 'scaling_ratio': 'scaling_ratio', 'strong_gravity_mode': 'strong_gravity_mode', 'gravity': 'gravity', 'callback': 'callback'}), '(G, max_iter=max_iter, pos_list=pos_list,\n outbound_attraction_distribution=outbound_attraction_distribution,\n lin_log_mode=lin_log_mode, prevent_overlapping=prevent_overlapping,\n edge_weight_influence=edge_weight_influence, jitter_tolerance=\n jitter_tolerance, barnes_hut_optimize=barnes_hut_optimize,\n barnes_hut_theta=barnes_hut_theta, scaling_ratio=scaling_ratio,\n strong_gravity_mode=strong_gravity_mode, gravity=gravity, callback=callback\n )\n', (1684, 2152), False, 'import cugraph\n'), ((3447, 3478), 'cugraph.tests.utils.read_csv_file', 'utils.read_csv_file', (['graph_file'], {}), '(graph_file)\n', (3466, 3478), False, 'from cugraph.tests import utils\n'), ((5725, 5756), 'cugraph.tests.utils.read_csv_file', 'utils.read_csv_file', (['graph_file'], {}), '(graph_file)\n', (5744, 5756), False, 'from cugraph.tests import utils\n'), ((6581, 6596), 'cugraph.Graph', 'cugraph.Graph', ([], {}), '()\n', (6594, 6596), False, 'import cugraph\n'), ((6748, 6764), 'cudf.DataFrame', 'cudf.DataFrame', ([], {}), '()\n', (6762, 6764), False, 'import cudf\n'), ((6933, 7278), 'cugraph.force_atlas2', 'cugraph.force_atlas2', (['G'], {'max_iter': 'max_iter', 'pos_list': 'pos_list', 'outbound_attraction_distribution': '(True)', 'lin_log_mode': '(False)', 'prevent_overlapping': '(False)', 'edge_weight_influence': '(1.0)', 'jitter_tolerance': '(1.0)', 'barnes_hut_optimize': '(False)', 'barnes_hut_theta': '(0.5)', 'scaling_ratio': '(2.0)', 'strong_gravity_mode': '(False)', 'gravity': '(1.0)', 'callback': 'test_callback'}), '(G, max_iter=max_iter, 
pos_list=pos_list,\n outbound_attraction_distribution=True, lin_log_mode=False,\n prevent_overlapping=False, edge_weight_influence=1.0, jitter_tolerance=\n 1.0, barnes_hut_optimize=False, barnes_hut_theta=0.5, scaling_ratio=2.0,\n strong_gravity_mode=False, gravity=1.0, callback=test_callback)\n', (6953, 7278), False, 'import cugraph\n'), ((2301, 2312), 'time.time', 'time.time', ([], {}), '()\n', (2310, 2312), False, 'import time\n'), ((2392, 2431), 'pathlib.PurePath', 'PurePath', (['utils.RAPIDS_DATASET_ROOT_DIR'], {}), '(utils.RAPIDS_DATASET_ROOT_DIR)\n', (2400, 2431), False, 'from pathlib import PurePath\n')]
|
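Outside the test harness, the same callback hook can collect intermediate layouts; a sketch assuming cu_M is a cudf edge list with columns "0", "1", "2" as loaded above (the class name SnapshotCallback is illustrative):

import cugraph
from cugraph.internals import GraphBasedDimRedCallback

class SnapshotCallback(GraphBasedDimRedCallback):
    def __init__(self):
        super().__init__()
        self.snapshots = []  # one positions frame per epoch

    def on_epoch_end(self, positions):
        self.snapshots.append(positions)

G = cugraph.Graph()
G.from_cudf_edgelist(cu_M, source="0", destination="1", edge_attr="2")
pos = cugraph.force_atlas2(G, max_iter=50, callback=SnapshotCallback())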
__author__ = 'katharine'
from gevent import monkey; monkey.patch_all()
from gevent import GreenletExit
import struct
import websocket
import STPyV8 as v8
from .exceptions import JSRuntimeException
from . import events
CloseEvent = lambda runtime, *args: v8.JSObject.create(runtime.context.locals.CloseEvent, args)
MessageEvent = lambda runtime, *args: v8.JSObject.create(runtime.context.locals.MessageEvent, args)
class WebSocket(events.EventSourceMixin):
CONNECTING = 0
OPEN = 1
CLOSING = 2
CLOSED = 3
def __init__(self, runtime, url, protocols=None):
super(WebSocket, self).__init__(runtime)
with runtime.context as ctx:
ctx.eval("""
CloseEvent = function(eventInitDict) {
Event.call(this, "close", eventInitDict);
var wasClean = eventInitDict.wasClean;
var code = eventInitDict.code;
var reason = eventInitDict.reason;
Object.defineProperties(this, {
wasClean: {
get: function() { return wasClean; },
enumerable: true,
},
code: {
get: function() { return code; },
enumerable: true,
},
reason: {
get: function() { return reason; },
enumerable: true,
},
});
};
CloseEvent.prototype = Object.create(Event.prototype);
CloseEvent.prototype.constructor = CloseEvent;
MessageEvent = function(origin, data, eventInitDict) {
Event.call(this, "message", eventInitDict);
this.data = data;
this.origin = origin;
};
MessageEvent.prototype = Object.create(Event.prototype);
MessageEvent.prototype.constructor = CloseEvent;
""")
self.runtime = runtime
self.url = url
if protocols is None or protocols == v8.JSNull:
self.protocols = None
else:
self.protocols = protocols
self.runtime.group.spawn(self.handle_ws)
self.ws = None
# JS properties
self.readyState = self.CONNECTING
self.bufferedAmount = 0
self.onopen = None
self.onerror = None
self.onclose = None
self.onmessage = None
self.extensions = ''
self.protocol = None
self.binaryType = 'arraybuffer'
def close(self, code=1000, reason="", *args):
if self.readyState != self.OPEN:
return
self.readyState = self.CLOSING
self.ws.send_close(code, reason)
def send(self, data, *args):
if self.readyState != self.OPEN:
raise JSRuntimeException("Websocket is not open.")
if isinstance(data, str):
self.ws.send(data)
return
# yay, JavaScript
if str(data) in ['[object %sArray]' % x for x in ('Float32', 'Float64', 'Int16', 'Int32', 'Int8', 'Uint16',
'Uint32', 'Uint8', 'Uint8Clamped')]:
data = data.buffer
if str(data) == '[object ArrayBuffer]':
uint8_array = self.runtime.context.locals.Uint8Array
data_array = uint8_array.create(uint8_array, (data,))
            self.ws.send_binary(bytes(bytearray(data_array[str(x)] for x in range(data_array.length))))  # bytes, not str(): Python 3-safe binary frame
def handle_ws(self):
try:
self.ws = websocket.create_connection(self.url, subprotocols=self.protocols)
except websocket.WebSocketException:
self.handle_error(1006, "Connection failed.")
return
self.protocol = self.ws.subprotocol
self.readyState = self.OPEN
self.triggerEvent("open")
try:
while self.ws.connected:
opcode, data = self.ws.recv_data()
if opcode == websocket.ABNF.OPCODE_TEXT:
self.handle_text(data)
elif opcode == websocket.ABNF.OPCODE_BINARY:
self.handle_binary(data)
elif opcode == websocket.ABNF.OPCODE_CLOSE:
# this is annoying.
if len(data) >= 2:
close_code, = struct.unpack_from("!H", data, 0)
reason = data[2:]
self.handle_closed(close_code, reason)
else:
self.handle_closed()
else:
continue
except GreenletExit:
if self.ws is not None and self.ws.connected:
self.ws.close()
raise
def handle_text(self, data):
def go():
if self.readyState != self.OPEN:
return
self.triggerEvent("message", MessageEvent(self.runtime, self.url, data))
self.runtime.enqueue(go)
def handle_binary(self, data):
def go():
if self.readyState != self.OPEN:
return
if self.binaryType == "arraybuffer":
uint8_array = self.runtime.context.locals.Uint8Array
buffer = uint8_array.create(uint8_array, (v8.JSArray(list(bytearray(data))),)).buffer
self.triggerEvent("message", MessageEvent(self.runtime, self.url, buffer))
self.runtime.enqueue(go)
def handle_error(self, code, reason):
def go():
self.readyState = self.CLOSED
self.triggerEvent("error")
            self.triggerEvent("close", CloseEvent(self.runtime, {'wasClean': False, 'code': code, 'reason': reason}))
self.runtime.enqueue(go)
def handle_closed(self, code=1000, reason=""):
def go():
self.readyState = self.CLOSED
            self.triggerEvent("close", CloseEvent(self.runtime, {'wasClean': True, 'code': code, 'reason': reason}))
self.runtime.enqueue(go)
def prepare_ws(runtime):
runtime.natives['ws'] = WebSocket
with runtime.context as ctx:
ctx.eval("""
_init_websocket = function(runtime, session) {
var _ws = _from_python('ws');
this.WebSocket = function(url, protocols) {
var origin = new _ws(runtime, url, protocols);
_make_proxies(this, origin, ['close', 'send']);
_make_properties(this, origin, ['readyState', 'bufferedAmount', 'onopen', 'onerror', 'onclose', 'onmessage',
'extensions', 'protocol', 'binaryType']);
};
this.WebSocket.CONNECTING = 0;
this.WebSocket.OPEN = 1;
this.WebSocket.CLOSING = 2;
this.WebSocket.CLOSED = 3;
}
""")
return runtime.context.locals._init_websocket(runtime)
|
[
"STPyV8.JSObject.create",
"websocket.create_connection",
"gevent.monkey.patch_all",
"struct.unpack_from"
] |
[((53, 71), 'gevent.monkey.patch_all', 'monkey.patch_all', ([], {}), '()\n', (69, 71), False, 'from gevent import monkey\n'), ((257, 316), 'STPyV8.JSObject.create', 'v8.JSObject.create', (['runtime.context.locals.CloseEvent', 'args'], {}), '(runtime.context.locals.CloseEvent, args)\n', (275, 316), True, 'import STPyV8 as v8\n'), ((355, 416), 'STPyV8.JSObject.create', 'v8.JSObject.create', (['runtime.context.locals.MessageEvent', 'args'], {}), '(runtime.context.locals.MessageEvent, args)\n', (373, 416), True, 'import STPyV8 as v8\n'), ((3593, 3659), 'websocket.create_connection', 'websocket.create_connection', (['self.url'], {'subprotocols': 'self.protocols'}), '(self.url, subprotocols=self.protocols)\n', (3620, 3659), False, 'import websocket\n'), ((4380, 4413), 'struct.unpack_from', 'struct.unpack_from', (['"""!H"""', 'data', '(0)'], {}), "('!H', data, 0)\n", (4398, 4413), False, 'import struct\n')]
|
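The handle_ws loop above rides on the plain websocket-client API; a minimal standalone sketch of the same receive loop (the endpoint URL is hypothetical):

import websocket

ws = websocket.create_connection("wss://echo.example/ws")  # hypothetical endpoint
ws.send("hello")
while ws.connected:
    opcode, data = ws.recv_data()
    if opcode == websocket.ABNF.OPCODE_TEXT:
        print(data.decode("utf-8"))
        break
ws.close()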
"""add step_key pipeline_name
Revision ID: 3b1e175a2be3
Revises: 1<PASSWORD>
Create Date: 2020-03-31 11:01:42.609069
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.engine import reflection
# pylint: disable=no-member
# revision identifiers, used by Alembic.
revision = "3b1e175a2be3"
down_revision = "1<PASSWORD>"
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_context().bind
inspector = reflection.Inspector.from_engine(bind)
has_tables = inspector.get_table_names()
if "event_logs" in has_tables:
columns = [x.get("name") for x in inspector.get_columns("event_logs")]
if "step_key" not in columns:
op.add_column("event_logs", sa.Column("step_key", sa.String))
def downgrade():
bind = op.get_context().bind
inspector = reflection.Inspector.from_engine(bind)
has_tables = inspector.get_table_names()
if "event_logs" in has_tables:
columns = [x.get("name") for x in inspector.get_columns("event_logs")]
if "step_key" in columns:
op.drop_column("event_logs", "step_key")
|
[
"alembic.op.drop_column",
"sqlalchemy.engine.reflection.Inspector.from_engine",
"sqlalchemy.Column",
"alembic.op.get_context"
] |
[((443, 481), 'sqlalchemy.engine.reflection.Inspector.from_engine', 'reflection.Inspector.from_engine', (['bind'], {}), '(bind)\n', (475, 481), False, 'from sqlalchemy.engine import reflection\n'), ((821, 859), 'sqlalchemy.engine.reflection.Inspector.from_engine', 'reflection.Inspector.from_engine', (['bind'], {}), '(bind)\n', (853, 859), False, 'from sqlalchemy.engine import reflection\n'), ((405, 421), 'alembic.op.get_context', 'op.get_context', ([], {}), '()\n', (419, 421), False, 'from alembic import op\n'), ((783, 799), 'alembic.op.get_context', 'op.get_context', ([], {}), '()\n', (797, 799), False, 'from alembic import op\n'), ((1065, 1105), 'alembic.op.drop_column', 'op.drop_column', (['"""event_logs"""', '"""step_key"""'], {}), "('event_logs', 'step_key')\n", (1079, 1105), False, 'from alembic import op\n'), ((719, 751), 'sqlalchemy.Column', 'sa.Column', (['"""step_key"""', 'sa.String'], {}), "('step_key', sa.String)\n", (728, 751), True, 'import sqlalchemy as sa\n')]
|
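The upgrade/downgrade pair repeats the same inspector dance; a small helper keeps the guard in one place (a sketch in the same Alembic style; the helper name is illustrative):

from alembic import op
from sqlalchemy.engine import reflection

def _has_column(table_name, column_name):
    # Inspect the live connection so the guard stays idempotent across reruns.
    inspector = reflection.Inspector.from_engine(op.get_context().bind)
    if table_name not in inspector.get_table_names():
        return False
    return column_name in (c.get("name") for c in inspector.get_columns(table_name))

# upgrade() then collapses to:
#     if not _has_column("event_logs", "step_key"):
#         op.add_column("event_logs", sa.Column("step_key", sa.String))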
"""
Copyright (c) 2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
import scipy.optimize
from nncf.compression_method_api import CompressionScheduler
from nncf.config import NNCFConfig
from nncf.registry import Registry
PRUNING_SCHEDULERS = Registry("pruning_schedulers")
class PruningScheduler(CompressionScheduler):
def __init__(self, pruning_algo, params: NNCFConfig = None):
super().__init__()
if params is None:
self._params = NNCFConfig()
else:
self._params = params
self.algo = pruning_algo
# Number of initial steps of training before pruning
self.num_init_steps = self._params.get('num_init_steps', 0)
self.pruning_steps = self._params.get('pruning_steps', 100)
# Pruning rates
self.initial_pruning = self.algo.pruning_init
self.pruning_target = self._params.get('pruning_target', 0.5)
def load_state_dict(self, state_dict):
super().load_state_dict(state_dict)
self._set_pruning_level()
def epoch_step(self, next_epoch=None):
super().epoch_step(next_epoch)
self._set_pruning_level()
def _set_pruning_level(self):
self.algo.set_pruning_rate(self.current_pruning_level)
if self.current_epoch >= (self.pruning_steps + self.num_init_steps):
self.algo.freeze()
def _calc_pruning_level(self):
raise NotImplementedError
@property
def current_pruning_level(self):
if self.current_epoch >= self.num_init_steps:
return self._calc_pruning_level()
return 0
def _calc_density_level(self):
        return 1 - self.current_pruning_level
@PRUNING_SCHEDULERS.register("baseline")
class BaselinePruningScheduler(PruningScheduler):
"""
    Baseline scheduler that sets the maximal pruning rate after num_init_steps epochs
    and freezes the algorithm afterwards.
"""
def __init__(self, pruning_algo, config=None):
super().__init__(pruning_algo, config)
self._set_pruning_level()
def _calc_pruning_level(self):
return self.pruning_target
def _set_pruning_level(self):
self.algo.set_pruning_rate(self.current_pruning_level)
if self.current_epoch >= self.num_init_steps:
self.algo.freeze()
@PRUNING_SCHEDULERS.register("exponential")
class ExponentialPruningScheduler(PruningScheduler):
"""
Calculates pruning rate progressively according to the formula
P = 1 - a * exp(- k * epoch)
Where:
epoch - epoch number
P - pruning rate for current epoch
a, k - params
"""
def __init__(self, pruning_algo, config=None):
super().__init__(pruning_algo, config)
self.a, self.k = self._init_exp(self.initial_pruning, self.pruning_target, pruning_steps=self.pruning_steps)
self._set_pruning_level()
def _calc_pruning_level(self):
curr_pruning = 1 - self.a * np.exp(-self.k * (self.current_epoch - self.num_init_steps))
max_pruning = self.pruning_target
return max_pruning if curr_pruning >= max_pruning else curr_pruning
@staticmethod
def _init_exp(initial_pruning, max_pruning, pruning_steps=20):
p1 = (0, 1 - initial_pruning)
p2 = (pruning_steps, 1 - max_pruning)
k = np.log(p2[1] / p1[1]) / (p1[0] - p2[0])
a = p1[1] / np.exp(-k * p1[0])
return a, k
@PRUNING_SCHEDULERS.register("exponential_with_bias")
class ExponentialWithBiasPruningScheduler(PruningScheduler):
"""
Calculates pruning rate progressively according to the formula
P = a * exp(- k * epoch) + b
Where:
epoch - epoch number
P - pruning rate for current epoch
a, b, k - params
"""
def __init__(self, pruning_algo, config=None):
super().__init__(pruning_algo, config)
self.a, self.b, self.k = self._init_exp(self.pruning_steps, self.initial_pruning, self.pruning_target)
self._set_pruning_level()
def _calc_pruning_level(self):
curr_pruning = self.a * np.exp(-self.k * (self.current_epoch - self.num_init_steps - 1)) + self.b
max_pruning = self.pruning_target
return max_pruning if curr_pruning >= max_pruning else curr_pruning
@staticmethod
def _init_exp(E_max, P_min, P_max, D=1 / 8):
"""
        Find a, b, k for the system (from the SPFP paper):
1. P_min = a + b
2. P_max = a * exp(-k * E_max) + b
3. 3/4 * P_max = a * exp(-k * E_max * D) + b
Where P_min, P_max - minimal and goal levels of pruning rate
E_max - number of epochs for pruning
"""
def get_b(a, k):
return P_min - a
def get_a(k):
return (3 / 4 * P_max - P_min) / (np.exp(- D * k * E_max) - 1)
def f_to_solve(x):
y = np.exp(D * x * E_max)
return 1 / 3 * y + 1 / (y ** 7) - 4 / 3
k = scipy.optimize.fsolve(f_to_solve, [1])[0]
a = get_a(k)
b = get_b(a, k)
return a, b, k
|
[
"numpy.exp",
"nncf.registry.Registry",
"nncf.config.NNCFConfig",
"numpy.log"
] |
[((772, 802), 'nncf.registry.Registry', 'Registry', (['"""pruning_schedulers"""'], {}), "('pruning_schedulers')\n", (780, 802), False, 'from nncf.registry import Registry\n'), ((997, 1009), 'nncf.config.NNCFConfig', 'NNCFConfig', ([], {}), '()\n', (1007, 1009), False, 'from nncf.config import NNCFConfig\n'), ((3807, 3828), 'numpy.log', 'np.log', (['(p2[1] / p1[1])'], {}), '(p2[1] / p1[1])\n', (3813, 3828), True, 'import numpy as np\n'), ((3867, 3885), 'numpy.exp', 'np.exp', (['(-k * p1[0])'], {}), '(-k * p1[0])\n', (3873, 3885), True, 'import numpy as np\n'), ((5313, 5334), 'numpy.exp', 'np.exp', (['(D * x * E_max)'], {}), '(D * x * E_max)\n', (5319, 5334), True, 'import numpy as np\n'), ((3446, 3506), 'numpy.exp', 'np.exp', (['(-self.k * (self.current_epoch - self.num_init_steps))'], {}), '(-self.k * (self.current_epoch - self.num_init_steps))\n', (3452, 3506), True, 'import numpy as np\n'), ((4546, 4610), 'numpy.exp', 'np.exp', (['(-self.k * (self.current_epoch - self.num_init_steps - 1))'], {}), '(-self.k * (self.current_epoch - self.num_init_steps - 1))\n', (4552, 4610), True, 'import numpy as np\n'), ((5240, 5262), 'numpy.exp', 'np.exp', (['(-D * k * E_max)'], {}), '(-D * k * E_max)\n', (5246, 5262), True, 'import numpy as np\n')]
|
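To see the exponential schedule concretely, the _init_exp fit above can be evaluated by hand; a sketch that reproduces the fit and prints P(epoch) = 1 - a * exp(-k * epoch) for a few epochs (the rates and step count are illustrative):

import numpy as np

initial_pruning, pruning_target, pruning_steps = 0.1, 0.5, 20
p1 = (0, 1 - initial_pruning)
p2 = (pruning_steps, 1 - pruning_target)
k = np.log(p2[1] / p1[1]) / (p1[0] - p2[0])  # same fit as _init_exp
a = p1[1] / np.exp(-k * p1[0])
for epoch in (0, 5, 10, 20):
    print(epoch, round(min(1 - a * np.exp(-k * epoch), pruning_target), 3))
# epoch 0 gives the initial rate (0.1); epoch 20 hits the target (0.5).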