| code | apis | extract_api |
|---|---|---|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Animation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
],
),
migrations.CreateModel(
name='Asset',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
('remoteUrl', models.CharField(default=b'', max_length=1000)),
('assetType', models.CharField(default=b'', max_length=255, choices=[(b'CHARACTER COMPONENT', b'Character Component'), (b'MESH', b'Mesh'), (b'ITEM', b'Item')])),
],
),
migrations.CreateModel(
name='Behaviour',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
],
),
migrations.CreateModel(
name='Character',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
],
),
migrations.CreateModel(
name='CharacterComponent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('componentType', models.CharField(max_length=100, choices=[(b'UPPER ARM', b'Upper Arm'), (b'LOWER ARM', b'Lower Arm'), (b'HAND', b'Hand'), (b'UPPER LEG', b'Upper Leg'), (b'LOWER LEG', b'Lower Leg'), (b'FOOT', b'Foot'), (b'TORSO', b'Torso'), (b'LOWER JAW', b'Lower Jaw'), (b'UPPER JAW', b'Upper Jaw'), (b'NOSE', b'Node'), (b'LEFT PUPIL', b'Left Pupil'), (b'RIGHT PUPIL', b'Right Pupil'), (b'PELVIS', b'Pelvis')])),
],
),
migrations.CreateModel(
name='Check',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='Component',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('name', models.CharField(default=b'', unique=True, max_length=100)),
('image', models.ImageField(upload_to=b'component_images', blank=True)),
('description', models.TextField(default=b'')),
('rating', models.FloatField(default=0.0)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Condition',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('numArgs', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='ConditionalArguments',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('value', models.TextField(default=b'')),
('dataType', models.IntegerField(default=0)),
('index', models.IntegerField(default=0)),
('conditionCheck', models.ForeignKey(to='api.Check', null=True)),
],
),
migrations.CreateModel(
name='Connection',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='Conversation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
],
),
migrations.CreateModel(
name='Dialogue',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('conversation', models.ForeignKey(to='api.Conversation', null=True)),
('speaker', models.ForeignKey(to='api.Character', null=True)),
],
),
migrations.CreateModel(
name='FurnitureComponent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('meshUrl', models.TextField(default=b'')),
],
),
migrations.CreateModel(
name='FurnitureType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('furnitureComponent', models.ForeignKey(to='api.FurnitureComponent', null=True)),
],
),
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('character', models.ForeignKey(to='api.Character', null=True)),
],
),
migrations.CreateModel(
name='Joint',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('xPercentage', models.FloatField(default=0.0)),
('yPercentage', models.FloatField(default=0.0)),
],
),
migrations.CreateModel(
name='Line',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.CharField(default=b'', max_length=100)),
('dialogue', models.ForeignKey(to='api.Dialogue', null=True)),
],
),
migrations.CreateModel(
name='Option',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.TextField(default=b'')),
('conversation', models.ForeignKey(to='api.Conversation', null=True)),
],
),
migrations.CreateModel(
name='PDUser',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('avatar', models.ImageField(upload_to=b'profile_images', blank=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Room',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('size', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Scenario',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('name', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
('script', models.TextField(default=b'')),
('jsonUrl', models.CharField(default=b'{}', max_length=1024)),
('owner', models.ForeignKey(related_name='scenarios', to='api.PDUser')),
],
options={
'ordering': ('created',),
},
),
migrations.CreateModel(
name='SkeletalConnection',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('component', models.ForeignKey(to='api.CharacterComponent', null=True)),
('outComponents', models.ManyToManyField(related_name='outComponents_rel_+', null=True, to='api.SkeletalConnection')),
],
),
migrations.CreateModel(
name='State',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('behaviour', models.ForeignKey(to='api.Behaviour', null=True)),
('character', models.ForeignKey(to='api.Character', null=True)),
('conversation', models.ForeignKey(to='api.Conversation', null=True)),
('idleAnimationOverride', models.ForeignKey(to='api.Animation', null=True)),
],
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('value', models.CharField(default=b'', max_length=100)),
],
),
migrations.CreateModel(
name='Taggable',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='Texture',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=100)),
('imageUrl', models.TextField(default=b'')),
],
),
migrations.CreateModel(
name='Trigger',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('function', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
],
),
migrations.CreateModel(
name='TriggerArgument',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('dataType', models.CharField(default=0, max_length=100, choices=[(b'INT', b'int'), (b'FLOAT', b'float'), (b'CHARACTER', b'character'), (b'ITEM', b'item'), (b'ROOM', b'room'), (b'CONVERSATION', b'conversation')])),
('field', models.CharField(default=0, max_length=100)),
('trigger', models.ForeignKey(to='api.Trigger', null=True)),
],
),
migrations.CreateModel(
name='UploadFile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('file', models.FileField(upload_to=b'files/%Y/%m/%d')),
],
),
migrations.CreateModel(
name='ComponentSet',
fields=[
('taggable_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='api.Taggable')),
('name', models.CharField(default=b'', max_length=100)),
('jsonRepresentation', models.TextField(default=b'')),
('fileUrl', models.CharField(default=b'', max_length=512)),
('setType', models.CharField(default=b'', max_length=100, choices=[(b'ARM', b'Arm'), (b'LEG', b'Leg'), (b'HEAD', b'Head'), (b'TORSO', b'Torso'), (b'PELVIS', b'Pelvis')])),
],
bases=('api.taggable',),
),
migrations.CreateModel(
name='ItemDefinition',
fields=[
('taggable_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='api.Taggable')),
('name', models.CharField(default=b'', max_length=100)),
('description', models.TextField(default=b'')),
('interactable', models.BooleanField(default=False)),
('texture', models.OneToOneField(null=True, to='api.Texture')),
],
bases=('api.taggable',),
),
migrations.AddField(
model_name='tag',
name='owner',
field=models.ForeignKey(to='api.Taggable', null=True),
),
migrations.AddField(
model_name='room',
name='scenario',
field=models.ForeignKey(to='api.Scenario', null=True),
),
migrations.AddField(
model_name='item',
name='room',
field=models.ForeignKey(to='api.Room', null=True),
),
migrations.AddField(
model_name='item',
name='scenario',
field=models.ForeignKey(to='api.Scenario', null=True),
),
migrations.AddField(
model_name='furnituretype',
name='room',
field=models.ForeignKey(to='api.Room', null=True),
),
migrations.AddField(
model_name='conversation',
name='scenario',
field=models.ForeignKey(to='api.Scenario', null=True),
),
migrations.AddField(
model_name='component',
name='owner',
field=models.ForeignKey(related_name='components', to='api.PDUser'),
),
migrations.AddField(
model_name='check',
name='dialogue',
field=models.ForeignKey(to='api.Dialogue', null=True),
),
migrations.AddField(
model_name='charactercomponent',
name='texture',
field=models.OneToOneField(null=True, to='api.Texture'),
),
migrations.AddField(
model_name='character',
name='scenario',
field=models.ForeignKey(to='api.Scenario', null=True),
),
migrations.AddField(
model_name='item',
name='itemDef',
field=models.ForeignKey(to='api.ItemDefinition', null=True),
),
migrations.AddField(
model_name='component',
name='componentSet',
field=models.ForeignKey(to='api.ComponentSet', null=True),
),
migrations.AddField(
model_name='charactercomponent',
name='componentSet',
field=models.ForeignKey(to='api.ComponentSet', null=True),
),
]
| [
"django.db.models.OneToOneField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.ManyToManyField",
"django.db.models.FileField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.d... | [((210, 267), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (241, 267), False, 'from django.db import models, migrations\n'), ((13564, 13611), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Taggable"""', 'null': '(True)'}), "(to='api.Taggable', null=True)\n", (13581, 13611), False, 'from django.db import models, migrations\n'), ((13731, 13778), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Scenario"""', 'null': '(True)'}), "(to='api.Scenario', null=True)\n", (13748, 13778), False, 'from django.db import models, migrations\n'), ((13894, 13937), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Room"""', 'null': '(True)'}), "(to='api.Room', null=True)\n", (13911, 13937), False, 'from django.db import models, migrations\n'), ((14057, 14104), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Scenario"""', 'null': '(True)'}), "(to='api.Scenario', null=True)\n", (14074, 14104), False, 'from django.db import models, migrations\n'), ((14229, 14272), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Room"""', 'null': '(True)'}), "(to='api.Room', null=True)\n", (14246, 14272), False, 'from django.db import models, migrations\n'), ((14400, 14447), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Scenario"""', 'null': '(True)'}), "(to='api.Scenario', null=True)\n", (14417, 14447), False, 'from django.db import models, migrations\n'), ((14569, 14630), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""components"""', 'to': '"""api.PDUser"""'}), "(related_name='components', to='api.PDUser')\n", (14586, 14630), False, 'from django.db import models, migrations\n'), ((14751, 14798), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Dialogue"""', 'null': '(True)'}), "(to='api.Dialogue', null=True)\n", (14768, 14798), False, 'from django.db import models, migrations\n'), ((14931, 14980), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'to': '"""api.Texture"""'}), "(null=True, to='api.Texture')\n", (14951, 14980), False, 'from django.db import models, migrations\n'), ((15105, 15152), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Scenario"""', 'null': '(True)'}), "(to='api.Scenario', null=True)\n", (15122, 15152), False, 'from django.db import models, migrations\n'), ((15271, 15324), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.ItemDefinition"""', 'null': '(True)'}), "(to='api.ItemDefinition', null=True)\n", (15288, 15324), False, 'from django.db import models, migrations\n'), ((15453, 15504), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.ComponentSet"""', 'null': '(True)'}), "(to='api.ComponentSet', null=True)\n", (15470, 15504), False, 'from django.db import models, migrations\n'), ((15642, 15693), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.ComponentSet"""', 'null': '(True)'}), "(to='api.ComponentSet', null=True)\n", (15659, 15693), False, 'from django.db import models, migrations\n'), ((401, 494), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (417, 494), False, 'from 
django.db import models, migrations\n'), ((518, 563), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (534, 563), False, 'from django.db import models, migrations\n'), ((598, 627), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (614, 627), False, 'from django.db import models, migrations\n'), ((758, 851), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (774, 851), False, 'from django.db import models, migrations\n'), ((875, 920), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (891, 920), False, 'from django.db import models, migrations\n'), ((955, 984), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (971, 984), False, 'from django.db import models, migrations\n'), ((1017, 1063), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(1000)'}), "(default=b'', max_length=1000)\n", (1033, 1063), False, 'from django.db import models, migrations\n'), ((1096, 1251), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(255)', 'choices': "[(b'CHARACTER COMPONENT', b'Character Component'), (b'MESH', b'Mesh'), (\n b'ITEM', b'Item')]"}), "(default=b'', max_length=255, choices=[(\n b'CHARACTER COMPONENT', b'Character Component'), (b'MESH', b'Mesh'), (\n b'ITEM', b'Item')])\n", (1112, 1251), False, 'from django.db import models, migrations\n'), ((1376, 1469), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1392, 1469), False, 'from django.db import models, migrations\n'), ((1493, 1538), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (1509, 1538), False, 'from django.db import models, migrations\n'), ((1573, 1602), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (1589, 1602), False, 'from django.db import models, migrations\n'), ((1737, 1830), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1753, 1830), False, 'from django.db import models, migrations\n'), ((1854, 1899), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (1870, 1899), False, 'from django.db import models, migrations\n'), ((1934, 1963), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (1950, 1963), False, 'from django.db import models, migrations\n'), ((2107, 2200), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (2123, 2200), False, 'from django.db import models, migrations\n'), 
((2224, 2269), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (2240, 2269), False, 'from django.db import models, migrations\n'), ((2306, 2723), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'choices': "[(b'UPPER ARM', b'Upper Arm'), (b'LOWER ARM', b'Lower Arm'), (b'HAND',\n b'Hand'), (b'UPPER LEG', b'Upper Leg'), (b'LOWER LEG', b'Lower Leg'), (\n b'FOOT', b'Foot'), (b'TORSO', b'Torso'), (b'LOWER JAW', b'Lower Jaw'),\n (b'UPPER JAW', b'Upper Jaw'), (b'NOSE', b'Node'), (b'LEFT PUPIL',\n b'Left Pupil'), (b'RIGHT PUPIL', b'Right Pupil'), (b'PELVIS', b'Pelvis')]"}), "(max_length=100, choices=[(b'UPPER ARM', b'Upper Arm'), (\n b'LOWER ARM', b'Lower Arm'), (b'HAND', b'Hand'), (b'UPPER LEG',\n b'Upper Leg'), (b'LOWER LEG', b'Lower Leg'), (b'FOOT', b'Foot'), (\n b'TORSO', b'Torso'), (b'LOWER JAW', b'Lower Jaw'), (b'UPPER JAW',\n b'Upper Jaw'), (b'NOSE', b'Node'), (b'LEFT PUPIL', b'Left Pupil'), (\n b'RIGHT PUPIL', b'Right Pupil'), (b'PELVIS', b'Pelvis')])\n", (2322, 2723), False, 'from django.db import models, migrations\n'), ((2831, 2924), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (2847, 2924), False, 'from django.db import models, migrations\n'), ((3055, 3148), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (3071, 3148), False, 'from django.db import models, migrations\n'), ((3175, 3214), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3195, 3214), False, 'from django.db import models, migrations\n'), ((3242, 3300), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'unique': '(True)', 'max_length': '(100)'}), "(default=b'', unique=True, max_length=100)\n", (3258, 3300), False, 'from django.db import models, migrations\n'), ((3329, 3389), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': "b'component_images'", 'blank': '(True)'}), "(upload_to=b'component_images', blank=True)\n", (3346, 3389), False, 'from django.db import models, migrations\n'), ((3424, 3453), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (3440, 3453), False, 'from django.db import models, migrations\n'), ((3483, 3513), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (3500, 3513), False, 'from django.db import models, migrations\n'), ((3724, 3817), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (3740, 3817), False, 'from django.db import models, migrations\n'), ((3841, 3886), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (3857, 3886), False, 'from django.db import models, migrations\n'), ((3917, 3947), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (3936, 3947), False, 'from django.db 
import models, migrations\n'), ((4093, 4186), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (4109, 4186), False, 'from django.db import models, migrations\n'), ((4211, 4240), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (4227, 4240), False, 'from django.db import models, migrations\n'), ((4272, 4302), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (4291, 4302), False, 'from django.db import models, migrations\n'), ((4331, 4361), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (4350, 4361), False, 'from django.db import models, migrations\n'), ((4399, 4443), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Check"""', 'null': '(True)'}), "(to='api.Check', null=True)\n", (4416, 4443), False, 'from django.db import models, migrations\n'), ((4579, 4672), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (4595, 4672), False, 'from django.db import models, migrations\n'), ((4806, 4899), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (4822, 4899), False, 'from django.db import models, migrations\n'), ((4923, 4968), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (4939, 4968), False, 'from django.db import models, migrations\n'), ((5102, 5195), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (5118, 5195), False, 'from django.db import models, migrations\n'), ((5227, 5278), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Conversation"""', 'null': '(True)'}), "(to='api.Conversation', null=True)\n", (5244, 5278), False, 'from django.db import models, migrations\n'), ((5309, 5357), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Character"""', 'null': '(True)'}), "(to='api.Character', null=True)\n", (5326, 5357), False, 'from django.db import models, migrations\n'), ((5501, 5594), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (5517, 5594), False, 'from django.db import models, migrations\n'), ((5618, 5663), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (5634, 5663), False, 'from django.db import models, migrations\n'), ((5694, 5723), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (5710, 5723), False, 'from django.db import models, migrations\n'), ((5862, 5955), 'django.db.models.AutoField', 'models.AutoField', 
([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (5878, 5955), False, 'from django.db import models, migrations\n'), ((5979, 6024), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (5995, 6024), False, 'from django.db import models, migrations\n'), ((6066, 6123), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.FurnitureComponent"""', 'null': '(True)'}), "(to='api.FurnitureComponent', null=True)\n", (6083, 6123), False, 'from django.db import models, migrations\n'), ((6253, 6346), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (6269, 6346), False, 'from django.db import models, migrations\n'), ((6375, 6423), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Character"""', 'null': '(True)'}), "(to='api.Character', null=True)\n", (6392, 6423), False, 'from django.db import models, migrations\n'), ((6554, 6647), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (6570, 6647), False, 'from django.db import models, migrations\n'), ((6678, 6708), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (6695, 6708), False, 'from django.db import models, migrations\n'), ((6743, 6773), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (6760, 6773), False, 'from django.db import models, migrations\n'), ((6903, 6996), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (6919, 6996), False, 'from django.db import models, migrations\n'), ((7020, 7065), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (7036, 7065), False, 'from django.db import models, migrations\n'), ((7097, 7144), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Dialogue"""', 'null': '(True)'}), "(to='api.Dialogue', null=True)\n", (7114, 7144), False, 'from django.db import models, migrations\n'), ((7276, 7369), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (7292, 7369), False, 'from django.db import models, migrations\n'), ((7393, 7422), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (7409, 7422), False, 'from django.db import models, migrations\n'), ((7458, 7509), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Conversation"""', 'null': '(True)'}), "(to='api.Conversation', null=True)\n", (7475, 7509), False, 'from django.db import models, migrations\n'), ((7641, 7734), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': 
'"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (7657, 7734), False, 'from django.db import models, migrations\n'), ((7760, 7818), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': "b'profile_images'", 'blank': '(True)'}), "(upload_to=b'profile_images', blank=True)\n", (7777, 7818), False, 'from django.db import models, migrations\n'), ((7846, 7895), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'to': 'settings.AUTH_USER_MODEL'}), '(to=settings.AUTH_USER_MODEL)\n', (7866, 7895), False, 'from django.db import models, migrations\n'), ((8025, 8118), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (8041, 8118), False, 'from django.db import models, migrations\n'), ((8142, 8172), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (8161, 8172), False, 'from django.db import models, migrations\n'), ((8306, 8399), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (8322, 8399), False, 'from django.db import models, migrations\n'), ((8426, 8465), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (8446, 8465), False, 'from django.db import models, migrations\n'), ((8493, 8538), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (8509, 8538), False, 'from django.db import models, migrations\n'), ((8573, 8602), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (8589, 8602), False, 'from django.db import models, migrations\n'), ((8632, 8661), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (8648, 8661), False, 'from django.db import models, migrations\n'), ((8692, 8740), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b'{}'", 'max_length': '(1024)'}), "(default=b'{}', max_length=1024)\n", (8708, 8740), False, 'from django.db import models, migrations\n'), ((8769, 8829), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""scenarios"""', 'to': '"""api.PDUser"""'}), "(related_name='scenarios', to='api.PDUser')\n", (8786, 8829), False, 'from django.db import models, migrations\n'), ((9052, 9145), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (9068, 9145), False, 'from django.db import models, migrations\n'), ((9174, 9231), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.CharacterComponent"""', 'null': '(True)'}), "(to='api.CharacterComponent', null=True)\n", (9191, 9231), False, 'from django.db import models, migrations\n'), ((9268, 9371), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""outComponents_rel_+"""', 'null': '(True)', 'to': '"""api.SkeletalConnection"""'}), 
"(related_name='outComponents_rel_+', null=True, to=\n 'api.SkeletalConnection')\n", (9290, 9371), False, 'from django.db import models, migrations\n'), ((9497, 9590), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (9513, 9590), False, 'from django.db import models, migrations\n'), ((9614, 9659), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (9630, 9659), False, 'from django.db import models, migrations\n'), ((9692, 9740), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Behaviour"""', 'null': '(True)'}), "(to='api.Behaviour', null=True)\n", (9709, 9740), False, 'from django.db import models, migrations\n'), ((9773, 9821), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Character"""', 'null': '(True)'}), "(to='api.Character', null=True)\n", (9790, 9821), False, 'from django.db import models, migrations\n'), ((9857, 9908), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Conversation"""', 'null': '(True)'}), "(to='api.Conversation', null=True)\n", (9874, 9908), False, 'from django.db import models, migrations\n'), ((9953, 10001), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Animation"""', 'null': '(True)'}), "(to='api.Animation', null=True)\n", (9970, 10001), False, 'from django.db import models, migrations\n'), ((10130, 10223), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (10146, 10223), False, 'from django.db import models, migrations\n'), ((10248, 10293), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (10264, 10293), False, 'from django.db import models, migrations\n'), ((10427, 10520), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (10443, 10520), False, 'from django.db import models, migrations\n'), ((10649, 10742), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (10665, 10742), False, 'from django.db import models, migrations\n'), ((10766, 10811), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (10782, 10811), False, 'from django.db import models, migrations\n'), ((10843, 10872), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (10859, 10872), False, 'from django.db import models, migrations\n'), ((11005, 11098), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (11021, 11098), False, 'from django.db import models, migrations\n'), 
((11126, 11171), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (11142, 11171), False, 'from django.db import models, migrations\n'), ((11206, 11235), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (11222, 11235), False, 'from django.db import models, migrations\n'), ((11376, 11469), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (11392, 11469), False, 'from django.db import models, migrations\n'), ((11497, 11705), 'django.db.models.CharField', 'models.CharField', ([], {'default': '(0)', 'max_length': '(100)', 'choices': "[(b'INT', b'int'), (b'FLOAT', b'float'), (b'CHARACTER', b'character'), (\n b'ITEM', b'item'), (b'ROOM', b'room'), (b'CONVERSATION', b'conversation')]"}), "(default=0, max_length=100, choices=[(b'INT', b'int'), (\n b'FLOAT', b'float'), (b'CHARACTER', b'character'), (b'ITEM', b'item'),\n (b'ROOM', b'room'), (b'CONVERSATION', b'conversation')])\n", (11513, 11705), False, 'from django.db import models, migrations\n'), ((11725, 11768), 'django.db.models.CharField', 'models.CharField', ([], {'default': '(0)', 'max_length': '(100)'}), '(default=0, max_length=100)\n', (11741, 11768), False, 'from django.db import models, migrations\n'), ((11799, 11845), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""api.Trigger"""', 'null': '(True)'}), "(to='api.Trigger', null=True)\n", (11816, 11845), False, 'from django.db import models, migrations\n'), ((11981, 12074), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (11997, 12074), False, 'from django.db import models, migrations\n'), ((12098, 12143), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': "b'files/%Y/%m/%d'"}), "(upload_to=b'files/%Y/%m/%d')\n", (12114, 12143), False, 'from django.db import models, migrations\n'), ((12291, 12406), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'parent_link': '(True)', 'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""api.Taggable"""'}), "(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='api.Taggable')\n", (12311, 12406), False, 'from django.db import models, migrations\n'), ((12430, 12475), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (12446, 12475), False, 'from django.db import models, migrations\n'), ((12517, 12546), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (12533, 12546), False, 'from django.db import models, migrations\n'), ((12577, 12622), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(512)'}), "(default=b'', max_length=512)\n", (12593, 12622), False, 'from django.db import models, migrations\n'), ((12653, 12819), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'choices': "[(b'ARM', b'Arm'), (b'LEG', b'Leg'), (b'HEAD', b'Head'), (b'TORSO',\n b'Torso'), (b'PELVIS', b'Pelvis')]"}), "(default=b'', max_length=100, choices=[(b'ARM', 
b'Arm'), (\n b'LEG', b'Leg'), (b'HEAD', b'Head'), (b'TORSO', b'Torso'), (b'PELVIS',\n b'Pelvis')])\n", (12669, 12819), False, 'from django.db import models, migrations\n'), ((12997, 13112), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'parent_link': '(True)', 'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""api.Taggable"""'}), "(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='api.Taggable')\n", (13017, 13112), False, 'from django.db import models, migrations\n'), ((13136, 13181), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)'}), "(default=b'', max_length=100)\n", (13152, 13181), False, 'from django.db import models, migrations\n'), ((13216, 13245), 'django.db.models.TextField', 'models.TextField', ([], {'default': "b''"}), "(default=b'')\n", (13232, 13245), False, 'from django.db import models, migrations\n'), ((13281, 13315), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (13300, 13315), False, 'from django.db import models, migrations\n'), ((13346, 13395), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'to': '"""api.Texture"""'}), "(null=True, to='api.Texture')\n", (13366, 13395), False, 'from django.db import models, migrations\n')] |
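For reference, the Component model created above corresponds roughly to the following models.py definition. This is a hedged reconstruction from the CreateModel and AddField operations, not the project's original source; the field arguments are copied verbatim from the migration.

from django.db import models

class Component(models.Model):
    # Reconstructed from migrations.CreateModel(name='Component', ...) plus the later AddField operations.
    created = models.DateTimeField(auto_now_add=True)
    name = models.CharField(default=b'', unique=True, max_length=100)
    image = models.ImageField(upload_to=b'component_images', blank=True)
    description = models.TextField(default=b'')
    rating = models.FloatField(default=0.0)
    owner = models.ForeignKey('PDUser', related_name='components')
    componentSet = models.ForeignKey('ComponentSet', null=True)

    class Meta:
        ordering = ('name',)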
import importlib.metadata as ilmd
from textwrap import dedent
# Print each flake8 plugin group followed by the entry points registered under it.
def main():
for key in ["flake8.extension", "flake8.report"]:
print(
dedent(
f"""
{key}
{'=' * len(key)}
{ilmd.entry_points().get(key, "(none)")}
"""
)
)
if __name__ == "__main__":
main()
| [
"importlib.metadata.entry_points"
] | [((239, 258), 'importlib.metadata.entry_points', 'ilmd.entry_points', ([], {}), '()\n', (256, 258), True, 'import importlib.metadata as ilmd\n')] |
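The .get(key, ...) lookup above relies on the older dict-style return value of entry_points(); on Python 3.10+ the selectable interface performs the same query. A small sketch of an equivalent lookup, assuming a recent importlib.metadata (the helper name is mine):

import importlib.metadata as ilmd

def list_group(group):
    # entry_points(group=...) filters at the call site on Python 3.10+;
    # older interpreters reject the keyword, so fall back to the dict-style mapping.
    try:
        eps = ilmd.entry_points(group=group)
    except TypeError:
        eps = ilmd.entry_points().get(group, [])
    return [ep.name for ep in eps]

print(list_group("flake8.extension"))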
from aiogram import types
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters import Command
from antiplagiat import Antiplagiat
from data.config import ADVEGO_TOKEN
from loader import dp, _
api = Antiplagiat(ADVEGO_TOKEN)
async def antiplagiator(text):
result = api.unique_text_add(text)
key = result['key']
result = api.unique_check(key)
if result['status'] == 'done':
print('Done!')
        # do something with the report here
return
elif result['status'] == 'error':
print(f'Error: {result}')
return
elif result['status'] == 'not found':
print('Not found!')
return
@dp.message_handler(Command('plagiat'))
async def plagiat_check_start(message: types.Message, state: FSMContext):
    await message.answer(_('Send the text to check for plagiarism. No more than 4096 characters.'))
await state.set_state('process_plagiat')
@dp.message_handler(state='process_plagiat')
async def plagiat_check_process(message: types.Message, state: FSMContext):
await antiplagiator(text=message.text)
await state.reset_state()
| [
"loader._",
"antiplagiat.Antiplagiat",
"loader.dp.message_handler",
"aiogram.dispatcher.filters.Command"
] | [((221, 246), 'antiplagiat.Antiplagiat', 'Antiplagiat', (['ADVEGO_TOKEN'], {}), '(ADVEGO_TOKEN)\n', (232, 246), False, 'from antiplagiat import Antiplagiat\n'), ((916, 959), 'loader.dp.message_handler', 'dp.message_handler', ([], {'state': '"""process_plagiat"""'}), "(state='process_plagiat')\n", (934, 959), False, 'from loader import dp, _\n'), ((679, 697), 'aiogram.dispatcher.filters.Command', 'Command', (['"""plagiat"""'], {}), "('plagiat')\n", (686, 697), False, 'from aiogram.dispatcher.filters import Command\n'), ((798, 866), 'loader._', '_', (['"""Пришлите текст для проверки на плагиат! Не больше 4096 символов"""'], {}), "('Пришлите текст для проверки на плагиат! Не больше 4096 символов')\n", (799, 866), False, 'from loader import dp, _\n')] |
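In practice unique_check rarely reports 'done' immediately after the text is submitted, so the report usually has to be polled for. A minimal polling sketch built only from the Antiplagiat calls used above; the retry count and delay are arbitrary assumptions:

import asyncio

async def wait_for_report(text, attempts=10, delay=5.0):
    """Submit text to Advego and poll until the uniqueness report is ready (or give up)."""
    key = api.unique_text_add(text)['key']
    for _ in range(attempts):
        result = api.unique_check(key)
        if result['status'] == 'done':
            return result
        if result['status'] == 'error':
            raise RuntimeError(f'Antiplagiat error: {result}')
        # 'not found' means the report is not ready yet -- wait and retry.
        await asyncio.sleep(delay)
    return None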
'''
Testers use three approaches for dropdown controls in Selenium-based web test automation:
1. Using Selenium's Select class, as it provides higher-level methods.
2. Using the sendKeys() method of WebElement.
3. (Especially for custom select controls) Clicking the drop-down control and then clicking the option.
Arjuna caters to all of them with a single abstraction - its DropDown object.
Approach 3 will be covered later, once element configuration has been discussed.
(A plain-Selenium sketch of approaches 1 and 2 follows this example.)
'''
from arjuna.revised.tpi import Arjuna
from arjuna.revised.tpi.guiauto.helpers import With
from arjuna.revised.tpi.guiauto.helpers import Screen
from .wp_login_logout import *
Arjuna.init()
# Default Gui automation engine is Selenium
automator = Arjuna.create_gui_automator(Arjuna.get_central_config())
login(automator)
automator.element(With.link_text("Settings")).click()
role_select = automator.DropDown(With.id("default_role"))
role_select.select_value("editor")
role_select.select_visible_text("Subscriber")
print(role_select.has_visible_text_selected("Subscriber"))
print(role_select.has_value_selected("subscriber"))
print(role_select.has_index_selected(2))
print(role_select.get_first_selected_option_value())
print(role_select.get_first_selected_option_text())
role_select.select_index(4)
print(role_select.has_index_selected(4))
text = "Subscriber"
role_select.send_option_text(text)
assert role_select.has_visible_text_selected("Subscriber") is True
logout(automator) | [
"arjuna.revised.tpi.guiauto.helpers.With.id",
"arjuna.revised.tpi.guiauto.helpers.With.link_text",
"arjuna.revised.tpi.Arjuna.init",
"arjuna.revised.tpi.Arjuna.get_central_config"
] | [((644, 657), 'arjuna.revised.tpi.Arjuna.init', 'Arjuna.init', ([], {}), '()\n', (655, 657), False, 'from arjuna.revised.tpi import Arjuna\n'), ((742, 769), 'arjuna.revised.tpi.Arjuna.get_central_config', 'Arjuna.get_central_config', ([], {}), '()\n', (767, 769), False, 'from arjuna.revised.tpi import Arjuna\n'), ((878, 901), 'arjuna.revised.tpi.guiauto.helpers.With.id', 'With.id', (['"""default_role"""'], {}), "('default_role')\n", (885, 901), False, 'from arjuna.revised.tpi.guiauto.helpers import With\n'), ((808, 834), 'arjuna.revised.tpi.guiauto.helpers.With.link_text', 'With.link_text', (['"""Settings"""'], {}), "('Settings')\n", (822, 834), False, 'from arjuna.revised.tpi.guiauto.helpers import With\n')] |
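For comparison with approaches 1 and 2 from the docstring, here is a plain-Selenium sketch of the same role selection. It assumes a raw WebDriver session already on the WordPress general settings page; the URL is a placeholder:

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select

driver = webdriver.Chrome()
driver.get('https://example.com/wp-admin/options-general.php')  # placeholder URL

# Approach 1: Selenium's Select wrapper exposes higher-level selection methods.
role = Select(driver.find_element(By.ID, 'default_role'))
role.select_by_value('editor')
role.select_by_visible_text('Subscriber')

# Approach 2: sending the option text directly to the <select> element.
driver.find_element(By.ID, 'default_role').send_keys('Subscriber')

driver.quit()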
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass
from ax.exceptions.core import UserInputError
from ax.modelbridge.generation_strategy import GenerationStrategy
from ax.service.utils.scheduler_options import SchedulerOptions
from ax.utils.common.base import Base
@dataclass(frozen=True)
class BenchmarkMethod(Base):
"""Benchmark method, represented in terms of Ax generation strategy (which tells us
which models to use when) and scheduler options (which tell us extra execution
information like maximum parallelism, early stopping configuration, etc.)
"""
name: str
generation_strategy: GenerationStrategy
scheduler_options: SchedulerOptions
def __post_init__(self) -> None:
if self.scheduler_options.total_trials is None:
raise UserInputError(
"SchedulerOptions.total_trials may not be None in BenchmarkMethod."
)
| [
"ax.exceptions.core.UserInputError",
"dataclasses.dataclass"
] | [((432, 454), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (441, 454), False, 'from dataclasses import dataclass\n'), ((952, 1040), 'ax.exceptions.core.UserInputError', 'UserInputError', (['"""SchedulerOptions.total_trials may not be None in BenchmarkMethod."""'], {}), "(\n 'SchedulerOptions.total_trials may not be None in BenchmarkMethod.')\n", (966, 1040), False, 'from ax.exceptions.core import UserInputError\n')] |
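A minimal sketch of constructing such a method; the Sobol-only generation strategy and the trial count are illustrative assumptions, not part of the class above:

# Import paths assume a recent Ax release.
from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
from ax.modelbridge.registry import Models
from ax.service.utils.scheduler_options import SchedulerOptions

# total_trials must be set, otherwise __post_init__ above raises UserInputError.
method = BenchmarkMethod(
    name='sobol_30_trials',
    generation_strategy=GenerationStrategy(
        name='Sobol', steps=[GenerationStep(model=Models.SOBOL, num_trials=-1)]
    ),
    scheduler_options=SchedulerOptions(total_trials=30),
)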
# -*- coding: utf-8 -*-
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Author: <NAME>
# Contact: <EMAIL>
# Contact: <EMAIL>
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import sys
import os
import time
import argparse
try:
input = raw_input
except NameError:
pass
import open3d as o3d
import torch
import torch.nn as nn
import torch.autograd as autograd
from copy import deepcopy
import numpy as np
import tqdm
from loguru import logger
from psbody.mesh import Mesh
import bvh_distance_queries
if __name__ == "__main__":
device = torch.device('cuda')
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--mesh-fn', type=str, dest='mesh_fn',
help='A mesh file (.obj, .ply, e.t.c.) to be checked' +
' for collisions')
parser.add_argument('--num-query-points', type=int, default=1,
dest='num_query_points',
help='Number of random query points')
parser.add_argument('--seed', type=int, default=None,
help='If given then set the seed')
args, _ = parser.parse_known_args()
mesh_fn = args.mesh_fn
num_query_points = args.num_query_points
seed = args.seed
input_mesh = Mesh(filename=mesh_fn)
if seed is not None:
torch.manual_seed(seed)
logger.info(f'Number of triangles = {input_mesh.f.shape[0]}')
v = input_mesh.v
vertices = torch.tensor(v, dtype=torch.float32, device=device)
faces = torch.tensor(input_mesh.f.astype(np.int64),
dtype=torch.long,
device=device)
min_vals, _ = torch.min(vertices, dim=0, keepdim=True)
max_vals, _ = torch.max(vertices, dim=0, keepdim=True)
query_points = torch.rand([1, num_query_points, 3], dtype=torch.float32,
device=device) * (max_vals - min_vals) + min_vals
query_points_np = query_points.detach().cpu().numpy().squeeze(
axis=0).astype(np.float32).reshape(num_query_points, 3)
batch_size = 1
triangles = vertices[faces].unsqueeze(dim=0)
m = bvh_distance_queries.BVH()
torch.cuda.synchronize()
start = time.perf_counter()
distances, closest_points, closest_faces, closest_bcs = m(
triangles, query_points)
torch.cuda.synchronize()
logger.info(f'CUDA Elapsed time {time.perf_counter() - start}')
distances = distances.detach().cpu().numpy()
closest_points = closest_points.detach().cpu().numpy().squeeze()
mesh = o3d.geometry.TriangleMesh()
mesh.vertices = o3d.utility.Vector3dVector(v)
mesh.triangles = o3d.utility.Vector3iVector(input_mesh.f.astype(np.int64))
mesh.compute_vertex_normals()
mesh.paint_uniform_color([0.3, 0.3, 0.3])
query_pcl = o3d.geometry.PointCloud()
query_pcl.points = o3d.utility.Vector3dVector(
query_points.detach().cpu().numpy().squeeze(axis=0).reshape(-1, 3))
query_pcl.paint_uniform_color([0.9, 0.3, 0.3])
closest_points_pcl = o3d.geometry.PointCloud()
closest_points_pcl.points = o3d.utility.Vector3dVector(
closest_points.reshape(-1, 3))
closest_points_pcl.paint_uniform_color([0.3, 0.3, 0.9])
o3d.visualization.draw_geometries([
mesh,
query_pcl,
closest_points_pcl,
])
| [
"torch.manual_seed",
"loguru.logger.info",
"argparse.ArgumentParser",
"torch.rand",
"torch.max",
"time.perf_counter",
"torch.min",
"torch.cuda.synchronize",
"torch.tensor",
"open3d.geometry.TriangleMesh",
"bvh_distance_queries.BVH",
"psbody.mesh.Mesh",
"open3d.geometry.PointCloud",
"open3d... | [((1194, 1214), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (1206, 1214), False, 'import torch\n'), ((1229, 1308), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (1252, 1308), False, 'import argparse\n'), ((1952, 1974), 'psbody.mesh.Mesh', 'Mesh', ([], {'filename': 'mesh_fn'}), '(filename=mesh_fn)\n', (1956, 1974), False, 'from psbody.mesh import Mesh\n'), ((2038, 2099), 'loguru.logger.info', 'logger.info', (['f"""Number of triangles = {input_mesh.f.shape[0]}"""'], {}), "(f'Number of triangles = {input_mesh.f.shape[0]}')\n", (2049, 2099), False, 'from loguru import logger\n'), ((2138, 2189), 'torch.tensor', 'torch.tensor', (['v'], {'dtype': 'torch.float32', 'device': 'device'}), '(v, dtype=torch.float32, device=device)\n', (2150, 2189), False, 'import torch\n'), ((2348, 2388), 'torch.min', 'torch.min', (['vertices'], {'dim': '(0)', 'keepdim': '(True)'}), '(vertices, dim=0, keepdim=True)\n', (2357, 2388), False, 'import torch\n'), ((2407, 2447), 'torch.max', 'torch.max', (['vertices'], {'dim': '(0)', 'keepdim': '(True)'}), '(vertices, dim=0, keepdim=True)\n', (2416, 2447), False, 'import torch\n'), ((2815, 2841), 'bvh_distance_queries.BVH', 'bvh_distance_queries.BVH', ([], {}), '()\n', (2839, 2841), False, 'import bvh_distance_queries\n'), ((2847, 2871), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (2869, 2871), False, 'import torch\n'), ((2884, 2903), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2901, 2903), False, 'import time\n'), ((3004, 3028), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (3026, 3028), False, 'import torch\n'), ((3227, 3254), 'open3d.geometry.TriangleMesh', 'o3d.geometry.TriangleMesh', ([], {}), '()\n', (3252, 3254), True, 'import open3d as o3d\n'), ((3275, 3304), 'open3d.utility.Vector3dVector', 'o3d.utility.Vector3dVector', (['v'], {}), '(v)\n', (3301, 3304), True, 'import open3d as o3d\n'), ((3481, 3506), 'open3d.geometry.PointCloud', 'o3d.geometry.PointCloud', ([], {}), '()\n', (3504, 3506), True, 'import open3d as o3d\n'), ((3711, 3736), 'open3d.geometry.PointCloud', 'o3d.geometry.PointCloud', ([], {}), '()\n', (3734, 3736), True, 'import open3d as o3d\n'), ((3901, 3973), 'open3d.visualization.draw_geometries', 'o3d.visualization.draw_geometries', (['[mesh, query_pcl, closest_points_pcl]'], {}), '([mesh, query_pcl, closest_points_pcl])\n', (3934, 3973), True, 'import open3d as o3d\n'), ((2009, 2032), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (2026, 2032), False, 'import torch\n'), ((2468, 2540), 'torch.rand', 'torch.rand', (['[1, num_query_points, 3]'], {'dtype': 'torch.float32', 'device': 'device'}), '([1, num_query_points, 3], dtype=torch.float32, device=device)\n', (2478, 2540), False, 'import torch\n'), ((3066, 3085), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (3083, 3085), False, 'import time\n')] |
import PIL.Image,PIL.ImageDraw,PIL.ImageFont,PIL.ImageFilter
import random
# Random uppercase letter
def rndchar():
    # random.randint(65, 90) picks an integer in the ASCII range for 'A'-'Z';
    # chr(n) returns the character whose ASCII code is n, so this yields a random capital letter.
    return chr(random.randint(65, 90))
# Random colour 1 (light range, used for the background noise)
def rndcolor():
    return random.randint(64, 255), random.randint(64, 255), random.randint(64, 255)
# Random colour 2 (darker range, used for the letters)
def rndcolor2():
    return random.randint(32, 127), random.randint(32, 127), random.randint(32, 127)
width = 60*4
height = 60
# RGB mode: colours are produced by mixing the red (R), green (G) and blue (B) channels.
image = PIL.Image.new('RGB', (width, height), (255, 255, 255))
# Create the font object: load a TrueType/OpenType font file and build a font of the given size.
# The path can point to any font file, e.g. one copied from the system fonts folder.
font = PIL.ImageFont.truetype('fonts.ttf', 36)
# Create the draw object bound to the image
draw = PIL.ImageDraw.Draw(image)
# Fill every pixel with random background noise
for x in range(width):
    for y in range(height):
        draw.point((x, y), fill=rndcolor())
# Draw the four random letters
for t in range(4):
    draw.text((60*t+10, 10), rndchar(), font=font, fill=rndcolor2())
# Blur slightly and save
image = image.filter(PIL.ImageFilter.BLUR)
image.save('test2.png')
"random.randint"
] | [((112, 134), 'random.randint', 'random.randint', (['(65)', '(90)'], {}), '(65, 90)\n', (126, 134), False, 'import random\n'), ((279, 302), 'random.randint', 'random.randint', (['(64)', '(255)'], {}), '(64, 255)\n', (293, 302), False, 'import random\n'), ((303, 326), 'random.randint', 'random.randint', (['(64)', '(255)'], {}), '(64, 255)\n', (317, 326), False, 'import random\n'), ((327, 350), 'random.randint', 'random.randint', (['(64)', '(255)'], {}), '(64, 255)\n', (341, 350), False, 'import random\n'), ((387, 410), 'random.randint', 'random.randint', (['(32)', '(127)'], {}), '(32, 127)\n', (401, 410), False, 'import random\n'), ((412, 435), 'random.randint', 'random.randint', (['(32)', '(127)'], {}), '(32, 127)\n', (426, 435), False, 'import random\n'), ((437, 460), 'random.randint', 'random.randint', (['(32)', '(127)'], {}), '(32, 127)\n', (451, 460), False, 'import random\n')] |
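To use the image as an actual CAPTCHA, the generated letters have to be kept so the user's answer can be checked later. A small refactor sketch of the same drawing code into a reusable function; the function name and return convention are my own:

def make_captcha(path='captcha.png', length=4, cell=60):
    """Draw `length` random letters, save the image, and return the text for later verification."""
    img = PIL.Image.new('RGB', (cell * length, cell), (255, 255, 255))
    drw = PIL.ImageDraw.Draw(img)
    fnt = PIL.ImageFont.truetype('fonts.ttf', 36)
    for x in range(cell * length):
        for y in range(cell):
            drw.point((x, y), fill=rndcolor())
    text = ''.join(rndchar() for _ in range(length))
    for i, ch in enumerate(text):
        drw.text((cell * i + 10, 10), ch, font=fnt, fill=rndcolor2())
    img.filter(PIL.ImageFilter.BLUR).save(path)
    return text

answer = make_captcha('test3.png')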
# Advent of Code 2015
#
# From https://adventofcode.com/2015/day/12
import json
import re
filename = ''
data = [re.findall(r'(-?\d+)', row.strip()) for row in open(f'../inputs/Advent2015_12{filename}.json', 'r')]
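# Part 1: pull every signed integer out of the raw JSON text and sum them all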
print(f"AoC 2015 Day 12, Part 1 answer is {sum(int(x[0]) for x in data if x)}")
with open(f'../inputs/Advent2015_12{filename}.json', 'r') as read_file:
data = json.load(read_file)
def parse_level(level):
count = 0
if isinstance(level, dict):
if 'red' in level or 'red' in level.values():
return 0
for k, v in level.items():
if isinstance(k, int) or isinstance(k, str) and k.isdigit():
count += int(k)
if isinstance(v, int) or isinstance(v, str) and v.isdigit():
count += int(v)
if isinstance(v, (dict, list)):
count += parse_level(v)
elif isinstance(level, list):
for x in level:
if isinstance(x, int) or isinstance(x, str) and x.isdigit():
count += int(x)
elif isinstance(x, (dict, list)):
count += parse_level(x)
return count
print(f"AoC 2015 Day 12, Part 2 answer is {parse_level(data)}")
| [
"json.load"
] | [((380, 400), 'json.load', 'json.load', (['read_file'], {}), '(read_file)\n', (389, 400), False, 'import json\n')] |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.ads.google_ads.v0.proto.resources import keyword_view_pb2 as google_dot_ads_dot_googleads__v0_dot_proto_dot_resources_dot_keyword__view__pb2
from google.ads.google_ads.v0.proto.services import keyword_view_service_pb2 as google_dot_ads_dot_googleads__v0_dot_proto_dot_services_dot_keyword__view__service__pb2
class KeywordViewServiceStub(object):
"""Service to manage keyword views.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetKeywordView = channel.unary_unary(
'/google.ads.googleads.v0.services.KeywordViewService/GetKeywordView',
request_serializer=google_dot_ads_dot_googleads__v0_dot_proto_dot_services_dot_keyword__view__service__pb2.GetKeywordViewRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v0_dot_proto_dot_resources_dot_keyword__view__pb2.KeywordView.FromString,
)
class KeywordViewServiceServicer(object):
"""Service to manage keyword views.
"""
def GetKeywordView(self, request, context):
"""Returns the requested keyword view in full detail.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_KeywordViewServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetKeywordView': grpc.unary_unary_rpc_method_handler(
servicer.GetKeywordView,
request_deserializer=google_dot_ads_dot_googleads__v0_dot_proto_dot_services_dot_keyword__view__service__pb2.GetKeywordViewRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v0_dot_proto_dot_resources_dot_keyword__view__pb2.KeywordView.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.ads.googleads.v0.services.KeywordViewService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
| [
"grpc.method_handlers_generic_handler",
"grpc.unary_unary_rpc_method_handler"
] | [((1892, 2009), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""google.ads.googleads.v0.services.KeywordViewService"""', 'rpc_method_handlers'], {}), "(\n 'google.ads.googleads.v0.services.KeywordViewService', rpc_method_handlers)\n", (1928, 2009), False, 'import grpc\n'), ((1493, 1851), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetKeywordView'], {'request_deserializer': 'google_dot_ads_dot_googleads__v0_dot_proto_dot_services_dot_keyword__view__service__pb2.GetKeywordViewRequest.FromString', 'response_serializer': 'google_dot_ads_dot_googleads__v0_dot_proto_dot_resources_dot_keyword__view__pb2.KeywordView.SerializeToString'}), '(servicer.GetKeywordView,\n request_deserializer=\n google_dot_ads_dot_googleads__v0_dot_proto_dot_services_dot_keyword__view__service__pb2\n .GetKeywordViewRequest.FromString, response_serializer=\n google_dot_ads_dot_googleads__v0_dot_proto_dot_resources_dot_keyword__view__pb2\n .KeywordView.SerializeToString)\n', (1528, 1851), False, 'import grpc\n')] |
from typing import Optional
from werkzeug.security import check_password_hash
from .models.user import User
def authenticate(username, password) -> Optional[User]:
user = User.find_by_username(username)
if user and check_password_hash(user.hashed_password, password):
return user
return None
def user_identity_lookup(user: User) -> str:
return user.id
def user_lookup_callback(_jwt_header, jwt_data) -> Optional[User]:
user_id = jwt_data["sub"]
return User.find_by_id(user_id)
| [
"werkzeug.security.check_password_hash"
] | [((227, 278), 'werkzeug.security.check_password_hash', 'check_password_hash', (['user.hashed_password', 'password'], {}), '(user.hashed_password, password)\n', (246, 278), False, 'from werkzeug.security import check_password_hash\n')] |
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-git',
version='0.1.0',
description='Get git information for your django repository',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
url='https://github.com/spapas/django-git/',
zip_safe=False,
include_package_data=False,
packages=find_packages(exclude=['tests.*', 'tests', 'sample', ]),
install_requires=['Django >=1.4', 'six', 'GitPython > 1.0'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries',
],
)
| [
"setuptools.find_packages"
] | [((367, 420), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests.*', 'tests', 'sample']"}), "(exclude=['tests.*', 'tests', 'sample'])\n", (380, 420), False, 'from setuptools import setup, find_packages\n')] |
from __future__ import print_function
from astrometry.util.fits import *
import pylab as plt
import numpy as np
from glob import glob
from astrometry.util.plotutils import *
from astrometry.libkd.spherematch import *
from astrometry.util.resample import *
from astrometry.util.util import *
ps = PlotSequence('cosmos')
baseA = 'cosmos-dr5-60/'
baseB = 'cosmos-dr5-67/'
Atxt = '60'
Btxt = '67'
TA = merge_tables([fits_table(fn) for fn in glob(baseA + 'tractor/*/tractor-*.fits')])
print('Total of', len(TA), 'sources in 60')
TA.cut(TA.brick_primary)
print(len(TA), 'brick primary')
TB = merge_tables([fits_table(fn) for fn in glob(baseB + 'tractor/*/tractor-*.fits')])
print('Total of', len(TB), 'sources in 67')
TB.cut(TB.brick_primary)
print(len(TB), 'brick primary')
ramin = min(TA.ra.min(), TB.ra.min())
ramax = max(TA.ra.max(), TB.ra.max())
decmin = min(TA.dec.min(), TB.dec.min())
decmax = max(TA.dec.max(), TB.dec.max())
# Create low-res depth maps
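# pixel scale: 10 pixels of 0.262 arcsec each, converted to degrees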
pixsc = 10. * 0.262/3600.
rc,dc = (ramin+ramax)/2., (decmin+decmax)/2.
w = int((ramax - ramin) * np.cos(np.deg2rad(dc)) / pixsc)
h = int((decmax - decmin) / pixsc)
wcs = Tan(rc, dc, w/2., h/2., -pixsc, 0., 0., pixsc, float(w), float(h))
#print('WCS:', wcs)
#for band in ['g','r','z']:
for band in ['g']:
psfdepthA = np.zeros(wcs.shape, np.float32)
psfdepthB = np.zeros(wcs.shape, np.float32)
for fn in glob(baseA + 'coadd/*/*/legacysurvey-*-depth-%s.fits*' % band):
print('Reading', fn)
iwcs = Tan(fn, 1)
Yo,Xo,Yi,Xi,nil = resample_with_wcs(wcs, iwcs)
dmap = fitsio.read(fn)
#I = np.flatnonzero(np.isfinite(dmap) * (dmap > 0))
#print(len(I), 'finite & positive values')
psfdepthA[Yo,Xo] = dmap[Yi,Xi]
for fn in glob(baseB + 'coadd/*/*/legacysurvey-*-depth-%s.fits*' % band):
print('Reading', fn)
iwcs = Tan(fn, 1)
Yo,Xo,Yi,Xi,nil = resample_with_wcs(wcs, iwcs)
dmap = fitsio.read(fn)
#I = np.flatnonzero(np.isfinite(dmap) * (dmap > 0))
#print(len(I), 'finite & positive values')
psfdepthB[Yo,Xo] = dmap[Yi,Xi]
galdepthA = np.zeros(wcs.shape, np.float32)
galdepthB = np.zeros(wcs.shape, np.float32)
for fn in glob(baseA + 'coadd/*/*/legacysurvey-*-galdepth-%s.fits*' % band):
print('Reading', fn)
iwcs = Tan(fn, 1)
Yo,Xo,Yi,Xi,nil = resample_with_wcs(wcs, iwcs)
dmap = fitsio.read(fn)
#I = np.flatnonzero(np.isfinite(dmap) * (dmap > 0))
#print(len(I), 'finite & positive values')
galdepthA[Yo,Xo] = dmap[Yi,Xi]
for fn in glob(baseB + 'coadd/*/*/legacysurvey-*-galdepth-%s.fits*' % band):
print('Reading', fn)
iwcs = Tan(fn, 1)
Yo,Xo,Yi,Xi,nil = resample_with_wcs(wcs, iwcs)
dmap = fitsio.read(fn)
#I = np.flatnonzero(np.isfinite(dmap) * (dmap > 0))
#print(len(I), 'finite & positive values')
galdepthB[Yo,Xo] = dmap[Yi,Xi]
print('PsfdepthA (iv)', psfdepthA.min(), psfdepthA.max())
print('PsfdepthB (iv)', psfdepthB.min(), psfdepthB.max())
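    # convert the inverse-variance depth maps to 5-sigma magnitude limits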
psfdepthA = -2.5 * (np.log10(5./np.sqrt(psfdepthA)) - 9)
psfdepthB = -2.5 * (np.log10(5./np.sqrt(psfdepthB)) - 9)
print('PsfdepthA', psfdepthA.min(), psfdepthA.max())
print('PsfdepthB', psfdepthB.min(), psfdepthB.max())
galdepthA = -2.5 * (np.log10(5./np.sqrt(galdepthA)) - 9)
galdepthB = -2.5 * (np.log10(5./np.sqrt(galdepthB)) - 9)
print('GaldepthA', galdepthA.min(), galdepthA.max())
print('GaldepthB', galdepthB.min(), galdepthB.max())
ima = dict(interpolation='nearest', origin='lower',
extent=[ramax,ramin,decmin,decmax], vmin=20.0, vmax=24.5)
plt.clf()
plt.subplot(1,2,1)
plt.imshow(psfdepthA, **ima)
plt.title(Atxt)
plt.subplot(1,2,2)
plt.imshow(psfdepthB, **ima)
plt.title(Btxt)
plt.suptitle('PSF Depth maps (%s)' % band)
ps.savefig()
plt.clf()
plt.subplot(1,2,1)
plt.imshow(galdepthA, **ima)
plt.title(Atxt)
plt.subplot(1,2,2)
plt.imshow(galdepthB, **ima)
plt.title(Btxt)
plt.suptitle('Galaxy Depth maps (%s)' % band)
ps.savefig()
# dd = np.append(galdepthA.ravel(), galdepthB.ravel())
# dd = dd[np.isfinite(dd)]
# thresh = np.percentile(dd, 10)
# print('Depth threshold:', thresh)
thresh = 24.0
hh,ww = wcs.shape
ok,xx,yy = wcs.radec2pixelxy(TA.ra, TA.dec)
xx = np.clip((np.round(xx) - 1), 0, ww-1).astype(int)
yy = np.clip((np.round(yy) - 1), 0, hh-1).astype(int)
I = np.flatnonzero((galdepthA[yy,xx] > thresh) * (galdepthB[yy,xx] > thresh))
print(len(I), 'of', len(TA), 'sources in A are in good-depth regions')
TA.cut(I)
ok,xx,yy = wcs.radec2pixelxy(TB.ra, TB.dec)
xx = np.clip((np.round(xx) - 1), 0, ww-1).astype(int)
yy = np.clip((np.round(yy) - 1), 0, hh-1).astype(int)
I = np.flatnonzero((galdepthA[yy,xx] > thresh) * (galdepthB[yy,xx] > thresh))
print(len(I), 'of', len(TB), 'sources in B are in good-depth regions')
TB.cut(I)
ha = dict(range=(18,27), bins=50, histtype='stepfilled', alpha=0.1)
hb = dict(range=(18,27), bins=50, histtype='stepfilled', alpha=0.1)
plt.clf()
plt.hist(np.maximum(psfdepthA.ravel(), 18), color='b', label=Atxt, **ha)
plt.hist(np.maximum(psfdepthB.ravel(), 18), color='r', label=Btxt, **hb)
plt.xlim(18,27)
plt.legend()
plt.title('PSF depth map values (g mag)')
ps.savefig()
plt.clf()
plt.hist(np.maximum(galdepthA.ravel(), 18), color='b', label=Atxt, **ha)
plt.hist(np.maximum(galdepthB.ravel(), 18), color='r', label=Btxt, **hb)
plt.xlim(18,27)
plt.legend()
plt.title('Galaxy depth map values (g mag)')
ps.savefig()
TA.mag_g = -2.5 * (np.log10(TA.flux_g) - 9)
TB.mag_g = -2.5 * (np.log10(TB.flux_g) - 9)
TA.psfdepth_mag_g = -2.5 * (np.log10(5./np.sqrt(TA.psfdepth_g)) - 9)
TB.psfdepth_mag_g = -2.5 * (np.log10(5./np.sqrt(TB.psfdepth_g)) - 9)
TA.galdepth_mag_g = -2.5 * (np.log10(5./np.sqrt(TA.galdepth_g)) - 9)
TB.galdepth_mag_g = -2.5 * (np.log10(5./np.sqrt(TB.galdepth_g)) - 9)
ha = dict(range=(18,27), bins=50, histtype='stepfilled', alpha=0.1)
hb = dict(range=(18,27), bins=50, histtype='stepfilled', alpha=0.1)
ha2 = dict(range=(18,27), bins=50, histtype='step', alpha=0.5)
hb2 = dict(range=(18,27), bins=50, histtype='step', alpha=0.5)
plt.clf()
plt.hist(TA.mag_g, color='b', label=Atxt, **ha)
plt.hist(TA.mag_g, color='b', **ha2)
plt.hist(TB.mag_g, color='r', label=Btxt, **hb)
plt.hist(TB.mag_g, color='r', **hb2)
plt.xlim(18,27)
plt.legend()
plt.xlabel('All sources: g mag')
ps.savefig()
ha = dict(range=(23,25), bins=50, histtype='stepfilled', alpha=0.1)
hb = dict(range=(23,25), bins=50, histtype='stepfilled', alpha=0.1)
plt.clf()
plt.hist(TA.psfdepth_mag_g, color='b', label=Atxt, **ha)
plt.hist(TB.psfdepth_mag_g, color='r', label=Btxt, **hb)
plt.xlim(23,25)
plt.legend()
plt.title('PSF depth for sources (g mag)')
ps.savefig()
plt.clf()
plt.hist(TA.galdepth_mag_g, color='b', label=Atxt, **ha)
plt.hist(TB.galdepth_mag_g, color='r', label=Btxt, **hb)
plt.xlim(23,25)
plt.legend()
plt.title('Gal depth for sources (g mag)')
ps.savefig()
ha = dict(range=((ramin,ramax),(decmin,decmax)), doclf=False,
docolorbar=False, imshowargs=dict(vmin=0, vmax=14))
plt.clf()
plt.subplot(1,2,1)
plothist(TA.ra, TA.dec, 200, **ha)
plt.title(Atxt)
plt.subplot(1,2,2)
plothist(TB.ra, TB.dec, 200, **ha)
plt.title(Btxt)
plt.suptitle('All sources')
ps.savefig()
I,J,d = match_radec(TA.ra, TA.dec, TB.ra, TB.dec, 1./3600.)
unmatchedA = np.ones(len(TA), bool)
unmatchedB = np.ones(len(TB), bool)
unmatchedA[I] = False
unmatchedB[J] = False
ha = dict(range=((ramin,ramax),(decmin,decmax)), doclf=False,
docolorbar=False, imshowargs=dict(vmin=0, vmax=5))
plt.clf()
plt.subplot(1,2,1)
plothist(TA.ra[unmatchedA], TA.dec[unmatchedA], 200, **ha)
plt.title(Atxt)
plt.subplot(1,2,2)
plothist(TB.ra[unmatchedB], TB.dec[unmatchedB], 200, **ha)
plt.title(Btxt)
plt.suptitle('Un-matched sources')
ps.savefig()
| [
"pylab.title",
"numpy.log10",
"pylab.hist",
"numpy.sqrt",
"pylab.subplot",
"numpy.round",
"numpy.flatnonzero",
"pylab.xlabel",
"pylab.legend",
"numpy.zeros",
"numpy.deg2rad",
"glob.glob",
"pylab.xlim",
"pylab.clf",
"pylab.suptitle",
"pylab.imshow"
] | [((4483, 4558), 'numpy.flatnonzero', 'np.flatnonzero', (['((galdepthA[yy, xx] > thresh) * (galdepthB[yy, xx] > thresh))'], {}), '((galdepthA[yy, xx] > thresh) * (galdepthB[yy, xx] > thresh))\n', (4497, 4558), True, 'import numpy as np\n'), ((4795, 4870), 'numpy.flatnonzero', 'np.flatnonzero', (['((galdepthA[yy, xx] > thresh) * (galdepthB[yy, xx] > thresh))'], {}), '((galdepthA[yy, xx] > thresh) * (galdepthB[yy, xx] > thresh))\n', (4809, 4870), True, 'import numpy as np\n'), ((5089, 5098), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (5096, 5098), True, 'import pylab as plt\n'), ((5245, 5261), 'pylab.xlim', 'plt.xlim', (['(18)', '(27)'], {}), '(18, 27)\n', (5253, 5261), True, 'import pylab as plt\n'), ((5261, 5273), 'pylab.legend', 'plt.legend', ([], {}), '()\n', (5271, 5273), True, 'import pylab as plt\n'), ((5274, 5315), 'pylab.title', 'plt.title', (['"""PSF depth map values (g mag)"""'], {}), "('PSF depth map values (g mag)')\n", (5283, 5315), True, 'import pylab as plt\n'), ((5330, 5339), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (5337, 5339), True, 'import pylab as plt\n'), ((5486, 5502), 'pylab.xlim', 'plt.xlim', (['(18)', '(27)'], {}), '(18, 27)\n', (5494, 5502), True, 'import pylab as plt\n'), ((5502, 5514), 'pylab.legend', 'plt.legend', ([], {}), '()\n', (5512, 5514), True, 'import pylab as plt\n'), ((5515, 5559), 'pylab.title', 'plt.title', (['"""Galaxy depth map values (g mag)"""'], {}), "('Galaxy depth map values (g mag)')\n", (5524, 5559), True, 'import pylab as plt\n'), ((6204, 6213), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (6211, 6213), True, 'import pylab as plt\n'), ((6214, 6261), 'pylab.hist', 'plt.hist', (['TA.mag_g'], {'color': '"""b"""', 'label': 'Atxt'}), "(TA.mag_g, color='b', label=Atxt, **ha)\n", (6222, 6261), True, 'import pylab as plt\n'), ((6262, 6298), 'pylab.hist', 'plt.hist', (['TA.mag_g'], {'color': '"""b"""'}), "(TA.mag_g, color='b', **ha2)\n", (6270, 6298), True, 'import pylab as plt\n'), ((6299, 6346), 'pylab.hist', 'plt.hist', (['TB.mag_g'], {'color': '"""r"""', 'label': 'Btxt'}), "(TB.mag_g, color='r', label=Btxt, **hb)\n", (6307, 6346), True, 'import pylab as plt\n'), ((6347, 6383), 'pylab.hist', 'plt.hist', (['TB.mag_g'], {'color': '"""r"""'}), "(TB.mag_g, color='r', **hb2)\n", (6355, 6383), True, 'import pylab as plt\n'), ((6384, 6400), 'pylab.xlim', 'plt.xlim', (['(18)', '(27)'], {}), '(18, 27)\n', (6392, 6400), True, 'import pylab as plt\n'), ((6400, 6412), 'pylab.legend', 'plt.legend', ([], {}), '()\n', (6410, 6412), True, 'import pylab as plt\n'), ((6413, 6445), 'pylab.xlabel', 'plt.xlabel', (['"""All sources: g mag"""'], {}), "('All sources: g mag')\n", (6423, 6445), True, 'import pylab as plt\n'), ((6597, 6606), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (6604, 6606), True, 'import pylab as plt\n'), ((6607, 6663), 'pylab.hist', 'plt.hist', (['TA.psfdepth_mag_g'], {'color': '"""b"""', 'label': 'Atxt'}), "(TA.psfdepth_mag_g, color='b', label=Atxt, **ha)\n", (6615, 6663), True, 'import pylab as plt\n'), ((6664, 6720), 'pylab.hist', 'plt.hist', (['TB.psfdepth_mag_g'], {'color': '"""r"""', 'label': 'Btxt'}), "(TB.psfdepth_mag_g, color='r', label=Btxt, **hb)\n", (6672, 6720), True, 'import pylab as plt\n'), ((6721, 6737), 'pylab.xlim', 'plt.xlim', (['(23)', '(25)'], {}), '(23, 25)\n', (6729, 6737), True, 'import pylab as plt\n'), ((6737, 6749), 'pylab.legend', 'plt.legend', ([], {}), '()\n', (6747, 6749), True, 'import pylab as plt\n'), ((6750, 6792), 'pylab.title', 'plt.title', (['"""PSF depth for sources (g mag)"""'], {}), "('PSF depth for sources 
(g mag)')\n", (6759, 6792), True, 'import pylab as plt\n'), ((6807, 6816), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (6814, 6816), True, 'import pylab as plt\n'), ((6817, 6873), 'pylab.hist', 'plt.hist', (['TA.galdepth_mag_g'], {'color': '"""b"""', 'label': 'Atxt'}), "(TA.galdepth_mag_g, color='b', label=Atxt, **ha)\n", (6825, 6873), True, 'import pylab as plt\n'), ((6874, 6930), 'pylab.hist', 'plt.hist', (['TB.galdepth_mag_g'], {'color': '"""r"""', 'label': 'Btxt'}), "(TB.galdepth_mag_g, color='r', label=Btxt, **hb)\n", (6882, 6930), True, 'import pylab as plt\n'), ((6931, 6947), 'pylab.xlim', 'plt.xlim', (['(23)', '(25)'], {}), '(23, 25)\n', (6939, 6947), True, 'import pylab as plt\n'), ((6947, 6959), 'pylab.legend', 'plt.legend', ([], {}), '()\n', (6957, 6959), True, 'import pylab as plt\n'), ((6960, 7002), 'pylab.title', 'plt.title', (['"""Gal depth for sources (g mag)"""'], {}), "('Gal depth for sources (g mag)')\n", (6969, 7002), True, 'import pylab as plt\n'), ((7142, 7151), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (7149, 7151), True, 'import pylab as plt\n'), ((7152, 7172), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (7163, 7172), True, 'import pylab as plt\n'), ((7206, 7221), 'pylab.title', 'plt.title', (['Atxt'], {}), '(Atxt)\n', (7215, 7221), True, 'import pylab as plt\n'), ((7222, 7242), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (7233, 7242), True, 'import pylab as plt\n'), ((7276, 7291), 'pylab.title', 'plt.title', (['Btxt'], {}), '(Btxt)\n', (7285, 7291), True, 'import pylab as plt\n'), ((7292, 7319), 'pylab.suptitle', 'plt.suptitle', (['"""All sources"""'], {}), "('All sources')\n", (7304, 7319), True, 'import pylab as plt\n'), ((7636, 7645), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (7643, 7645), True, 'import pylab as plt\n'), ((7646, 7666), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (7657, 7666), True, 'import pylab as plt\n'), ((7724, 7739), 'pylab.title', 'plt.title', (['Atxt'], {}), '(Atxt)\n', (7733, 7739), True, 'import pylab as plt\n'), ((7740, 7760), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (7751, 7760), True, 'import pylab as plt\n'), ((7818, 7833), 'pylab.title', 'plt.title', (['Btxt'], {}), '(Btxt)\n', (7827, 7833), True, 'import pylab as plt\n'), ((7834, 7868), 'pylab.suptitle', 'plt.suptitle', (['"""Un-matched sources"""'], {}), "('Un-matched sources')\n", (7846, 7868), True, 'import pylab as plt\n'), ((1287, 1318), 'numpy.zeros', 'np.zeros', (['wcs.shape', 'np.float32'], {}), '(wcs.shape, np.float32)\n', (1295, 1318), True, 'import numpy as np\n'), ((1335, 1366), 'numpy.zeros', 'np.zeros', (['wcs.shape', 'np.float32'], {}), '(wcs.shape, np.float32)\n', (1343, 1366), True, 'import numpy as np\n'), ((1381, 1443), 'glob.glob', 'glob', (["(baseA + 'coadd/*/*/legacysurvey-*-depth-%s.fits*' % band)"], {}), "(baseA + 'coadd/*/*/legacysurvey-*-depth-%s.fits*' % band)\n", (1385, 1443), False, 'from glob import glob\n'), ((1750, 1812), 'glob.glob', 'glob', (["(baseB + 'coadd/*/*/legacysurvey-*-depth-%s.fits*' % band)"], {}), "(baseB + 'coadd/*/*/legacysurvey-*-depth-%s.fits*' % band)\n", (1754, 1812), False, 'from glob import glob\n'), ((2122, 2153), 'numpy.zeros', 'np.zeros', (['wcs.shape', 'np.float32'], {}), '(wcs.shape, np.float32)\n', (2130, 2153), True, 'import numpy as np\n'), ((2170, 2201), 'numpy.zeros', 'np.zeros', (['wcs.shape', 'np.float32'], {}), '(wcs.shape, np.float32)\n', (2178, 2201), True, 'import numpy as np\n'), 
((2216, 2281), 'glob.glob', 'glob', (["(baseA + 'coadd/*/*/legacysurvey-*-galdepth-%s.fits*' % band)"], {}), "(baseA + 'coadd/*/*/legacysurvey-*-galdepth-%s.fits*' % band)\n", (2220, 2281), False, 'from glob import glob\n'), ((2588, 2653), 'glob.glob', 'glob', (["(baseB + 'coadd/*/*/legacysurvey-*-galdepth-%s.fits*' % band)"], {}), "(baseB + 'coadd/*/*/legacysurvey-*-galdepth-%s.fits*' % band)\n", (2592, 2653), False, 'from glob import glob\n'), ((3681, 3690), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (3688, 3690), True, 'import pylab as plt\n'), ((3695, 3715), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (3706, 3715), True, 'import pylab as plt\n'), ((3718, 3746), 'pylab.imshow', 'plt.imshow', (['psfdepthA'], {}), '(psfdepthA, **ima)\n', (3728, 3746), True, 'import pylab as plt\n'), ((3751, 3766), 'pylab.title', 'plt.title', (['Atxt'], {}), '(Atxt)\n', (3760, 3766), True, 'import pylab as plt\n'), ((3771, 3791), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (3782, 3791), True, 'import pylab as plt\n'), ((3794, 3822), 'pylab.imshow', 'plt.imshow', (['psfdepthB'], {}), '(psfdepthB, **ima)\n', (3804, 3822), True, 'import pylab as plt\n'), ((3827, 3842), 'pylab.title', 'plt.title', (['Btxt'], {}), '(Btxt)\n', (3836, 3842), True, 'import pylab as plt\n'), ((3847, 3889), 'pylab.suptitle', 'plt.suptitle', (["('PSF Depth maps (%s)' % band)"], {}), "('PSF Depth maps (%s)' % band)\n", (3859, 3889), True, 'import pylab as plt\n'), ((3912, 3921), 'pylab.clf', 'plt.clf', ([], {}), '()\n', (3919, 3921), True, 'import pylab as plt\n'), ((3926, 3946), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (3937, 3946), True, 'import pylab as plt\n'), ((3949, 3977), 'pylab.imshow', 'plt.imshow', (['galdepthA'], {}), '(galdepthA, **ima)\n', (3959, 3977), True, 'import pylab as plt\n'), ((3982, 3997), 'pylab.title', 'plt.title', (['Atxt'], {}), '(Atxt)\n', (3991, 3997), True, 'import pylab as plt\n'), ((4002, 4022), 'pylab.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4013, 4022), True, 'import pylab as plt\n'), ((4025, 4053), 'pylab.imshow', 'plt.imshow', (['galdepthB'], {}), '(galdepthB, **ima)\n', (4035, 4053), True, 'import pylab as plt\n'), ((4058, 4073), 'pylab.title', 'plt.title', (['Btxt'], {}), '(Btxt)\n', (4067, 4073), True, 'import pylab as plt\n'), ((4078, 4123), 'pylab.suptitle', 'plt.suptitle', (["('Galaxy Depth maps (%s)' % band)"], {}), "('Galaxy Depth maps (%s)' % band)\n", (4090, 4123), True, 'import pylab as plt\n'), ((5594, 5613), 'numpy.log10', 'np.log10', (['TA.flux_g'], {}), '(TA.flux_g)\n', (5602, 5613), True, 'import numpy as np\n'), ((5638, 5657), 'numpy.log10', 'np.log10', (['TB.flux_g'], {}), '(TB.flux_g)\n', (5646, 5657), True, 'import numpy as np\n'), ((441, 481), 'glob.glob', 'glob', (["(baseA + 'tractor/*/tractor-*.fits')"], {}), "(baseA + 'tractor/*/tractor-*.fits')\n", (445, 481), False, 'from glob import glob\n'), ((630, 670), 'glob.glob', 'glob', (["(baseB + 'tractor/*/tractor-*.fits')"], {}), "(baseB + 'tractor/*/tractor-*.fits')\n", (634, 670), False, 'from glob import glob\n'), ((1070, 1084), 'numpy.deg2rad', 'np.deg2rad', (['dc'], {}), '(dc)\n', (1080, 1084), True, 'import numpy as np\n'), ((4385, 4397), 'numpy.round', 'np.round', (['xx'], {}), '(xx)\n', (4393, 4397), True, 'import numpy as np\n'), ((4439, 4451), 'numpy.round', 'np.round', (['yy'], {}), '(yy)\n', (4447, 4451), True, 'import numpy as np\n'), ((4697, 4709), 'numpy.round', 'np.round', (['xx'], {}), 
'(xx)\n', (4705, 4709), True, 'import numpy as np\n'), ((4751, 4763), 'numpy.round', 'np.round', (['yy'], {}), '(yy)\n', (4759, 4763), True, 'import numpy as np\n'), ((5704, 5726), 'numpy.sqrt', 'np.sqrt', (['TA.psfdepth_g'], {}), '(TA.psfdepth_g)\n', (5711, 5726), True, 'import numpy as np\n'), ((5773, 5795), 'numpy.sqrt', 'np.sqrt', (['TB.psfdepth_g'], {}), '(TB.psfdepth_g)\n', (5780, 5795), True, 'import numpy as np\n'), ((5842, 5864), 'numpy.sqrt', 'np.sqrt', (['TA.galdepth_g'], {}), '(TA.galdepth_g)\n', (5849, 5864), True, 'import numpy as np\n'), ((5911, 5933), 'numpy.sqrt', 'np.sqrt', (['TB.galdepth_g'], {}), '(TB.galdepth_g)\n', (5918, 5933), True, 'import numpy as np\n'), ((3111, 3129), 'numpy.sqrt', 'np.sqrt', (['psfdepthA'], {}), '(psfdepthA)\n', (3118, 3129), True, 'import numpy as np\n'), ((3172, 3190), 'numpy.sqrt', 'np.sqrt', (['psfdepthB'], {}), '(psfdepthB)\n', (3179, 3190), True, 'import numpy as np\n'), ((3347, 3365), 'numpy.sqrt', 'np.sqrt', (['galdepthA'], {}), '(galdepthA)\n', (3354, 3365), True, 'import numpy as np\n'), ((3408, 3426), 'numpy.sqrt', 'np.sqrt', (['galdepthB'], {}), '(galdepthB)\n', (3415, 3426), True, 'import numpy as np\n')] |
import yaml
class ModelYaml():
FileName = "model.yaml"
def __init__(
self,
yamlText: str
):
o = yaml.load(
yamlText,
Loader=yaml.SafeLoader
)
ModelYaml._shouldNotEmpty(o, [
"version",
"kind",
"name"
])
self.version = o.get("version")
self.kind = o.get("kind")
self.name = o.get("name")
def toYaml(self) -> str:
return yaml.dump(self)
@classmethod
def default(cls, name: str):
yaml = f"""
version: v1
kind: luna-ml/model
name: {name}
"""
return ModelYaml(yaml)
@classmethod
def _shouldNotEmpty(self, o, labels, path = ""):
for l in labels:
if o.get(l) == None or o.get(l) == "":
raise ValueError("'{}{}' is missing in {}".format(path, l, ModelYaml.FileName))
| [
"yaml.load",
"yaml.dump"
] | [((143, 186), 'yaml.load', 'yaml.load', (['yamlText'], {'Loader': 'yaml.SafeLoader'}), '(yamlText, Loader=yaml.SafeLoader)\n', (152, 186), False, 'import yaml\n'), ((488, 503), 'yaml.dump', 'yaml.dump', (['self'], {}), '(self)\n', (497, 503), False, 'import yaml\n')] |
import time
import pytest
@pytest.mark.parametrize("index", range(7))
def test_cat(index):
"""Perform several tests with varying execution times."""
time.sleep(0.2 + (index * 0.1))
assert True
| [
"time.sleep"
] | [((158, 187), 'time.sleep', 'time.sleep', (['(0.2 + index * 0.1)'], {}), '(0.2 + index * 0.1)\n', (168, 187), False, 'import time\n')] |
import gym
import numpy as np
import matplotlib.pyplot as plt
def policy(state, theta):
""" TODO: return probabilities for actions under softmax action selection """
h = state @ theta
return np.exp(h)/np.sum(np.exp(h))
def generate_episode(env, theta, display=False):
""" enerates one episode and returns the list of states, the list of rewards and the list of actions of that episode """
state = env.reset()
states = [state]
actions = []
rewards = []
for t in range(500):
if display:
env.render()
p = policy(state, theta)
action = np.random.choice(len(p), p=p)
state, reward, done, info = env.step(action)
rewards.append(reward)
actions.append(action)
if done:
break
states.append(state)
return states, rewards, actions
def REINFORCE(env, gamma=0.99, alpha=0.05):
theta = np.random.rand(4, 2) # policy parameters
ep_len_list = []
mean_ep_len = []
for e in range(1000):
if e % 300 == 0:
states, rewards, actions = generate_episode(env, theta, False) # display the policy every 300 episodes
else:
states, rewards, actions = generate_episode(env, theta, False)
        # keep track of the previous 100 episode lengths and compute their mean
        if len(ep_len_list) >= 100:
            ep_len_list.pop(0)  # drop the oldest episode length
ep_len_list.append(len(states))
mean = sum(ep_len_list) / len(ep_len_list)
mean_ep_len.append(mean)
print("episode:\t" + str(e) + " length:\t" + str(len(states)) + " mean len:\t" + str(mean))
        # REINFORCE update: use the discounted returns of this episode to improve the policy weights
nr_steps = len(states)
G = np.zeros([nr_steps])
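        # G[t] is the discounted return from step t: sum over k = t+1..T of gamma**(k-t-1) * rewards[k-1]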
for t in range(nr_steps):
for k in range(t+1,nr_steps+1):
G[t] += (gamma**(k-t-1)) * rewards[k-1]
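            # for the chosen action's column, the gradient of the log softmax policy is states[t] * (1 - pi(action | states[t]))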
action = actions[t]
theta[:,action] = theta[:,action] + alpha * (gamma**t) * G[t] * (states[t] * (1 - policy(states[t], theta)[action]))
return mean_ep_len
def main():
env = gym.make('CartPole-v1')
mean_ep_len = REINFORCE(env)
plt.plot(mean_ep_len)
plt.title("Mean Ep length over time")
plt.xlabel("Episodes")
plt.ylabel("Mean Episode Length")
plt.legend()
plt.savefig('ex09' + '.png')
plt.show()
env.close()
if __name__ == "__main__":
main()
| [
"matplotlib.pyplot.savefig",
"numpy.random.rand",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.exp",
"numpy.zeros",
"matplotlib.pyplot.title",
"gym.make",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((913, 933), 'numpy.random.rand', 'np.random.rand', (['(4)', '(2)'], {}), '(4, 2)\n', (927, 933), True, 'import numpy as np\n'), ((2139, 2162), 'gym.make', 'gym.make', (['"""CartPole-v1"""'], {}), "('CartPole-v1')\n", (2147, 2162), False, 'import gym\n'), ((2200, 2221), 'matplotlib.pyplot.plot', 'plt.plot', (['mean_ep_len'], {}), '(mean_ep_len)\n', (2208, 2221), True, 'import matplotlib.pyplot as plt\n'), ((2226, 2263), 'matplotlib.pyplot.title', 'plt.title', (['"""Mean Ep length over time"""'], {}), "('Mean Ep length over time')\n", (2235, 2263), True, 'import matplotlib.pyplot as plt\n'), ((2268, 2290), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Episodes"""'], {}), "('Episodes')\n", (2278, 2290), True, 'import matplotlib.pyplot as plt\n'), ((2295, 2328), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mean Episode Length"""'], {}), "('Mean Episode Length')\n", (2305, 2328), True, 'import matplotlib.pyplot as plt\n'), ((2333, 2345), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2343, 2345), True, 'import matplotlib.pyplot as plt\n'), ((2350, 2378), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('ex09' + '.png')"], {}), "('ex09' + '.png')\n", (2361, 2378), True, 'import matplotlib.pyplot as plt\n'), ((2383, 2393), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2391, 2393), True, 'import matplotlib.pyplot as plt\n'), ((205, 214), 'numpy.exp', 'np.exp', (['h'], {}), '(h)\n', (211, 214), True, 'import numpy as np\n'), ((1774, 1794), 'numpy.zeros', 'np.zeros', (['[nr_steps]'], {}), '([nr_steps])\n', (1782, 1794), True, 'import numpy as np\n'), ((222, 231), 'numpy.exp', 'np.exp', (['h'], {}), '(h)\n', (228, 231), True, 'import numpy as np\n')] |
import unittest
from ghostwriter import app, mm
#
# Post basic test fixture(?)
# Copyright (C) 2017 <NAME>
#
class PostArticleTestCase(unittest.TestCase):
from flask import json
def setUp(self):
mm.setDatabaseURI('sqlite:////tmp/unittest.db')
mm.init()
mm.create()
self.app = app.test_client()
self.username = ""
self.password = ""
self.create_user()
def tearDown(self):
mm.drop()
def create_user(self):
from ghostwriter.User import User
from ghostwriter.UserManager import UserManager
self.username = 'malakoi'
self.password = '<PASSWORD>'
u = User(self.username)
umng = UserManager()
umng.addUser(u, self.password)
def authenticate(self):
res = self.app.post('/admin/login',
data = {
'username': self.username,
'password': self.password
}, follow_redirects=True)
self.assertEqual(res.status, "200 OK")
def deauthenticate(self):
res = self.app.get('/admin/logoff', follow_redirects=True)
self.assertEqual(res.status, "200 OK")
def test_create_blog_post_unauthenticated(self):
res = self.app.post('/api/post/create/',
data = {
'title': "This won't work"
}, follow_redirects=True)
self.assertEqual(res.status, "401 UNAUTHORIZED")
def test_create_blog_post_authenticated(self):
self.authenticate()
res = self.app.post('/api/post/create/',
data = {
'title': "This will work"
}, follow_redirects=True)
self.assertEqual(res.status, "200 OK")
self.deauthenticate()
def test_create_and_read_blog_post(self):
from flask import json
self.authenticate()
res = self.app.post('/api/post/create/',
data = {
'title': "This will maybe work"
}, follow_redirects=True)
self.assertEqual(res.status, "200 OK")
create_post_data = json.loads(res.data)
res = self.app.get('/api/post/'+str(create_post_data['id'])+'/',
follow_redirects=True)
get_post_data = json.loads(res.data)
self.assertEqual(get_post_data['id'], create_post_data['id'])
self.assertEqual(get_post_data['title'], create_post_data['title'])
self.assertEqual(get_post_data['creation_date'], create_post_data['creation_date'])
self.assertEqual(get_post_data['summary'], create_post_data['summary'])
self.assertEqual(1, get_post_data['owner']['id'])
self.assertEqual(self.username, get_post_data['owner']['name'])
self.deauthenticate()
def test_get_content(self):
self.authenticate()
from ghostwriter.Post import Post, PostManager
from flask import json
p = Post(1, 'Get Content Test')
p.setContent('Post content')
pm = PostManager()
pm.addPost(p)
res = self.app.get('/api/post/'+str(p.ID)+'/content',
follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = res.data
self.assertEqual(b'Post content', post_data)
self.deauthenticate()
def test_set_and_get_content(self):
self.authenticate()
from ghostwriter.Post import Post, PostManager
from flask import json
p = Post(1, 'Get Content Test')
p.setContent('Post content')
pm = PostManager()
pm.addPost(p)
res = self.app.put('/api/post/'+str(p.ID)+'/content',
data = {
'content': 'New Post content'
},
follow_redirects=True)
self.assertEqual(res.status, '200 OK')
res = self.app.get('/api/post/'+str(p.ID)+'/content',
follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = res.data
self.assertEqual(b'New Post content', post_data)
self.deauthenticate()
def test_set_and_get_metadata(self):
self.authenticate()
from ghostwriter.Post import Post, PostManager
from flask import json
p = Post(1, 'Get Meta Test')
p.setContent('Post content')
pm = PostManager()
pm.addPost(p)
res = self.app.put('/api/post/'+str(p.ID)+'/',
data = {
'title': 'New Meta Test'
},
follow_redirects=True)
self.assertEqual(res.status, '200 OK')
res = self.app.get('/api/post/'+str(p.ID)+'/',
follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual('New Meta Test', post_data['title'])
self.deauthenticate()
def test_delete_blog_post(self):
self.authenticate()
from ghostwriter.Post import Post, PostManager
from flask import json
p = Post(1, 'Get Content Test')
p.setContent('Post content')
pm = PostManager()
pm.addPost(p)
res = self.app.delete('/api/post/'+str(p.ID)+'/',
follow_redirects=True)
self.assertEqual(res.status, '200 OK')
res = self.app.delete('/api/post/'+str(p.ID)+'/',
follow_redirects=True)
self.assertEqual(res.status, '404 NOT FOUND')
#
# Post composition
class PostComposeTestCase(unittest.TestCase):
from flask import json
def setUp(self):
mm.setDatabaseURI('sqlite:////tmp/unittest.db')
mm.init()
mm.create()
self.app = app.test_client()
self.user = self.create_user('test', 'test')
def tearDown(self):
mm.drop()
def create_user(self, username, password):
from ghostwriter.User import User
from ghostwriter.UserManager import UserManager
u = User(username)
umng = UserManager()
umng.addUser(u, password)
return u
def create_post(self, title, body, author, cdate=None):
from ghostwriter.Post import Post, PostManager
po = Post(author.uid, title, cdate)
po.setContent(body)
return po
def testIfSummaryCorrect(self):
from ghostwriter.Post import Post
p = self.create_post("New Post",
""" This is a big summary
Note that we will have a lot of lines, but it finish here.
Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet """, self.user)
cdata = p.getSummary()
self.assertEqual('.', cdata[-1])
self.assertNotEqual('...', cdata[-3:])
#
# Post search tests
class PostSearchTestCase(unittest.TestCase):
from flask import json
def setUp(self):
mm.setDatabaseURI('sqlite:////tmp/unittest.db')
mm.init()
mm.create()
self.app = app.test_client()
self.user = self.create_user('test', 'test')
def tearDown(self):
mm.drop()
def create_user(self, username, password):
from ghostwriter.User import User
from ghostwriter.UserManager import UserManager
u = User(username)
umng = UserManager()
umng.addUser(u, password)
return u
def create_post(self, title, body, author, cdate=None):
from ghostwriter.Post import Post, PostManager
po = Post(author.uid, title, cdate)
po.setContent(body)
pm = PostManager()
pm.addPost(po)
def test_searchbyTitle(self):
import json
self.create_post("Search One", "Post Search One", self.user)
self.create_post("Normal One", "Post Normal One", self.user)
self.create_post("Search Two", "Post Search Two", self.user)
self.create_post("Normal Two", "Post Normal Two", self.user)
self.create_post("Search THree", "Post Search Three", self.user)
self.create_post("Normal Three", "Post Normal Three", self.user)
self.create_post("What is this", "Post different", self.user)
res = self.app.get('/api/post/search',
query_string = {
'title': 'Search'
}, follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual(3, len(post_data))
def test_searchAllNoneFound(self):
import json
other = self.create_user('other', 'other')
res = self.app.get('/api/posts', follow_redirects=True)
self.assertEqual(res.status, '404 NOT FOUND')
def test_searchAll(self):
import json
other = self.create_user('other', 'other')
self.create_post("Search One", "Post Search One", self.user)
self.create_post("Normal One", "Post Normal One", self.user)
self.create_post("Search Two", "Post Search Two", other)
self.create_post("Normal Two", "Post Normal Two", other)
self.create_post("Search THree", "Post Search Three", other)
self.create_post("Normal Three", "Post Normal Three", other)
self.create_post("What is this", "Post different", self.user)
res = self.app.get('/api/posts', follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual(7, len(post_data))
def test_searchbyAuthor(self):
import json
other = self.create_user('other', 'other')
self.create_post("Search One", "Post Search One", self.user)
self.create_post("Normal One", "Post Normal One", self.user)
self.create_post("Search Two", "Post Search Two", other)
self.create_post("Normal Two", "Post Normal Two", other)
self.create_post("Search THree", "Post Search Three", other)
self.create_post("Normal Three", "Post Normal Three", other)
self.create_post("What is this", "Post different", self.user)
res = self.app.get('/api/user/1/posts', follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual(3, len(post_data))
def test_searchbyDate(self):
from datetime import datetime
import json
self.create_post("Search One", "Post Search One", self.user,
datetime(2017, 7, 1, 1))
self.create_post("Normal One", "Post Normal One", self.user)
self.create_post("Search Two", "Post Search Two", self.user,
datetime(2017, 7, 1, 2))
self.create_post("Normal Two", "Post Normal Two", self.user)
self.create_post("Search THree", "Post Search Three", self.user,
datetime(2017, 7, 1, 3))
self.create_post("Normal Three", "Post Normal Three", self.user)
self.create_post("What is this", "Post different", self.user,
datetime(2017, 7, 1, 4))
res = self.app.get('/api/post/search',
query_string = {
'cdate': '2017-7-1',
}, follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual(4, len(post_data))
def test_searchbyTitleandAuthor(self):
other = self.create_user('other', 'other')
import json
self.create_post("Search One", "Post Search One", self.user)
self.create_post("Normal One", "Post Normal One", self.user)
self.create_post("Search Two", "Post Search Two", other)
self.create_post("Normal Two", "Post Normal Two", other)
self.create_post("Search THree", "Post Search Three", self.user)
self.create_post("Normal Three", "Post Normal Three", other)
self.create_post("What is this", "Post different", self.user)
res = self.app.get('/api/user/1/posts/search',
query_string = {
'title': 'Search',
}, follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual(2, len(post_data))
def test_searchbyDateandAuthor(self):
from datetime import datetime
import json
other = self.create_user('other', 'other')
self.create_post("Search One", "Post Search One", other,
datetime(2017, 7, 1, 1))
self.create_post("Normal One", "Post Normal One", other)
self.create_post("Search Two", "Post Search Two", self.user,
datetime(2017, 7, 1, 2))
self.create_post("Normal Two", "Post Normal Two", other)
self.create_post("Search THree", "Post Search Three", self.user,
datetime(2017, 7, 1, 3))
self.create_post("Normal Three", "Post Normal Three", other)
self.create_post("What is this", "Post different", self.user,
datetime(2017, 7, 1, 4))
res = self.app.get('/api/user/1/posts/search',
query_string = {
'cdate': '2017-7-1',
}, follow_redirects=True)
self.assertEqual(res.status, '200 OK')
post_data = json.loads(res.data)
self.assertEqual(3, len(post_data))
| [
"datetime.datetime",
"ghostwriter.mm.drop",
"json.loads",
"ghostwriter.mm.init",
"ghostwriter.app.test_client",
"ghostwriter.mm.setDatabaseURI",
"ghostwriter.Post.Post",
"ghostwriter.Post.PostManager",
"ghostwriter.mm.create",
"ghostwriter.User.User",
"ghostwriter.UserManager.UserManager"
] | [((217, 264), 'ghostwriter.mm.setDatabaseURI', 'mm.setDatabaseURI', (['"""sqlite:////tmp/unittest.db"""'], {}), "('sqlite:////tmp/unittest.db')\n", (234, 264), False, 'from ghostwriter import app, mm\n'), ((273, 282), 'ghostwriter.mm.init', 'mm.init', ([], {}), '()\n', (280, 282), False, 'from ghostwriter import app, mm\n'), ((291, 302), 'ghostwriter.mm.create', 'mm.create', ([], {}), '()\n', (300, 302), False, 'from ghostwriter import app, mm\n'), ((322, 339), 'ghostwriter.app.test_client', 'app.test_client', ([], {}), '()\n', (337, 339), False, 'from ghostwriter import app, mm\n'), ((455, 464), 'ghostwriter.mm.drop', 'mm.drop', ([], {}), '()\n', (462, 464), False, 'from ghostwriter import app, mm\n'), ((674, 693), 'ghostwriter.User.User', 'User', (['self.username'], {}), '(self.username)\n', (678, 693), False, 'from ghostwriter.User import User\n'), ((709, 722), 'ghostwriter.UserManager.UserManager', 'UserManager', ([], {}), '()\n', (720, 722), False, 'from ghostwriter.UserManager import UserManager\n'), ((2137, 2157), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (2147, 2157), False, 'import json\n'), ((2295, 2315), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (2305, 2315), False, 'import json\n'), ((2964, 2991), 'ghostwriter.Post.Post', 'Post', (['(1)', '"""Get Content Test"""'], {}), "(1, 'Get Content Test')\n", (2968, 2991), False, 'from ghostwriter.Post import Post, PostManager\n'), ((3042, 3055), 'ghostwriter.Post.PostManager', 'PostManager', ([], {}), '()\n', (3053, 3055), False, 'from ghostwriter.Post import Post, PostManager\n'), ((3517, 3544), 'ghostwriter.Post.Post', 'Post', (['(1)', '"""Get Content Test"""'], {}), "(1, 'Get Content Test')\n", (3521, 3544), False, 'from ghostwriter.Post import Post, PostManager\n'), ((3595, 3608), 'ghostwriter.Post.PostManager', 'PostManager', ([], {}), '()\n', (3606, 3608), False, 'from ghostwriter.Post import Post, PostManager\n'), ((4318, 4342), 'ghostwriter.Post.Post', 'Post', (['(1)', '"""Get Meta Test"""'], {}), "(1, 'Get Meta Test')\n", (4322, 4342), False, 'from ghostwriter.Post import Post, PostManager\n'), ((4393, 4406), 'ghostwriter.Post.PostManager', 'PostManager', ([], {}), '()\n', (4404, 4406), False, 'from ghostwriter.Post import Post, PostManager\n'), ((4824, 4844), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (4834, 4844), False, 'import json\n'), ((5110, 5137), 'ghostwriter.Post.Post', 'Post', (['(1)', '"""Get Content Test"""'], {}), "(1, 'Get Content Test')\n", (5114, 5137), False, 'from ghostwriter.Post import Post, PostManager\n'), ((5188, 5201), 'ghostwriter.Post.PostManager', 'PostManager', ([], {}), '()\n', (5199, 5201), False, 'from ghostwriter.Post import Post, PostManager\n'), ((5657, 5704), 'ghostwriter.mm.setDatabaseURI', 'mm.setDatabaseURI', (['"""sqlite:////tmp/unittest.db"""'], {}), "('sqlite:////tmp/unittest.db')\n", (5674, 5704), False, 'from ghostwriter import app, mm\n'), ((5713, 5722), 'ghostwriter.mm.init', 'mm.init', ([], {}), '()\n', (5720, 5722), False, 'from ghostwriter import app, mm\n'), ((5731, 5742), 'ghostwriter.mm.create', 'mm.create', ([], {}), '()\n', (5740, 5742), False, 'from ghostwriter import app, mm\n'), ((5762, 5779), 'ghostwriter.app.test_client', 'app.test_client', ([], {}), '()\n', (5777, 5779), False, 'from ghostwriter import app, mm\n'), ((5866, 5875), 'ghostwriter.mm.drop', 'mm.drop', ([], {}), '()\n', (5873, 5875), False, 'from ghostwriter import app, mm\n'), ((6034, 6048), 'ghostwriter.User.User', 'User', (['username'], 
{}), '(username)\n', (6038, 6048), False, 'from ghostwriter.User import User\n'), ((6064, 6077), 'ghostwriter.UserManager.UserManager', 'UserManager', ([], {}), '()\n', (6075, 6077), False, 'from ghostwriter.UserManager import UserManager\n'), ((6274, 6304), 'ghostwriter.Post.Post', 'Post', (['author.uid', 'title', 'cdate'], {}), '(author.uid, title, cdate)\n', (6278, 6304), False, 'from ghostwriter.Post import Post, PostManager\n'), ((7128, 7175), 'ghostwriter.mm.setDatabaseURI', 'mm.setDatabaseURI', (['"""sqlite:////tmp/unittest.db"""'], {}), "('sqlite:////tmp/unittest.db')\n", (7145, 7175), False, 'from ghostwriter import app, mm\n'), ((7184, 7193), 'ghostwriter.mm.init', 'mm.init', ([], {}), '()\n', (7191, 7193), False, 'from ghostwriter import app, mm\n'), ((7202, 7213), 'ghostwriter.mm.create', 'mm.create', ([], {}), '()\n', (7211, 7213), False, 'from ghostwriter import app, mm\n'), ((7233, 7250), 'ghostwriter.app.test_client', 'app.test_client', ([], {}), '()\n', (7248, 7250), False, 'from ghostwriter import app, mm\n'), ((7337, 7346), 'ghostwriter.mm.drop', 'mm.drop', ([], {}), '()\n', (7344, 7346), False, 'from ghostwriter import app, mm\n'), ((7505, 7519), 'ghostwriter.User.User', 'User', (['username'], {}), '(username)\n', (7509, 7519), False, 'from ghostwriter.User import User\n'), ((7535, 7548), 'ghostwriter.UserManager.UserManager', 'UserManager', ([], {}), '()\n', (7546, 7548), False, 'from ghostwriter.UserManager import UserManager\n'), ((7745, 7775), 'ghostwriter.Post.Post', 'Post', (['author.uid', 'title', 'cdate'], {}), '(author.uid, title, cdate)\n', (7749, 7775), False, 'from ghostwriter.Post import Post, PostManager\n'), ((7818, 7831), 'ghostwriter.Post.PostManager', 'PostManager', ([], {}), '()\n', (7829, 7831), False, 'from ghostwriter.Post import Post, PostManager\n'), ((8636, 8656), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (8646, 8656), False, 'import json\n'), ((9647, 9667), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (9657, 9667), False, 'import json\n'), ((10436, 10456), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (10446, 10456), False, 'import json\n'), ((11484, 11504), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (11494, 11504), False, 'import json\n'), ((12384, 12404), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (12394, 12404), False, 'import json\n'), ((13484, 13504), 'json.loads', 'json.loads', (['res.data'], {}), '(res.data)\n', (13494, 13504), False, 'import json\n'), ((10679, 10702), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(1)'], {}), '(2017, 7, 1, 1)\n', (10687, 10702), False, 'from datetime import datetime\n'), ((10858, 10881), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(2)'], {}), '(2017, 7, 1, 2)\n', (10866, 10881), False, 'from datetime import datetime\n'), ((11041, 11064), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(3)'], {}), '(2017, 7, 1, 3)\n', (11049, 11064), False, 'from datetime import datetime\n'), ((11225, 11248), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(4)'], {}), '(2017, 7, 1, 4)\n', (11233, 11248), False, 'from datetime import datetime\n'), ((12683, 12706), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(1)'], {}), '(2017, 7, 1, 1)\n', (12691, 12706), False, 'from datetime import datetime\n'), ((12858, 12881), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(2)'], {}), '(2017, 7, 1, 2)\n', (12866, 12881), False, 'from datetime import 
datetime\n'), ((13037, 13060), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(3)'], {}), '(2017, 7, 1, 3)\n', (13045, 13060), False, 'from datetime import datetime\n'), ((13217, 13240), 'datetime.datetime', 'datetime', (['(2017)', '(7)', '(1)', '(4)'], {}), '(2017, 7, 1, 4)\n', (13225, 13240), False, 'from datetime import datetime\n')] |
import os
import path
def get_location(text):
lines = text.split("\n")
res = []
for line in lines:
if not line.startswith("~~ location"):
break
_, _, key, path = line.split()
res.append((key, path))
return res
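# render() keeps ordinary lines, honours "~~ contentstart"/"~~ contentend" blocks for the given key, and recursively expands "~~ include <file>" directives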
def render(text, key):
lines = text.split("\n")
res = []
in_flag = True
for line in lines:
if not line.startswith("~~"):
if in_flag:
res.append(line)
continue
args = line.split()
if args[1] == "location":
continue
elif args[1] == "contentstart":
if args[2] != key:
in_flag = False
elif args[1] == "contentend":
if args[2] != key:
in_flag = True
elif args[1] == "include":
res.extend(render(open(args[2], 'r', encoding='utf-8').read(), key))
else:
raise RuntimeError("Unknown tags %s" % args[1])
return res
if __name__ == "__main__":
for filename in os.listdir("./"):
if not filename.endswith(".md") and not filename.endswith(".rst"):
continue
print(filename)
file = open(filename, "r", encoding='utf-8').read()
locations = get_location(file)
for key, path in locations:
text = render(file, key)
open(path, 'w', encoding='utf-8').write("\n".join(text) + "\n")
print("render %s with %s" % (path, key))
| [
"os.listdir"
] | [((1029, 1045), 'os.listdir', 'os.listdir', (['"""./"""'], {}), "('./')\n", (1039, 1045), False, 'import os\n')] |
import argparse
import os
import pika
from decouple import config
import importlib
simple_queue_read = importlib.import_module('simple_queue_read')
simple_queue_publish = importlib.import_module('simple_queue_publish')
URL = config('URL')
url = os.environ.get('CLOUDAMQP_URL', URL)
params = pika.URLParameters(url)
params.socket_timeout = 5
connection = pika.BlockingConnection(params)
channel = connection.channel() # start a channel
channel.queue_declare(queue='hello') # Declare a queue
parser = argparse.ArgumentParser(description='How to')
parser.add_argument('-read', action='store_true')
flags = parser.parse_args()
if flags.read:
simple_queue_read.read_queue(channel)
else:
simple_queue_publish.publish_queue(channel)
connection.close()
| [
"importlib.import_module",
"argparse.ArgumentParser",
"pika.URLParameters",
"decouple.config",
"os.environ.get",
"pika.BlockingConnection"
] | [((104, 148), 'importlib.import_module', 'importlib.import_module', (['"""simple_queue_read"""'], {}), "('simple_queue_read')\n", (127, 148), False, 'import importlib\n'), ((172, 219), 'importlib.import_module', 'importlib.import_module', (['"""simple_queue_publish"""'], {}), "('simple_queue_publish')\n", (195, 219), False, 'import importlib\n'), ((227, 240), 'decouple.config', 'config', (['"""URL"""'], {}), "('URL')\n", (233, 240), False, 'from decouple import config\n'), ((248, 284), 'os.environ.get', 'os.environ.get', (['"""CLOUDAMQP_URL"""', 'URL'], {}), "('CLOUDAMQP_URL', URL)\n", (262, 284), False, 'import os\n'), ((294, 317), 'pika.URLParameters', 'pika.URLParameters', (['url'], {}), '(url)\n', (312, 317), False, 'import pika\n'), ((357, 388), 'pika.BlockingConnection', 'pika.BlockingConnection', (['params'], {}), '(params)\n', (380, 388), False, 'import pika\n'), ((505, 550), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""How to"""'}), "(description='How to')\n", (528, 550), False, 'import argparse\n')] |
from __future__ import annotations
from .__version__ import __version__ # noqa
from .lib import export
from typing import Type, TypeVar, List, Dict
import praw # type: ignore
import requests
__all__ = [] # type: List
__header__ = 'plex_posters'
# __section__ = 'module'
T_movie_poster_porn_scraper = TypeVar(
'T_movie_poster_porn_scraper', bound="movie_poster_porn_scraper"
)
@export
class movie_poster_porn_scraper(object):
"""Poster scraper
Attributes
----------
reddit_instance : praw.Reddit
A praw instance connected to Reddit
"""
def __init__(self, instance: praw.Reddit) -> None:
"""
Parameters
----------
instance : praw.Reddit
A praw instance connected to Reddit
"""
super().__init__()
self.reddit_instance = instance
@classmethod
def create_instance(
cls: Type[T_movie_poster_porn_scraper],
client_id: str,
client_secret: str,
user_agent: str,
) -> T_movie_poster_porn_scraper:
"""`classmethod` to connect to reddit using the api.
Parameters
----------
client_id : str
a valid client id
client_secret : str
the secret key for the client
user_agent : str
a user agent
"""
reddit_instance = praw.Reddit(
client_id=client_id,
client_secret=client_secret,
user_agent=user_agent,
)
return cls(reddit_instance)
def get_hot_posters(
self,
) -> T_movie_poster_porn_scraper:
"""
"""
self._poster_urls: Dict = {}
for post in self.reddit_instance.subreddit('MoviePosterPorn').hot(
limit=10
):
print(post.title)
print(post.url)
# print(dir(post))
# self._poster_urls.append(post.url)
self._poster_urls[post.title] = post.url
print(self._poster_urls)
return self
def get_posters(self):
"""download the posters
Returns
-------
self
"""
for title, url in self._poster_urls.items():
r = requests.get(url)
with open('posters/' + title + '.jpg', 'wb') as p:
p.write(r.content)
return self
| [
"praw.Reddit",
"requests.get",
"typing.TypeVar"
] | [((306, 379), 'typing.TypeVar', 'TypeVar', (['"""T_movie_poster_porn_scraper"""'], {'bound': '"""movie_poster_porn_scraper"""'}), "('T_movie_poster_porn_scraper', bound='movie_poster_porn_scraper')\n", (313, 379), False, 'from typing import Type, TypeVar, List, Dict\n'), ((1359, 1448), 'praw.Reddit', 'praw.Reddit', ([], {'client_id': 'client_id', 'client_secret': 'client_secret', 'user_agent': 'user_agent'}), '(client_id=client_id, client_secret=client_secret, user_agent=\n user_agent)\n', (1370, 1448), False, 'import praw\n'), ((2205, 2222), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2217, 2222), False, 'import requests\n')] |
#!/usr/bin/env python
from __future__ import division, unicode_literals
import argparse
from onmt.translate.Translator import make_translator
import onmt.io
import onmt.translate
import onmt
import onmt.ModelConstructor
import onmt.modules
import onmt.opts
import timeit
def main(opt):
translator = make_translator(opt, report_score=True)
start = timeit.default_timer()
_, attns_info, oov_info, copy_info, context_attns_info = translator.translate(opt.src_dir, opt.src, opt.tgt,
opt.batch_size, opt.attn_debug)
end = timeit.default_timer()
print("Translation takes {}s".format(end-start))
# currently attns_info,oov_info only contain first index data of batch
if len(context_attns_info) == 0:
return attns_info, oov_info, copy_info
else:
return attns_info, oov_info, copy_info, context_attns_info
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='translate.py',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
onmt.opts.add_md_help_argument(parser)
onmt.opts.translate_opts(parser)
opt = parser.parse_args()
main(opt)
| [
"onmt.opts.translate_opts",
"argparse.ArgumentParser",
"timeit.default_timer",
"onmt.translate.Translator.make_translator",
"onmt.opts.add_md_help_argument"
] | [((317, 356), 'onmt.translate.Translator.make_translator', 'make_translator', (['opt'], {'report_score': '(True)'}), '(opt, report_score=True)\n', (332, 356), False, 'from onmt.translate.Translator import make_translator\n'), ((374, 396), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (394, 396), False, 'import timeit\n'), ((577, 599), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (597, 599), False, 'import timeit\n'), ((935, 1047), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""translate.py"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='translate.py', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (958, 1047), False, 'import argparse\n'), ((1064, 1102), 'onmt.opts.add_md_help_argument', 'onmt.opts.add_md_help_argument', (['parser'], {}), '(parser)\n', (1094, 1102), False, 'import onmt\n'), ((1108, 1140), 'onmt.opts.translate_opts', 'onmt.opts.translate_opts', (['parser'], {}), '(parser)\n', (1132, 1140), False, 'import onmt\n')] |
# Generated by Django 2.2.16 on 2020-10-13 20:06
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('measurement', '0017_auto_20200609_0533'),
]
operations = [
migrations.RemoveIndex(
model_name='measurement',
name='measurement_endtime_e347a7_idx',
),
migrations.RemoveIndex(
model_name='measurement',
name='measurement_value_520b79_idx',
),
]
| [
"django.db.migrations.RemoveIndex"
] | [((232, 324), 'django.db.migrations.RemoveIndex', 'migrations.RemoveIndex', ([], {'model_name': '"""measurement"""', 'name': '"""measurement_endtime_e347a7_idx"""'}), "(model_name='measurement', name=\n 'measurement_endtime_e347a7_idx')\n", (254, 324), False, 'from django.db import migrations\n'), ((364, 454), 'django.db.migrations.RemoveIndex', 'migrations.RemoveIndex', ([], {'model_name': '"""measurement"""', 'name': '"""measurement_value_520b79_idx"""'}), "(model_name='measurement', name=\n 'measurement_value_520b79_idx')\n", (386, 454), False, 'from django.db import migrations\n')] |
from setuptools import setup
install_requires = ['beautifulsoup4',
'simplejson',
'slacker',
'jira',
'requests',
'websocket-client']
setup(name='linkbot',
install_requires=install_requires,
description='slackbot listening for mentions of jira issues, etc')
| [
"setuptools.setup"
] | [((234, 362), 'setuptools.setup', 'setup', ([], {'name': '"""linkbot"""', 'install_requires': 'install_requires', 'description': '"""slackbot listening for mentions of jira issues, etc"""'}), "(name='linkbot', install_requires=install_requires, description=\n 'slackbot listening for mentions of jira issues, etc')\n", (239, 362), False, 'from setuptools import setup\n')] |
# Generated by Django 2.2.5 on 2019-10-01 15:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("build", "0029_build_org_note")]
operations = [
migrations.AddField(
model_name="build", name="priority", field=models.IntegerField(default=0)
)
]
| [
"django.db.models.IntegerField"
] | [((290, 320), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (309, 320), False, 'from django.db import migrations, models\n')] |
# Generated by Django 3.0.2 on 2020-01-25 19:30
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('restaurant', '0003_recipe_ingreiends'),
]
operations = [
migrations.RemoveField(
model_name='recipe',
name='ingreiends',
),
]
| [
"django.db.migrations.RemoveField"
] | [((229, 291), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""recipe"""', 'name': '"""ingreiends"""'}), "(model_name='recipe', name='ingreiends')\n", (251, 291), False, 'from django.db import migrations\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-10-08 01:12
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Diagnosis_Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Date', models.DateTimeField(auto_now=True, verbose_name='Date of exam')),
('Diagnosis', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Doctor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Doctor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Practice_Name', models.CharField(max_length=200)),
('Practice_Address', models.CharField(max_length=200)),
('Recovery_Phrase', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('username', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Doctor_Exam_Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Date', models.DateTimeField(auto_now=True, verbose_name='Date of exam')),
('Notes', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Doctor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Insurance_Administrator',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Company_Name', models.CharField(max_length=200)),
('Company_Address', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('username', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Insurance_Claim_Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Date', models.DateTimeField(auto_now=True, verbose_name='Date of exam')),
('Amount', models.FloatField(default=0.0)),
('Status', models.CharField(choices=[('Filed', 'Filed'), ('Examining', 'Examining'), ('Rejected', 'Rejected'), ('Accepted', 'Accepted'), ('Paid', 'Paid')], max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Medical_Administrator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='Medical_Administrator_handling_claim_for_doctor', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Medical_Administrator',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Practice_Name', models.CharField(max_length=200)),
('Practice_Address', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Associated_Doctors', models.ManyToManyField(to='smirk.Doctor')),
],
),
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Date', models.DateTimeField(auto_now=True, verbose_name='Note Date')),
('Text', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
],
),
migrations.CreateModel(
name='Nurse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Practice_Name', models.CharField(max_length=200)),
('Practice_Address', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Associated_Doctors', models.ManyToManyField(to='smirk.Doctor')),
('username', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Patient',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('SSN', models.CharField(max_length=200)),
('Address', models.CharField(max_length=200)),
('DOB', models.DateTimeField(verbose_name='Date')),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('username', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Patient_Doctor_Correspondence_Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Doctor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='Doctor', to=settings.AUTH_USER_MODEL)),
('Notes', models.ManyToManyField(to='smirk.Note')),
],
),
migrations.CreateModel(
name='Raw_Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Description', models.CharField(max_length=200)),
('File', models.FileField(upload_to='documents')),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
],
),
migrations.CreateModel(
name='Record',
fields=[
('Record_ID', models.AutoField(primary_key=True, serialize=False)),
('Record_Type', models.CharField(choices=[(b'Doctor Exam', b'Doctor Exam'), (b'Test Result', b'Test Result'), (b'Diagnosis', b'Diagnosis'), (b'Insurance Claim', b'Insurance Claim'), (b'Patient Doctor Correspondence', b'Patient Doctor Correspondence'), (b'Raw', b'Raw')], default='Doctor Exam', max_length=200)),
('Record_Date', models.DateTimeField(auto_now=True, verbose_name='Record_Date')),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Edit_Permissions', models.ManyToManyField(related_name='Edit_Permissions', to=settings.AUTH_USER_MODEL)),
('Owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='Owner', to=settings.AUTH_USER_MODEL)),
('Patient', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='Patient', to=settings.AUTH_USER_MODEL)),
('View_Permissions', models.ManyToManyField(related_name='View_Permissions', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='System_Administrator',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Date', models.DateTimeField(verbose_name='Date')),
],
),
migrations.CreateModel(
name='Test_Results_Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Date', models.DateTimeField(auto_now=True, verbose_name='Date of exam')),
('Lab', models.CharField(max_length=200)),
('Notes', models.CharField(max_length=200)),
('created_at', models.DateTimeField(auto_now=True, verbose_name='Date')),
('Doctor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('Record', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Record')),
],
),
migrations.AddField(
model_name='raw_record',
name='Record',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Record'),
),
migrations.AddField(
model_name='patient_doctor_correspondence_record',
name='Record',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Record'),
),
migrations.AddField(
model_name='note',
name='Patient_Doctor_Correspondence',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Patient_Doctor_Correspondence_Record'),
),
migrations.AddField(
model_name='medical_administrator',
name='Associated_Nurses',
field=models.ManyToManyField(to='smirk.Nurse'),
),
migrations.AddField(
model_name='medical_administrator',
name='username',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='insurance_claim_record',
name='Record',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Record'),
),
migrations.AddField(
model_name='doctor_exam_record',
name='Record',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Record'),
),
migrations.AddField(
model_name='diagnosis_record',
name='Record',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smirk.Record'),
),
]
| [
"django.db.models.FloatField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.FileField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((310, 367), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (341, 367), False, 'from django.db import migrations, models\n'), ((9689, 9798), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Record')\n", (9706, 9798), False, 'from django.db import migrations, models\n'), ((9943, 10052), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Record')\n", (9960, 10052), False, 'from django.db import migrations, models\n'), ((10188, 10327), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Patient_Doctor_Correspondence_Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Patient_Doctor_Correspondence_Record')\n", (10205, 10327), False, 'from django.db import migrations, models\n'), ((10468, 10508), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""smirk.Nurse"""'}), "(to='smirk.Nurse')\n", (10490, 10508), False, 'from django.db import migrations, models\n'), ((10645, 10764), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (10662, 10764), False, 'from django.db import migrations, models\n'), ((10895, 11004), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Record')\n", (10912, 11004), False, 'from django.db import migrations, models\n'), ((11131, 11240), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Record')\n", (11148, 11240), False, 'from django.db import migrations, models\n'), ((11365, 11474), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Record')\n", (11382, 11474), False, 'from django.db import migrations, models\n'), ((508, 601), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (524, 601), False, 'from django.db import migrations, models\n'), ((625, 689), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date of exam"""'}), "(auto_now=True, verbose_name='Date of exam')\n", (645, 689), False, 'from 
django.db import migrations, models\n'), ((722, 754), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (738, 754), False, 'from django.db import migrations, models\n'), ((788, 844), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (808, 844), False, 'from django.db import migrations, models\n'), ((874, 993), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (891, 993), False, 'from django.db import migrations, models\n'), ((1120, 1213), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1136, 1213), False, 'from django.db import migrations, models\n'), ((1246, 1278), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1262, 1278), False, 'from django.db import migrations, models\n'), ((1318, 1350), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1334, 1350), False, 'from django.db import migrations, models\n'), ((1389, 1421), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1405, 1421), False, 'from django.db import migrations, models\n'), ((1455, 1511), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (1475, 1511), False, 'from django.db import migrations, models\n'), ((1543, 1662), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (1560, 1662), False, 'from django.db import migrations, models\n'), ((1801, 1894), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1817, 1894), False, 'from django.db import migrations, models\n'), ((1918, 1982), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date of exam"""'}), "(auto_now=True, verbose_name='Date of exam')\n", (1938, 1982), False, 'from django.db import migrations, models\n'), ((2011, 2043), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2027, 2043), False, 'from django.db import migrations, models\n'), ((2077, 2133), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (2097, 2133), False, 'from django.db import migrations, models\n'), ((2163, 2282), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, 
null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (2180, 2282), False, 'from django.db import migrations, models\n'), ((2426, 2519), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2442, 2519), False, 'from django.db import migrations, models\n'), ((2551, 2583), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2567, 2583), False, 'from django.db import migrations, models\n'), ((2622, 2654), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2638, 2654), False, 'from django.db import migrations, models\n'), ((2688, 2744), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (2708, 2744), False, 'from django.db import migrations, models\n'), ((2776, 2895), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (2793, 2895), False, 'from django.db import migrations, models\n'), ((3038, 3131), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3054, 3131), False, 'from django.db import migrations, models\n'), ((3155, 3219), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date of exam"""'}), "(auto_now=True, verbose_name='Date of exam')\n", (3175, 3219), False, 'from django.db import migrations, models\n'), ((3249, 3279), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (3266, 3279), False, 'from django.db import migrations, models\n'), ((3309, 3478), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('Filed', 'Filed'), ('Examining', 'Examining'), ('Rejected', 'Rejected'),\n ('Accepted', 'Accepted'), ('Paid', 'Paid')]", 'max_length': '(200)'}), "(choices=[('Filed', 'Filed'), ('Examining', 'Examining'), (\n 'Rejected', 'Rejected'), ('Accepted', 'Accepted'), ('Paid', 'Paid')],\n max_length=200)\n", (3325, 3478), False, 'from django.db import migrations, models\n'), ((3503, 3559), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (3523, 3559), False, 'from django.db import migrations, models\n'), ((3604, 3797), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""Medical_Administrator_handling_claim_for_doctor"""', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name=\n 'Medical_Administrator_handling_claim_for_doctor', to=settings.\n AUTH_USER_MODEL)\n", (3621, 3797), False, 'from django.db import migrations, models\n'), ((3929, 4022), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 
'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3945, 4022), False, 'from django.db import migrations, models\n'), ((4055, 4087), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (4071, 4087), False, 'from django.db import migrations, models\n'), ((4127, 4159), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (4143, 4159), False, 'from django.db import migrations, models\n'), ((4193, 4249), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (4213, 4249), False, 'from django.db import migrations, models\n'), ((4291, 4332), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""smirk.Doctor"""'}), "(to='smirk.Doctor')\n", (4313, 4332), False, 'from django.db import migrations, models\n'), ((4462, 4555), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4478, 4555), False, 'from django.db import migrations, models\n'), ((4579, 4640), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Note Date"""'}), "(auto_now=True, verbose_name='Note Date')\n", (4599, 4640), False, 'from django.db import migrations, models\n'), ((4668, 4700), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (4684, 4700), False, 'from django.db import migrations, models\n'), ((4734, 4790), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (4754, 4790), False, 'from django.db import migrations, models\n'), ((4921, 5014), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4937, 5014), False, 'from django.db import migrations, models\n'), ((5047, 5079), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (5063, 5079), False, 'from django.db import migrations, models\n'), ((5119, 5151), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (5135, 5151), False, 'from django.db import migrations, models\n'), ((5185, 5241), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (5205, 5241), False, 'from django.db import migrations, models\n'), ((5283, 5324), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""smirk.Doctor"""'}), "(to='smirk.Doctor')\n", (5305, 5324), False, 'from django.db import migrations, models\n'), ((5356, 5475), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (5373, 5475), False, 'from django.db import 
migrations, models\n'), ((5603, 5696), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5619, 5696), False, 'from django.db import migrations, models\n'), ((5719, 5751), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (5735, 5751), False, 'from django.db import migrations, models\n'), ((5782, 5814), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (5798, 5814), False, 'from django.db import migrations, models\n'), ((5841, 5882), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""Date"""'}), "(verbose_name='Date')\n", (5861, 5882), False, 'from django.db import migrations, models\n'), ((5916, 5972), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (5936, 5972), False, 'from django.db import migrations, models\n'), ((6004, 6123), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (6021, 6123), False, 'from django.db import migrations, models\n'), ((6280, 6373), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (6296, 6373), False, 'from django.db import migrations, models\n'), ((6403, 6459), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (6423, 6459), False, 'from django.db import migrations, models\n'), ((6489, 6631), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""Doctor"""', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='Doctor', to=settings.AUTH_USER_MODEL)\n", (6506, 6631), False, 'from django.db import migrations, models\n'), ((6655, 6694), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""smirk.Note"""'}), "(to='smirk.Note')\n", (6677, 6694), False, 'from django.db import migrations, models\n'), ((6830, 6923), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (6846, 6923), False, 'from django.db import migrations, models\n'), ((6954, 6986), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (6970, 6986), False, 'from django.db import migrations, models\n'), ((7014, 7053), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""documents"""'}), "(upload_to='documents')\n", (7030, 7053), False, 'from django.db import migrations, models\n'), ((7087, 7143), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 
'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (7107, 7143), False, 'from django.db import migrations, models\n'), ((7282, 7333), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (7298, 7333), False, 'from django.db import migrations, models\n'), ((7368, 7678), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[(b'Doctor Exam', b'Doctor Exam'), (b'Test Result', b'Test Result'), (\n b'Diagnosis', b'Diagnosis'), (b'Insurance Claim', b'Insurance Claim'),\n (b'Patient Doctor Correspondence', b'Patient Doctor Correspondence'), (\n b'Raw', b'Raw')]", 'default': '"""Doctor Exam"""', 'max_length': '(200)'}), "(choices=[(b'Doctor Exam', b'Doctor Exam'), (b'Test Result',\n b'Test Result'), (b'Diagnosis', b'Diagnosis'), (b'Insurance Claim',\n b'Insurance Claim'), (b'Patient Doctor Correspondence',\n b'Patient Doctor Correspondence'), (b'Raw', b'Raw')], default=\n 'Doctor Exam', max_length=200)\n", (7384, 7678), False, 'from django.db import migrations, models\n'), ((7696, 7759), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Record_Date"""'}), "(auto_now=True, verbose_name='Record_Date')\n", (7716, 7759), False, 'from django.db import migrations, models\n'), ((7793, 7849), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (7813, 7849), False, 'from django.db import migrations, models\n'), ((7889, 7978), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""Edit_Permissions"""', 'to': 'settings.AUTH_USER_MODEL'}), "(related_name='Edit_Permissions', to=settings.\n AUTH_USER_MODEL)\n", (7911, 7978), False, 'from django.db import migrations, models\n'), ((8002, 8143), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""Owner"""', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='Owner', to=settings.AUTH_USER_MODEL)\n", (8019, 8143), False, 'from django.db import migrations, models\n'), ((8169, 8312), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""Patient"""', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='Patient', to=settings.AUTH_USER_MODEL)\n", (8186, 8312), False, 'from django.db import migrations, models\n'), ((8347, 8436), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""View_Permissions"""', 'to': 'settings.AUTH_USER_MODEL'}), "(related_name='View_Permissions', to=settings.\n AUTH_USER_MODEL)\n", (8369, 8436), False, 'from django.db import migrations, models\n'), ((8577, 8670), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8593, 8670), False, 'from django.db import migrations, models\n'), ((8694, 8735), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""Date"""'}), "(verbose_name='Date')\n", (8714, 8735), False, 'from 
django.db import migrations, models\n'), ((8880, 8973), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8896, 8973), False, 'from django.db import migrations, models\n'), ((8997, 9061), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date of exam"""'}), "(auto_now=True, verbose_name='Date of exam')\n", (9017, 9061), False, 'from django.db import migrations, models\n'), ((9088, 9120), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (9104, 9120), False, 'from django.db import migrations, models\n'), ((9149, 9181), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (9165, 9181), False, 'from django.db import migrations, models\n'), ((9215, 9271), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date"""'}), "(auto_now=True, verbose_name='Date')\n", (9235, 9271), False, 'from django.db import migrations, models\n'), ((9301, 9420), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (9318, 9420), False, 'from django.db import migrations, models\n'), ((9445, 9554), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""smirk.Record"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='smirk.Record')\n", (9462, 9554), False, 'from django.db import migrations, models\n')] |
# routes for front-end part of project
from flask import url_for, render_template, request
from server import app
@app.route('/', methods = ['GET'])
def index_page():
return render_template('/front-end/index.html')
@app.route('/login', methods = ['GET'])
def login_page():
return render_template('/front-end/login.html')
@app.route('/forgot', methods = ['GET'])
def forgot_page():
return render_template('/front-end/forgot.html')
@app.route('/flipbook', methods = ['GET'])
def flipbook_page():
try:
id = request.args.get('id')
facebook_logo_image_url = request.url_root + url_for('files', filename=f'{id}/logo_image/logo.jpg')
return render_template('/front-end/flipbook.html', facebook_logo_image_url = facebook_logo_image_url)
    except Exception:
return render_template('/front-end/flipbook.html', facebook_logo_image_url = '')
@app.route('/confirm-page', methods = ['GET'])
def confirm_page():
return render_template('/front-end/confirm-page.html')
| [
"flask.render_template",
"flask.request.args.get",
"server.app.route",
"flask.url_for"
] | [((126, 157), 'server.app.route', 'app.route', (['"""/"""'], {'methods': "['GET']"}), "('/', methods=['GET'])\n", (135, 157), False, 'from server import app\n'), ((236, 272), 'server.app.route', 'app.route', (['"""/login"""'], {'methods': "['GET']"}), "('/login', methods=['GET'])\n", (245, 272), False, 'from server import app\n'), ((351, 388), 'server.app.route', 'app.route', (['"""/forgot"""'], {'methods': "['GET']"}), "('/forgot', methods=['GET'])\n", (360, 388), False, 'from server import app\n'), ((469, 508), 'server.app.route', 'app.route', (['"""/flipbook"""'], {'methods': "['GET']"}), "('/flipbook', methods=['GET'])\n", (478, 508), False, 'from server import app\n'), ((907, 950), 'server.app.route', 'app.route', (['"""/confirm-page"""'], {'methods': "['GET']"}), "('/confirm-page', methods=['GET'])\n", (916, 950), False, 'from server import app\n'), ((191, 231), 'flask.render_template', 'render_template', (['"""/front-end/index.html"""'], {}), "('/front-end/index.html')\n", (206, 231), False, 'from flask import url_for, render_template, request\n'), ((306, 346), 'flask.render_template', 'render_template', (['"""/front-end/login.html"""'], {}), "('/front-end/login.html')\n", (321, 346), False, 'from flask import url_for, render_template, request\n'), ((423, 464), 'flask.render_template', 'render_template', (['"""/front-end/forgot.html"""'], {}), "('/front-end/forgot.html')\n", (438, 464), False, 'from flask import url_for, render_template, request\n'), ((986, 1033), 'flask.render_template', 'render_template', (['"""/front-end/confirm-page.html"""'], {}), "('/front-end/confirm-page.html')\n", (1001, 1033), False, 'from flask import url_for, render_template, request\n'), ((557, 579), 'flask.request.args.get', 'request.args.get', (['"""id"""'], {}), "('id')\n", (573, 579), False, 'from flask import url_for, render_template, request\n'), ((705, 802), 'flask.render_template', 'render_template', (['"""/front-end/flipbook.html"""'], {'facebook_logo_image_url': 'facebook_logo_image_url'}), "('/front-end/flipbook.html', facebook_logo_image_url=\n facebook_logo_image_url)\n", (720, 802), False, 'from flask import url_for, render_template, request\n'), ((634, 688), 'flask.url_for', 'url_for', (['"""files"""'], {'filename': 'f"""{id}/logo_image/logo.jpg"""'}), "('files', filename=f'{id}/logo_image/logo.jpg')\n", (641, 688), False, 'from flask import url_for, render_template, request\n'), ((829, 900), 'flask.render_template', 'render_template', (['"""/front-end/flipbook.html"""'], {'facebook_logo_image_url': '""""""'}), "('/front-end/flipbook.html', facebook_logo_image_url='')\n", (844, 900), False, 'from flask import url_for, render_template, request\n')] |
from uff.ic.mell.sentimentembedding.utils.data_converstion_utils import convert_tensor2array
from uff.ic.mell.sentimentembedding.modelos.modelo import Modelo
import pandas as pd
import numpy as np
import torch
from enum import Enum
from tokenizers import ByteLevelBPETokenizer
class ModeloTransformer(Modelo):
    # mean of the tensors from the concatenation of the last 4 layers - CONTEXT_CONCAT,
    # mean of the tensors from the last layer - CONTEXT_LAST
    # embedding of the [CLS] token - CONTEXT_CLS
    # mean of the static word embeddings - STATIC_AVG
METHOD = Enum("METHOD", "CONTEXT_CONCAT CONTEXT_LAST CONTEXT_CLS STATIC_AVG")
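    # Illustrative sketch (not part of the original class): one way a caller might wire up a
    # BERT model and pick one of the strategies above; the checkpoint and dataframe names are
    # assumptions made only for this example.
    #   from transformers import BertConfig, BertTokenizer, BertModel
    #   config = BertConfig.from_pretrained('bert-base-uncased', output_hidden_states=True)
    #   tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
    #   bert = BertModel.from_pretrained('bert-base-uncased', config=config)
    #   modelo = ModeloTransformer('bert-cls', config, tokenizer, bert,
    #                              ModeloTransformer.METHOD.CONTEXT_CLS)
    #   embeddings_df = modelo.embTexts(df['tweet'])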
def __init__(self, name:str, config, tokenizer, originalModel, embedMethod:METHOD):
"""
        Constructor
        name: any string that identifies the model
        config: a config object from the Transformers library, e.g. BertConfig()
        tokenizer: the model's tokenizer, e.g. BertTokenizer
        originalModel: the model itself, e.g. BertModel
        embedMethod: method used to generate the sentence embeddings. Must be one of the
            options of the METHOD enum
"""
super().__init__(name)
self.config = config
self.tokenizer = tokenizer
self.originalModel = originalModel
self.embedMethod = embedMethod
def embTexts(self, dataSeries:pd.Series, **kwagars) -> pd.DataFrame:
        '''Generates embeddings for the TWEET DATASET as the mean of the token tensors
            Parameters:
                dataSeries: dataframe['tweet']
            return: dataframe with the mean of the tensors of every token that makes up each tweet
'''
retorno = []
if (self.embedMethod != ModeloTransformer.METHOD.STATIC_AVG):
            # TODO: check whether this size really needs to be set explicitly;
            # if it stays like this and some model produces embeddings of a different size it will break
if (self.embedMethod == ModeloTransformer.METHOD.CONTEXT_CONCAT):
                # building the array that will hold the embeddings of the dataframe's tweets
embeddings = np.ndarray((len(dataSeries),3072))
else:
                # building the array that will hold the embeddings of the dataframe's tweets
embeddings = np.ndarray((len(dataSeries),768))
for i, text in enumerate(dataSeries):
                # generating the embedding of the text
tweet = self.get_tweet_embed(text, self.embedMethod)
                # converting it to an array and storing it in the array created above
embeddings[i] = convert_tensor2array(tweet.to(device="cpu"))
return pd.DataFrame(embeddings)
else:
for i, text in enumerate(dataSeries):
retorno.append(self.transform_sentence_to_avgembword(text))
return pd.DataFrame(retorno)
def get_tweet_embed(self, text, method:METHOD, add=True):
        '''Generates the embedding of a single TWEET
            Parameters:
                text: tweet to be tokenized
                method: one of the METHOD enum options
                add: Boolean controlling whether special tokens, such as [CLS], are added
            return: mean of the tensors of every token that makes up the tweet
'''
self.originalModel.cuda()
        # tokenize the text, turn it into a tensor and move it to the GPU
tokens_tensor = torch.tensor([self.tokenizer.encode(text, add_special_tokens=add)]).cuda()
if (method != ModeloTransformer.METHOD.STATIC_AVG):
with torch.no_grad():
out = self.originalModel(tokens_tensor)
                hidden_states = out[2] # keeping only the hidden-state tensors
if (method == ModeloTransformer.METHOD.CONTEXT_CONCAT):
# get last four layers
last_four_layers = [hidden_states[i] for i in (-1, -2, -3, -4)]
# cast layers to a tuple and concatenate over the last dimension
cat_hidden_states = torch.cat(tuple(last_four_layers), dim= -1)
# take the mean of the concatenated vector over the token dimension
cat_sentence_embedding = torch.mean(cat_hidden_states, dim=1)
                return cat_sentence_embedding # sentence embedding: mean of the token embeddings concatenated from the last 4 layers
else:
if(method == ModeloTransformer.METHOD.CONTEXT_LAST):
                    return torch.mean(hidden_states[-1], dim=1) # sentence embedding: mean of the token embeddings from the last layer
else:
if(method == ModeloTransformer.METHOD.CONTEXT_CLS):
return hidden_states[-1][:,0,:]
def transform_sentence_to_avgembword(self, text:str):
"""
        Generates the sentence embedding from the model's static embeddings
        using the mean of the word embeddings
        Parameters:
            text: sentence to embed as the mean of the
            words that compose it
        Return:
            an array with the sentence embedding obtained by averaging
            the embeddings of the tokens that compose it
"""
self.originalModel.cuda()
        # getting the ids of each word in the text
input_ids = self.tokenizer.encode(text, add_special_tokens=False)
#print(input_ids)
        # getting the embedding of each word in the text
#print("#####################")
        ids_tensor = torch.tensor([input_ids]).cuda() # builds a tensor with the ids of the sentence's words
#print(ids_tensor.shape)
        embeddings_palavras = self.originalModel.get_input_embeddings()(ids_tensor) # returns a tensor of shape 1 x numIds x 768
#print("#####################")
#print(embeddings_palavras[0])
        # taking the mean and converting the tensor to an array
#t_stack = torch.stack(embeddings_palavras[0])
#print("#####################")
#print(t_stack)
        mean = torch.mean(embeddings_palavras[0], dim=0) # drop the empty first dimension of the tensor so the mean is taken per column
#print("#####################")
#print (mean)
#print("#####################")
        mean_arr = convert_tensor2array(torch.unsqueeze(mean, 0)) # put the first dimension back so the conversion works
#print (mean_arr)
return mean_arr
def tokenize_sentences(self, sentences):
input_ids = [] # For every sentence...
for sent in sentences:
encoded_sent = self.tokenizer.encode(sent,add_special_tokens=True)
# Add the encoded sentence to the list.
input_ids.append(encoded_sent) # Print sentence 0, now as a list of IDs.
print('Original: ', sentences[0])
print('Token IDs:', input_ids[0])
return input_ids
def train_tokenizer(self,file_path,outDir):
# Initialize a tokenizer
tokenizer = ByteLevelBPETokenizer()
# Customize training
tokenizer.train(files=file_path, vocab_size=52_000, min_frequency=2, special_tokens=[
"<s>",
"<pad>",
"</s>",
"<unk>",
"<mask>",
])
self.tokenizer=tokenizer
tokenizer.save(outDir)
| [
"torch.mean",
"torch.unsqueeze",
"torch.tensor",
"enum.Enum",
"tokenizers.ByteLevelBPETokenizer",
"pandas.DataFrame",
"torch.no_grad"
] | [((569, 637), 'enum.Enum', 'Enum', (['"""METHOD"""', '"""CONTEXT_CONCAT CONTEXT_LAST CONTEXT_CLS STATIC_AVG"""'], {}), "('METHOD', 'CONTEXT_CONCAT CONTEXT_LAST CONTEXT_CLS STATIC_AVG')\n", (573, 637), False, 'from enum import Enum\n'), ((6246, 6287), 'torch.mean', 'torch.mean', (['embeddings_palavras[0]'], {'dim': '(0)'}), '(embeddings_palavras[0], dim=0)\n', (6256, 6287), False, 'import torch\n'), ((7197, 7220), 'tokenizers.ByteLevelBPETokenizer', 'ByteLevelBPETokenizer', ([], {}), '()\n', (7218, 7220), False, 'from tokenizers import ByteLevelBPETokenizer\n'), ((2800, 2824), 'pandas.DataFrame', 'pd.DataFrame', (['embeddings'], {}), '(embeddings)\n', (2812, 2824), True, 'import pandas as pd\n'), ((2984, 3005), 'pandas.DataFrame', 'pd.DataFrame', (['retorno'], {}), '(retorno)\n', (2996, 3005), True, 'import pandas as pd\n'), ((6512, 6536), 'torch.unsqueeze', 'torch.unsqueeze', (['mean', '(0)'], {}), '(mean, 0)\n', (6527, 6536), False, 'import torch\n'), ((3677, 3692), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3690, 3692), False, 'import torch\n'), ((5723, 5748), 'torch.tensor', 'torch.tensor', (['[input_ids]'], {}), '([input_ids])\n', (5735, 5748), False, 'import torch\n'), ((4324, 4360), 'torch.mean', 'torch.mean', (['cat_hidden_states'], {'dim': '(1)'}), '(cat_hidden_states, dim=1)\n', (4334, 4360), False, 'import torch\n'), ((4642, 4678), 'torch.mean', 'torch.mean', (['hidden_states[-1]'], {'dim': '(1)'}), '(hidden_states[-1], dim=1)\n', (4652, 4678), False, 'import torch\n')] |
import editdistance
class EditDistanceService:
INSTACE = None
@classmethod
def create(cls):
if cls.INSTACE is None:
cls.INSTACE = EditDistanceService()
@classmethod
def instance(cls):
if cls.INSTACE is None:
cls.create()
return cls.INSTACE
def compute(self, words1, words2):
return editdistance.eval(words1, words2)
# if len(s1) > len(s2):
# s1, s2 = s2, s1
# distances = range(len(s1) + 1)
# for i2, c2 in enumerate(s2):
# distances_ = [i2+1]
# for i1, c1 in enumerate(s1):
# if c1 == c2:
# distances_.append(distances[i1])
# else:
# distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))
# distances = distances_
# return distances[-1]
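        # Usage sketch (illustrative, not from the original code): the class is meant to be
        # used as a lazy singleton, e.g.
        #   EditDistanceService.instance().compute('kitten', 'sitting')  # -> 3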
| [
"editdistance.eval"
] | [((367, 400), 'editdistance.eval', 'editdistance.eval', (['words1', 'words2'], {}), '(words1, words2)\n', (384, 400), False, 'import editdistance\n')] |
import gym
from tf_rl.common.memory import ReplayBuffer
size = 100000
env = gym.make("CartPole-v0")
memory = ReplayBuffer(size=size, traj_dir="./traj/")
state = env.reset()
action = env.action_space.sample()
next_state, reward, done, info = env.step(action)
env.close()
for _ in range(size):
memory.add(state, action, reward, next_state, done)
print(len(memory))
memory.save()
del memory
memory = ReplayBuffer(size=size, recover_data=True, traj_dir="./traj/")
print(len(memory)) | [
"tf_rl.common.memory.ReplayBuffer",
"gym.make"
] | [((76, 99), 'gym.make', 'gym.make', (['"""CartPole-v0"""'], {}), "('CartPole-v0')\n", (84, 99), False, 'import gym\n'), ((109, 152), 'tf_rl.common.memory.ReplayBuffer', 'ReplayBuffer', ([], {'size': 'size', 'traj_dir': '"""./traj/"""'}), "(size=size, traj_dir='./traj/')\n", (121, 152), False, 'from tf_rl.common.memory import ReplayBuffer\n'), ((404, 466), 'tf_rl.common.memory.ReplayBuffer', 'ReplayBuffer', ([], {'size': 'size', 'recover_data': '(True)', 'traj_dir': '"""./traj/"""'}), "(size=size, recover_data=True, traj_dir='./traj/')\n", (416, 466), False, 'from tf_rl.common.memory import ReplayBuffer\n')] |
import numpy as np
import pandas as pd
from sklearn.model_selection import StratifiedKFold
import lightgbm as lgb
import xgboost as xgb
# read dataset
df_train = pd.read_csv('train.csv')
df_test = pd.read_csv('test.csv')
# build the matrices used below (assumes Porto Seguro-style files: both CSVs carry an 'id'
# column and train.csv additionally carries a binary 'target' column)
test_id = df_test['id'].values
test = df_test.drop(['id'], axis=1)
X = df_train.drop(['id', 'target'], axis=1).values
y = df_train['target'].values
# gini function
def gini(actual, pred, cmpcol = 0, sortcol = 1):
assert( len(actual) == len(pred) )
    all = np.asarray(np.c_[ actual, pred, np.arange(len(actual)) ], dtype=float)
all = all[ np.lexsort((all[:,2], -1*all[:,1])) ]
totalLosses = all[:,0].sum()
giniSum = all[:,0].cumsum().sum() / totalLosses
giniSum -= (len(actual) + 1) / 2.
return giniSum / len(actual)
def gini_normalized(a, p):
return gini(a, p) / gini(a, a)
def gini_xgb(preds, dtrain):
labels = dtrain.get_label()
gini_score = gini_normalized(labels, preds)
return 'gini', gini_score
def gini_lgb(preds, dtrain):
labels = dtrain.get_label()
gini_score = gini_normalized(labels, preds)
return 'gini', gini_score, True
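# Quick sanity check (illustrative values, not from the original script): a prediction that
# ranks the positives perfectly scores 1.0 and a fully reversed ranking scores -1.0.
#   gini_normalized([1, 0, 1, 0], [0.9, 0.1, 0.8, 0.2])   # -> 1.0
#   gini_normalized([1, 0, 1, 0], [0.1, 0.9, 0.2, 0.8])   # -> -1.0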
# define fold number
kfold = 5
skf = StratifiedKFold(n_splits=kfold, shuffle=True, random_state=42)  # shuffle=True so random_state takes effect
sub = pd.DataFrame()
sub['id'] = test_id
sub['target'] = np.zeros_like(test_id)
params_xgd = {
'min_child_weight': 10.0,
'objective': 'binary:logistic',
'max_depth': 7,
'max_delta_step': 1.8,
'colsample_bytree': 0.4,
'subsample': 0.8,
'eta': 0.005,
'gamma': 0.65,
'num_boost_round' : 700
}
params_lgb = {
'max_depth': 7,
'learning_rate': 0.005,
'objective': 'binary'
}
for i, (train_index, test_index) in enumerate(skf.split(X, y)):
print('[Fold %d/%d]' % (i + 1, kfold))
X_train, X_valid = X[train_index], X[test_index]
y_train, y_valid = y[train_index], y[test_index]
d_train = lgb.Dataset(X_train, y_train)
d_valid = lgb.Dataset(X_valid, y_valid)
watchlist = [d_train, d_valid]
model_lgb = lgb.train(params_lgb, d_train, 1600, watchlist, early_stopping_rounds = 70, feval = gini_lgb, verbose_eval = 100)
d_train = xgb.DMatrix(X_train, y_train)
d_valid = xgb.DMatrix(X_valid, y_valid)
d_test = xgb.DMatrix(test.values)
watchlist = [(d_train, 'train'), (d_valid, 'valid')]
model_xgb = xgb.train(params_xgd, d_train, 1600, watchlist, early_stopping_rounds = 70, feval = gini_xgb, maximize = True, verbose_eval = 100)
print('[Fold %d/%d Prediciton:]' % (i + 1, kfold))
    pred_xgb = model_xgb.predict(d_test, ntree_limit = model_xgb.best_ntree_limit)
pred_lgb = model_lgb.predict(test.values)
# 0.7 from xgb, 0.3 from lgb. You can play around here
sub['target'] += (pred_xgb * 0.7 + pred_lgb * 0.3) / kfold | [
"pandas.read_csv",
"xgboost.train",
"lightgbm.train",
"sklearn.model_selection.StratifiedKFold",
"numpy.lexsort",
"lightgbm.Dataset",
"pandas.DataFrame",
"xgboost.DMatrix",
"numpy.zeros_like"
] | [((163, 187), 'pandas.read_csv', 'pd.read_csv', (['"""train.csv"""'], {}), "('train.csv')\n", (174, 187), True, 'import pandas as pd\n'), ((198, 221), 'pandas.read_csv', 'pd.read_csv', (['"""test.csv"""'], {}), "('test.csv')\n", (209, 221), True, 'import pandas as pd\n'), ((1012, 1060), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': 'kfold', 'random_state': '(42)'}), '(n_splits=kfold, random_state=42)\n', (1027, 1060), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((1067, 1081), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1079, 1081), True, 'import pandas as pd\n'), ((1118, 1140), 'numpy.zeros_like', 'np.zeros_like', (['test_id'], {}), '(test_id)\n', (1131, 1140), True, 'import numpy as np\n'), ((1715, 1744), 'lightgbm.Dataset', 'lgb.Dataset', (['X_train', 'y_train'], {}), '(X_train, y_train)\n', (1726, 1744), True, 'import lightgbm as lgb\n'), ((1759, 1788), 'lightgbm.Dataset', 'lgb.Dataset', (['X_valid', 'y_valid'], {}), '(X_valid, y_valid)\n', (1770, 1788), True, 'import lightgbm as lgb\n'), ((1841, 1952), 'lightgbm.train', 'lgb.train', (['params_lgb', 'd_train', '(1600)', 'watchlist'], {'early_stopping_rounds': '(70)', 'feval': 'gini_lgb', 'verbose_eval': '(100)'}), '(params_lgb, d_train, 1600, watchlist, early_stopping_rounds=70,\n feval=gini_lgb, verbose_eval=100)\n', (1850, 1952), True, 'import lightgbm as lgb\n'), ((1970, 1999), 'xgboost.DMatrix', 'xgb.DMatrix', (['X_train', 'y_train'], {}), '(X_train, y_train)\n', (1981, 1999), True, 'import xgboost as xgb\n'), ((2014, 2043), 'xgboost.DMatrix', 'xgb.DMatrix', (['X_valid', 'y_valid'], {}), '(X_valid, y_valid)\n', (2025, 2043), True, 'import xgboost as xgb\n'), ((2057, 2081), 'xgboost.DMatrix', 'xgb.DMatrix', (['test.values'], {}), '(test.values)\n', (2068, 2081), True, 'import xgboost as xgb\n'), ((2156, 2282), 'xgboost.train', 'xgb.train', (['params_xgd', 'd_train', '(1600)', 'watchlist'], {'early_stopping_rounds': '(70)', 'feval': 'gini_xgb', 'maximize': '(True)', 'verbose_eval': '(100)'}), '(params_xgd, d_train, 1600, watchlist, early_stopping_rounds=70,\n feval=gini_xgb, maximize=True, verbose_eval=100)\n', (2165, 2282), True, 'import xgboost as xgb\n'), ((426, 465), 'numpy.lexsort', 'np.lexsort', (['(all[:, 2], -1 * all[:, 1])'], {}), '((all[:, 2], -1 * all[:, 1]))\n', (436, 465), True, 'import numpy as np\n')] |
import os
import re
import warnings
from uuid import uuid4, UUID
import shapely.geometry
import geopandas as gpd
import pandas as pd
import numpy as np
from geojson import LineString, Point, Polygon, Feature, FeatureCollection, MultiPolygon
try:
import simplejson as json
except ImportError:
import json
from .config import get_settings
from ..static import UriType
def _abs_path(path, mkdir=True):
"""Gets the absolute path for a file to be within the Quest directory,
and will create a directory of that filename.
Args:
path (string): A string that is a filename.
mkdir (bool): A boolean if the user wants to create the directory.
Returns:
A string of an absolute path with a file from somewhere with in the Quest directory.
"""
if not os.path.isabs(path):
path = os.path.join(get_quest_dir(), path)
if mkdir:
os.makedirs(path, exist_ok=True)
return path
def bbox2poly(x1, y1, x2, y2, reverse_order=False, as_geojson=False, as_shapely=False):
"""Converts a bounding box to a polygon.
Args:
        x1 (float): The first x coordinate (xmin).
        y1 (float): The first y coordinate (ymin).
        x2 (float): The second x coordinate (xmax).
        y2 (float): The second y coordinate (ymax).
        reverse_order (bool): A boolean to switch the order of the x and y coordinates.
        as_geojson (bool): A bool to convert the polygon to a geojson object.
        as_shapely (bool): A bool to convert the polygon to a shapely object.
    Returns:
        A plain list of corner points if both as_geojson and as_shapely are False.
        If as_geojson is True, a geojson Polygon (or MultiPolygon) is returned.
        If as_shapely is True, a shapely Polygon (or MultiPolygon) is returned;
        shapely takes precedence when both flags are True.
"""
if reverse_order:
x1, y1 = y1, x1
x2, y2 = y2, x2
xmin, xmax = [float(x1), float(x2)]
ymin, ymax = [float(y1), float(y2)]
poly = list([[xmin, ymin], [xmin, ymax], [xmax, ymax], [xmax, ymin]])
poly.append(poly[0])
if not (as_geojson or as_shapely):
return poly
if as_geojson:
polygon = Polygon
multi_polygon = MultiPolygon
if as_shapely:
polygon = shapely.geometry.Polygon
multi_polygon = shapely.geometry.MultiPolygon
xmin2 = xmax2 = None
if xmin < -180:
xmin2 = 360 + xmin
xmin = -180
if xmax > 180:
xmax2 = xmax - 360
xmax = 180
if xmin2 is None and xmax2 is None:
return polygon(poly)
# else bbox spans 180 longitude so create multipolygon
poly1 = list([[xmin, ymin], [xmin, ymax], [xmax, ymax], [xmax, ymin]])
poly1.append(poly1[0])
xmin = xmin2 or -180
xmax = xmax2 or 180
poly2 = list([[xmin, ymin], [xmin, ymax], [xmax, ymax], [xmax, ymin]])
poly2.append(poly2[0])
return multi_polygon(polygons=[polygon(poly1), polygon(poly2)])
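# Example usage (illustrative): a plain bounding box gives a closed ring of corner points,
# while a box that crosses the 180th meridian comes back as a MultiPolygon split at +/-180.
#   bbox2poly(-1.0, 40.0, 2.0, 42.0)
#   bbox2poly(170.0, -10.0, 190.0, 10.0, as_shapely=True)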
def classify_uris(uris, grouped=True, as_dataframe=True, require_same_type=False, exclude=None, raise_if_empty=True):
"""Converts a list of uris into a pandas dataframe.
Notes:
Classified by resource type.
Args:
uris (list or string): List of Quest uris to classify into the following types: 'collections', 'services',
'publishers', or 'datasets'.
grouped (bool): If True returns
Pandas GroupBy object (see: https://pandas.pydata.org/pandas-docs/stable/groupby.html)
as_dataframe (bool): If True returns a Pandas DataFrame
require_same_type (bool): If True raises a `ValueError` if uris of more than one type are passed in.
exclude (list or string): List of uri types to not allow. If a uri of an excluded type is passed in
then a `ValueError` will be raised.
    Returns:
        A pandas DataFrame, a GroupBy object (when `grouped` and `as_dataframe` are True),
        or a dict mapping uri types to lists of uris (when `as_dataframe` is False).
"""
uris = listify(uris)
df = pd.DataFrame(uris, columns=['uri'])
df['type'] = UriType.COLLECTION
uuid_idx = df['uri'].apply(is_uuid)
service_idx = df['uri'].str.startswith('svc://')
publish_idx = df['uri'].str.startswith('pub://')
dataset_idx = uuid_idx & df['uri'].str.startswith('d')
df['type'][service_idx] = UriType.SERVICE
df['type'][publish_idx] = UriType.PUBLISHER
df['type'][dataset_idx] = UriType.DATASET
df.set_index('uri', drop=False, inplace=True)
grouped_df = df.groupby('type')
if raise_if_empty:
if df.empty:
raise ValueError('At least one uri must be specified.')
if exclude is not None:
for uri_type in exclude:
if uri_type in grouped_df.groups:
raise ValueError('Uris for {0} are not allowed.'.format(uri_type))
if require_same_type and len(grouped_df.groups.keys()) > 1:
raise ValueError('All uris must be of the same type')
if not as_dataframe:
groups = {k: list(v) for k, v in grouped_df.groups.items()}
return groups
if grouped:
return grouped_df
return df
def construct_service_uri(provider, service, catalog_id=None):
"""Builds a uri from the given parameters.
Args:
provider (string): A string of the provider.
service (string): A string of the service.
catalog_id (string): A string of the catalog_id.
Returns:
If there is no catalog_id then the uri will just be the provider
and service, else the catalog_id will be appended to the end of the
uri.
"""
uri = 'svc://{}:{}'.format(provider, service)
if catalog_id is not None:
uri = '{}/{}'.format(uri, catalog_id)
return uri
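# Illustrative usage (not part of the original module):
#   construct_service_uri('usgs-nwis', 'dv', '0800345522') -> 'svc://usgs-nwis:dv/0800345522'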
def convert_nodata_to_nans(xarr):
"""
Args:
xarr:
Returns:
"""
nodata_attr = [k for k in xarr.attrs.keys() if k.lower().startswith('nodata')][0]
nodata = xarr.attrs[nodata_attr]
if nodata:
if str(xarr.dtype).startswith('int') or str(xarr.dtype).startswith('uint'):
xarr.values = xarr.values.astype(np.float32)
xarr.values[xarr.values == nodata] = np.nan
return xarr
def get_cache_dir(service=None):
"""Gets the absolute path of the cached directory.
Args:
service (string): A string of the specific service the user wants.
Returns:
A string of the path to the cached directory.
"""
settings = get_settings()
path = _abs_path(settings['CACHE_DIR'])
if service is not None:
path = os.path.join(path, service)
return path
def get_projects_dir():
"""Gets the absolute path of the projects directory within Quest.
Returns:
An absolute path leading to the project directory from within Quest.
"""
settings = get_settings()
return _abs_path(settings['PROJECTS_DIR'], mkdir=False)
def get_quest_dir():
"""Gets the absolute path of the Quest directory.
Returns:
An absolute path of the Quest directory.
"""
settings = get_settings()
return settings['BASE_DIR']
def is_remote_uri(path):
"""Checks if the incoming path is a remote uri.
Args:
path (string): A string that is either a path or uri.
Returns:
If the path is a remote destination then true, false otherwise.
"""
return bool(re.search('^https?\://', path))
def is_uuid(uuid):
"""Check if string is a uuid4.
Notes:
source: https://gist.github.com/ShawnMilo/7777304
Args:
        uuid (string): A universally unique identifier.
    Returns:
        True if the string is a valid version 4 uuid, False otherwise.
"""
try:
val = UUID(uuid, version=4)
except ValueError:
# If it's a value error, then the string is not a valid UUID.
return False
# If the uuid_string is a valid hex code, but an invalid uuid4,
# the UUID.__init__ will convert it to a valid uuid4.
# This is bad for validation purposes.
return val.hex == uuid
def listify(liststr, delimiter=','):
"""Converts a string into a list.
Args:
        liststr (string): A string, list, or other value to convert.
        delimiter (char): The character used to split the string.
    Returns:
        If a string is passed in, a list of the delimited items is returned.
        If nothing is sent in, then None is returned.
        If a list (or tuple, set, dict) is passed in, it is returned unchanged.
        If the value is neither a list nor a string, it is wrapped in a list and returned.
"""
if liststr is None:
return None
if isinstance(liststr, (tuple, list, set, dict)):
return liststr
elif isinstance(liststr, str):
return [s.strip() for s in liststr.split(delimiter)]
else:
return [liststr]
def parse_service_uri(uri):
"""Parses a service uri into separate provider, service, and catalog_id strings.
Examples:
usgs-nwis:dv/0800345522
gebco-bathymetry
usgs-ned:1-arc-second
Args:
uri (string): A string that is a uri.
Returns:
Three strings are returned from the parsed uri.
"""
svc, catalog_id = (uri.split('://')[-1].split('/', 1) + [None])[:2]
provider, service = (svc.split(':') + [None])[:2]
return provider, service, catalog_id
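# Illustrative usage (not part of the original module); this is the inverse of construct_service_uri:
#   parse_service_uri('svc://usgs-nwis:dv/0800345522') -> ('usgs-nwis', 'dv', '0800345522')
#   parse_service_uri('svc://gebco-bathymetry')        -> ('gebco-bathymetry', None, None)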
def setattr_on_dataframe(df, attr, value, warnings_filter='ignore'):
with warnings.catch_warnings():
warnings.simplefilter(warnings_filter)
setattr(df, attr, value)
def to_geodataframe(feature_collection):
"""Converts a dictionary to a GeoPandas Dataframe object.
Args:
feature_collection (dictionary): A dictionary that contains features.
Returns:
A GeoPandas Dataframe.
"""
features = {}
for feature in feature_collection['features']:
data = feature['properties']
data.update({
'service_id': feature['id'],
'geometry': shapely.geometry.shape(feature['geometry'])
})
features[feature['id']] = data
return gpd.GeoDataFrame.from_dict(features, orient='index')
def to_geojson(df):
"""Converts a dataframe to a geojson object.
Args:
df (dataframe): A dataframe that is being converted to a geojson object.
    Returns:
        A geojson FeatureCollection built from the dataframe rows.
"""
_func = {
'LineString': LineString,
'Point': Point,
'Polygon': Polygon,
}
features = []
if not df.empty:
# TODO what is this code doing and is it now obsolete with the new DB?
idx = df.columns.str.startswith('_')
r = {field: field[1:] for field in df.columns[idx]}
for uid, row in df.iterrows():
metadata = json.loads(row[~idx].dropna().to_json())
row = row[idx].rename(index=r)
# create geojson geometry
geometry = None
if row['geom_type'] is not None:
coords = row['geom_coords']
if not isinstance(coords, (list, tuple)):
coords = json.loads(coords)
geometry = _func[row['geom_type']](coords)
del row['geom_type']
del row['geom_coords']
# split fields into properties and metadata
properties = json.loads(row.dropna().to_json())
properties.update({'metadata': metadata})
features.append(Feature(geometry=geometry, properties=properties,
id=uid))
return FeatureCollection(features)
def to_json_default_handler(obj):
"""Gets an attribute from the object.
Notes:
This method is confusing and the name is confusing.
Args:
obj (object): An object of some nature.
Returns:
If the object has an attribute isoformat, then return it.
"""
if hasattr(obj, 'isoformat'):
return obj.isoformat()
def uuid(resource_type):
"""Generate a new uuid.
Notes:
First character of uuid is replaced with 'd' for resource_type dataset.
Args:
resource_type (string): A string that is a type of resource i.e. 'dataset'.
Returns:
A new uuid from the resource type.
"""
uuid = uuid4().hex
if resource_type == 'dataset':
uuid = 'd' + uuid[1:]
return uuid
| [
"json.loads",
"geojson.FeatureCollection",
"uuid.UUID",
"os.path.isabs",
"os.makedirs",
"geojson.Feature",
"os.path.join",
"warnings.catch_warnings",
"uuid.uuid4",
"warnings.simplefilter",
"geopandas.GeoDataFrame.from_dict",
"pandas.DataFrame",
"re.search"
] | [((3981, 4016), 'pandas.DataFrame', 'pd.DataFrame', (['uris'], {'columns': "['uri']"}), "(uris, columns=['uri'])\n", (3993, 4016), True, 'import pandas as pd\n'), ((9949, 10001), 'geopandas.GeoDataFrame.from_dict', 'gpd.GeoDataFrame.from_dict', (['features'], {'orient': '"""index"""'}), "(features, orient='index')\n", (9975, 10001), True, 'import geopandas as gpd\n'), ((11409, 11436), 'geojson.FeatureCollection', 'FeatureCollection', (['features'], {}), '(features)\n', (11426, 11436), False, 'from geojson import LineString, Point, Polygon, Feature, FeatureCollection, MultiPolygon\n'), ((804, 823), 'os.path.isabs', 'os.path.isabs', (['path'], {}), '(path)\n', (817, 823), False, 'import os\n'), ((899, 931), 'os.makedirs', 'os.makedirs', (['path'], {'exist_ok': '(True)'}), '(path, exist_ok=True)\n', (910, 931), False, 'import os\n'), ((6512, 6539), 'os.path.join', 'os.path.join', (['path', 'service'], {}), '(path, service)\n', (6524, 6539), False, 'import os\n'), ((7314, 7345), 're.search', 're.search', (['"""^https?\\\\://"""', 'path'], {}), "('^https?\\\\://', path)\n", (7323, 7345), False, 'import re\n'), ((7645, 7666), 'uuid.UUID', 'UUID', (['uuid'], {'version': '(4)'}), '(uuid, version=4)\n', (7649, 7666), False, 'from uuid import uuid4, UUID\n'), ((9296, 9321), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (9319, 9321), False, 'import warnings\n'), ((9331, 9369), 'warnings.simplefilter', 'warnings.simplefilter', (['warnings_filter'], {}), '(warnings_filter)\n', (9352, 9369), False, 'import warnings\n'), ((12117, 12124), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (12122, 12124), False, 'from uuid import uuid4, UUID\n'), ((11302, 11359), 'geojson.Feature', 'Feature', ([], {'geometry': 'geometry', 'properties': 'properties', 'id': 'uid'}), '(geometry=geometry, properties=properties, id=uid)\n', (11309, 11359), False, 'from geojson import LineString, Point, Polygon, Feature, FeatureCollection, MultiPolygon\n'), ((10957, 10975), 'json.loads', 'json.loads', (['coords'], {}), '(coords)\n', (10967, 10975), False, 'import json\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models
import torchvision.datasets.folder
import torchvision.transforms as transforms
import torchvision.transforms.functional as Ft
from pytorch_transformers import BertTokenizer
import os
import db
from PIL import Image
import cv2
import numpy
import time
import copy
import math
import sys
sys.path.insert(0, './bottom-up-attention/')
sys.path.insert(0, './bottom-up-attention/caffe/python/')
sys.path.insert(0, './bottom-up-attention/lib/')
sys.path.insert(0, './bottom-up-attention/tools/')
sys.path.append('./errorcam')
import caffe
caffe.set_mode_gpu()
from fast_rcnn.config import cfg, cfg_from_file
from fast_rcnn.test import im_detect,_get_blobs
from fast_rcnn.nms_wrapper import nms
import cv2
cfg_from_file('bottom-up-attention/experiments/cfgs/faster_rcnn_end2end_resnet.yml')
weights = 'bottom-up-attention/data/faster_rcnn_models/resnet101_faster_rcnn_final.caffemodel'
prototxt = 'bottom-up-attention/models/vg/ResNet-101/faster_rcnn_end2end_final/test.prototxt'
self_fast_rcnn = caffe.Net(prototxt, caffe.TEST, weights=weights);
import errorcam.models.attention_refine.atten_refine_network as att_refine
from errorcam.scripts.pytorchgradcam.gradcam import GradCam
from scipy.stats import spearmanr as correlation_func_atten
from statsmodels.stats.weightstats import ztest
import numpy as np
import json
#t0=time.time();
#im_file = 'val/n01532829_2439.JPEG'
# Similar to get_detections_from_im
#import requests
#response=requests.get('http://diva-1:5001/val/n01532829_2439.JPEG');
#image=Image.open(BytesIO(response.content));
#image=image.copy();
#im=F.to_tensor(image);
#im=(im*255).permute(1,2,0);
#im=torch.stack((im[:,:,2],im[:,:,1],im[:,:,0]),dim=2);
#im=im.cpu();
#im=im.numpy();
#im = cv2.imread(im_file)
#scores, boxes, attr_scores, rel_scores = im_detect(net, im)
#print('Loaded %f'%(time.time()-t0));
#a=0/0;
#QA classifier
import qa_classifier as qa_classifier
qa_classifier=qa_classifier.qa_classifier;
qtypes=['object', 'color', 'action', 'count', 'time', 'weather']
import model_7x7 as base_model
import lru_cache
import time
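# Cache of Faster R-CNN features keyed by the raw image bytes, so repeated requests for the
# same image can skip the slow detection pass (see get_maskrcnn_features below).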
lru_mask_rcnn=lru_cache.new(100);
class xvqa:
def __init__(self,args_models):
self.in_use=0;
#Prepare ResNet152 for feature extraction
with torch.no_grad():
resnet152=torchvision.models.resnet152(pretrained=True)
resnet152=nn.Sequential(*list(resnet152.children())[:-2]).cuda();
resnet152=nn.DataParallel(resnet152).cuda()
resnet152.eval();
self.resnet152=resnet152;
#Prepare BERT tokenizer for question
self.tokenizer=BertTokenizer.from_pretrained('bert-base-uncased');
self.tokenizer.max_qlength=30;
#Prepare several BERT-VQA models for QA
print('Loading model')
models=[];
qfvs=[];
for m in args_models:
args_m=torch.load(os.path.join(m['root'],'args.pt'));
model=base_model.simple_vqa_model(args_m).cuda();
model=nn.DataParallel(model).cuda()
checkpoint=torch.load(os.path.join(m['root'],'model_checkpoint.pt'));
model.load_state_dict(checkpoint['model_state'])
model.eval()
model.answer_dictionary=torch.load(os.path.join(m['root'],'answer_dictionary.pt'));
model.args=args_m;
models.append(model);
qfv=torch.load(os.path.join(m['root'],'qfv.pt'))
qfvs.append(qfv);
self.models=models;
self.qfvs=qfvs;
self.qfvs_imkey=torch.load('res/models/qfv_imkey.pt');
#Prepare fast-rcnn detector
#cfg_from_file('bottom-up-attention/experiments/cfgs/faster_rcnn_end2end_resnet.yml')
#weights = 'bottom-up-attention/data/faster_rcnn_models/resnet101_faster_rcnn_final.caffemodel'
#prototxt = 'bottom-up-attention/models/vg/ResNet-101/faster_rcnn_end2end_final/test.prototxt'
#self.fast_rcnn = caffe.Net(prototxt, caffe.TEST, weights=weights);
def loadGloveModel(gloveFile):
print("Loading Glove Model")
f = open(gloveFile,'r', encoding='utf8')
model = {}
for line in f:
splitLine = line.split()
word = splitLine[0]
embedding = np.array([float(val) for val in splitLine[1:]])
model[word] = embedding
print("Done.",len(model)," words loaded!")
return model
#Get w2v
self.w2v = loadGloveModel("errorcam/glove.6B.300d.txt");
atten_dim = (4,12,115,115)
model_init_args = {"im_feat_dim": (7,7,2048), "hidden_feat_size": 96, "atten_dim": np.prod(atten_dim), "ans_dim":3129, "ques_cam":False}
self.attention_refine_model = att_refine.uncertainatt_refinedatt_net_cam_bigger(**model_init_args).cuda()
model_suffix = "model_3_5501.pt"
exp_name = "exp4_fullmodel_corrpred_refinedattn_uncertainCAM_bigger"
self.attention_refine_model.load_state_dict(torch.load("errorcam/checkpoints/"+exp_name+"/"+model_suffix))
self.gradcam = GradCam(self.attention_refine_model)
return;
def get_lock(self):
while self.in_use>0:
time.sleep(0.2);
print('locked');
self.in_use=1;
return;
def release_lock(self):
self.in_use=0;
return;
def parse_question(self,qtext):
if isinstance(qtext,list):
qtokens=[];
question=[];
for qi in qtext:
qtokens_i,question_i=self.parse_question(qi);
qtokens.append(qtokens_i);
question.append(question_i);
with torch.no_grad():
question=torch.stack(question,dim=0);
return qtokens,question;
else:
qtokens=self.tokenizer.tokenize(qtext);
if len(qtokens)>self.tokenizer.max_qlength-2:
qtokens=qtokens[:self.tokenizer.max_qlength-2];
qtokens=['[CLS]']+qtokens+['[SEP]'];
question=self.tokenizer.convert_tokens_to_ids(qtokens);
question=question+[0]*(self.tokenizer.max_qlength-len(question));
question=torch.LongTensor(question);
return qtokens,question;
def get_7x7_features(self,Is):
#Resize & Normalize
with torch.no_grad():
It=[]
for I in Is:
I=F.adaptive_avg_pool2d(I,(224,224));
I=Ft.normalize(I,mean=[0.485, 0.456, 0.406],std=[0.229, 0.224, 0.225]);
It.append(I);
It=torch.stack(It,dim=0);
#Extract features
fvs=[];
batch=8;
for i in range(0,len(It),batch):
r=min(i+batch,len(It));
fv=self.resnet152(It[i:r]);
fvs.append(fv);
fvs=torch.cat(fvs,dim=0);
return fvs;
def get_maskrcnn_features(self,Is):
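        # Bottom-up-attention style region features: run the Caffe Faster R-CNN once per
        # image, keep up to 36 boxes above the confidence threshold, and return their pool5
        # features (36x2048) together with box coordinates normalised by the image size.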
try:
self.get_lock();
caffe.set_mode_gpu()
conf_thresh=0.2
min_boxes=36
max_boxes=36
net=self_fast_rcnn;
fv=[];
boxes_=[];
for iid in range(len(Is)):
I=Is[iid]
k=I.numpy().tostring();
if k in lru_mask_rcnn:
fv_i=lru_mask_rcnn[k]['fv'].clone();
boxes_i=lru_mask_rcnn[k]['boxes'].clone();
fv.append(fv_i);
boxes_.append(boxes_i);
else:
t0=time.time();
I=I.cuda();
im=(I*255).permute(1,2,0);
im=torch.stack((im[:,:,2],im[:,:,1],im[:,:,0]),dim=2);
im=im.cpu();
print(im.shape,im.max(),im.min())
im=im.numpy();
print('chpt1 %f'%float(time.time()-t0));
scores, boxes, attr_scores, rel_scores = im_detect(net, im)
print('chpt2 %f'%float(time.time()-t0));
# Keep the original boxes, don't worry about the regression bbox outputs
rois = net.blobs['rois'].data.copy()
# unscale back to raw image space
blobs, im_scales = _get_blobs(im, None)
print('chpt3 %f'%float(time.time()-t0));
cls_boxes = rois[:, 1:5] / im_scales[0]
cls_prob = net.blobs['cls_prob'].data
attr_prob = net.blobs['attr_prob'].data
pool5 = net.blobs['pool5_flat'].data
# Keep only the best detections
max_conf = numpy.zeros((rois.shape[0]))
for cls_ind in range(1,cls_prob.shape[1]):
cls_scores = scores[:, cls_ind]
try:
dets = numpy.hstack((cls_boxes, cls_scores[:, numpy.newaxis])).astype(numpy.float32)
except:
print(cls_boxes.shape);
print(cls_scores.shape);
dets = numpy.hstack((cls_boxes, cls_scores[:, numpy.newaxis])).astype(numpy.float32)
keep = numpy.array(nms(dets, cfg.TEST.NMS))
max_conf[keep] = numpy.where(cls_scores[keep] > max_conf[keep], cls_scores[keep], max_conf[keep])
keep_boxes = numpy.where(max_conf >= conf_thresh)[0]
if len(keep_boxes) < min_boxes:
keep_boxes = numpy.argsort(max_conf)[::-1][:min_boxes]
elif len(keep_boxes) > max_boxes:
keep_boxes = numpy.argsort(max_conf)[::-1][:max_boxes]
print('chpt4 %f'%float(time.time()-t0));
imh=I.shape[1];
imw=I.shape[2];
boxes_i=torch.from_numpy(cls_boxes[keep_boxes]).view(1,36,4);
boxes_i=boxes_i/torch.Tensor([imw,imh,imw,imh]).view(1,1,4);
fv_i=torch.from_numpy(pool5[keep_boxes]).view(1,36,2048);
print(fv_i.shape,boxes_i.shape);
lru_mask_rcnn[k]={'fv':fv_i.clone().cpu(),'boxes':boxes_i.clone().cpu()};
print('chpt5 %f'%float(time.time()-t0));
fv.append(fv_i);
boxes_.append(boxes_i);
fv=torch.cat(fv,dim=0);
boxes_=torch.cat(boxes_,dim=0);
self.release_lock();
except:
self.release_lock();
            raise  # re-raise the original exception instead of masking it with a ZeroDivisionError
return fv,boxes_;
def vqa(self,Is,Qs,use_model=''):
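        # Pipeline sketch: tokenise the question(s), extract 7x7 grid features (ResNet-152)
        # and 36 region features (Faster R-CNN), run the selected BERT-VQA model, and return
        # everything the explain_* helpers need as a db.Table.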
qtokens,q=self.parse_question(Qs);
print(qtokens)
fv7x7=self.get_7x7_features(Is);
fv36,boxes=self.get_maskrcnn_features(Is);
with torch.no_grad():
print(fv7x7.shape,fv36.shape,q.shape);
scores,attn=self.models[use_model](fv36,fv7x7.permute(0,2,3,1),q);
scores=scores.data.cpu();
attn=torch.stack(attn,dim=1).data.cpu();
top1_conf,pred=scores.max(dim=1);
As=[self.models[use_model].answer_dictionary[i] for i in pred.tolist()];
return db.Table({'I':Is,'Q':Qs,'A':As,'scores':scores,'attention':attn,'qtoken':qtokens,'qtensor':q,'features_7x7':fv7x7,'features_fv36':fv36,'bbox':boxes,'model':[use_model for q in Qs]});
    #I: image tensor (C x H x W)
    #attn: 7x7 attention matrix
    #output_fname: output filename wrt root
def write_spatial_attention(self,I,attn,output_fname):
eps=1e-4
I=Ft.to_pil_image(I);
I=I.resize((224, 224))
I=numpy.asarray(I).astype(numpy.float32)
attn=attn.view(7,7).numpy()
attn=cv2.resize(attn, (224, 224))
attn=(attn-numpy.min(attn)+eps)/(numpy.max(attn)-numpy.min(attn)+eps)
att_heatmap=cv2.applyColorMap(numpy.uint8(255*attn), cv2.COLORMAP_JET)
alpha = 0.5
output_image=(1-alpha)*att_heatmap+alpha*I;
cv2.imwrite(output_fname,output_image)
return;
def write_object_attention(self,I,attn_rpn,bbox,attn_fname,token_ind=-1):
def apply_mask(image, mask, color, alpha=0.7):
for c in range(3):
image[:, :, c] = numpy.where(mask == 1,
image[:, :, c] *
(1 - alpha) + alpha * color[c] * 255,
image[:, :, c])
return image
def apply_obj_mask(masked_image, mask, actual_image, weight):
mask = numpy.repeat(mask[:,:,numpy.newaxis], 3, axis=2)
obj_image = numpy.ones(actual_image.shape)*255
numpy.copyto(obj_image, actual_image, where=(mask==1))
white_image = numpy.ones(actual_image.shape)*255
if weight< 0.3:
weight=weight+0.15
obj_img_weighted = weight*obj_image + (1-weight)*white_image
numpy.copyto(masked_image, obj_img_weighted, where=(mask==1))
return masked_image
def computeIOU(box1, box2):
#boxes should be in (y1, x1, y2, x2)
box1 = numpy.asarray(box1).astype(numpy.float32)
box2 = numpy.asarray(box2).astype(numpy.float32)
iou_box_x1 = max(box1[1], box2[1])
iou_box_y1 = max(box1[0], box2[0])
iou_box_x2 = min(box1[3], box2[3])
iou_box_y2 = min(box1[2], box2[2])
iou_h = max(0, iou_box_y2-iou_box_y1)
iou_w = max(0, iou_box_x2 - iou_box_x1)
roi_area = (iou_h * iou_w)
box1_area = numpy.absolute((box1[3] - box1[1]) * (box1[2] - box1[0]))
box2_area = numpy.absolute((box2[3] - box2[1]) * (box2[2] - box2[0]))
iou = roi_area/float(box1_area + box2_area - roi_area)
return iou
def compute_box_distance(box1, box2):
#boxes in (y1, x1, y2, x2)
box1 = numpy.asarray(box1).astype(numpy.float32)
box2 = numpy.asarray(box2).astype(numpy.float32)
cntr_box1_x = int((box1[1] + box1[3])/2)
cntr_box1_y = int((box1[0] + box1[2])/2)
cntr_box2_x = int((box2[1] + box2[3])/2)
cntr_box2_y = int((box2[0] + box2[2])/2)
dist = numpy.sqrt((cntr_box1_x - cntr_box2_x)**2 + (cntr_box1_y - cntr_box2_y)**2)
return dist
def computeWeights(mrcnn_boxes, rpn_boxes, box_weights):
epsilon = 1e-5
rcnn_box_weights = []
for ind, rcnn_box in enumerate(mrcnn_boxes):
max_area = 0
all_iou = []
all_weights = []
for rpn_ind, rpn_box in enumerate(rpn_boxes):
iou_area = computeIOU(rcnn_box, rpn_box)
all_iou.append(iou_area)
all_weights.append(box_weights[rpn_ind])
if len(all_iou) >= 1 and numpy.sum(all_iou)>0:
final_weight = numpy.exp(numpy.log(numpy.sum(numpy.exp(numpy.log(numpy.asarray(all_iou)) + numpy.log(numpy.asarray(all_weights))))) -(numpy.log(float(numpy.sum(all_iou)+ epsilon))))
rcnn_box_weights.append(final_weight)
else:
rcnn_box_weights.append(0)
return rcnn_box_weights
def make_rpn_attention_im(actual_image,attention_rpn,bboxes,attn_fname,token_ind=-1):
im_boxes=(bboxes.numpy()*256).astype(numpy.int32)
final_obj_weights = attention_rpn.numpy()
actual_image = Ft.to_pil_image(actual_image).resize((256, 256))
if len(final_obj_weights) != 0:
if numpy.max(final_obj_weights) > 0:
final_obj_weights = numpy.exp(numpy.log(final_obj_weights) - numpy.log(numpy.max(final_obj_weights)))
img_arr = numpy.asarray(actual_image).astype(numpy.float32)
masked_image = numpy.ones(img_arr.shape) * 255
masked_image = img_arr * 0.1 + masked_image * 0.9
if len(final_obj_weights) != 0:
obj_atten_inds = numpy.argsort(final_obj_weights)
else:
obj_atten_inds = []
obj_atten_inds = obj_atten_inds[::-1]
top_N = 5 # int(N * float(3) / 4)
for i in obj_atten_inds[:top_N][::-1]:
if final_obj_weights[i] > 0:
mask = numpy.zeros((256,256))
x0, y0, x1, y1 = im_boxes[i]
mask[y0:y1, x0:x1]=1
masked_image=apply_obj_mask(masked_image,mask,img_arr,float(final_obj_weights[i]))
## draw origin box (clicked box and draw arrows from that box to attended boxes)
## will only work for cases where we have such box to box attention, think about generalizing this later
if token_ind>29 and token_ind<66:
origin_box = im_boxes[token_ind-30]
ox0, oy0, ox1, oy1 = origin_box
cv2.rectangle(masked_image,(origin_box[0],origin_box[1]),(origin_box[2],origin_box[3]),(100,100,100),5)
for i in obj_atten_inds[:top_N]:
x0, y0, x1, y1 = im_boxes[i]
cv2.rectangle(masked_image, (x0, y0), (x1, y1), (50, 50, 50), 1)
pt1, pt2 = compute_closest_corner(origin_box, im_boxes[i])
cv2.arrowedLine(masked_image, pt1, pt2, (100,100,100), 2,8,0,0.05)
#masked_im = Image.fromarray(masked_image.astype(numpy.float32))
cv2.imwrite(attn_fname,masked_image[:,:,::-1])
return;
def compute_closest_corner(box1, box2):
ax0, ay0, ax1, ay1 = box1
bx0, by0, bx1, by1 = box2
min_d = float("inf")
for ax in [ax0, ax1]:
for bx in [bx0, bx1]:
d = abs(ax-bx)
if d<min_d:
ax_c = ax
bx_c = bx
min_d = d
min_d = float("inf")
for ay in [ay0, ay1]:
for by in [by0, by1]:
d = abs(ay-by)
if d<min_d:
ay_c = ay
by_c = by
min_d = d
return (ax_c, ay_c), (bx_c, by_c)
make_rpn_attention_im(I,attn_rpn,bbox,attn_fname,token_ind);
return;
def explain_errormap(self,table_vqa):
key=table_vqa['id'][0];
I=table_vqa['I'][0]
Q=table_vqa['Q'][0]
fv7x7=table_vqa['features_7x7'][0:1].clone()#.permute(0,2,3,1).view(1,49,2048);
attn=table_vqa['attention'][0:1];
answer_prob=F.softmax(table_vqa['scores'][0:1],dim=1);
def get_avg_w2v(question, w2v):
q_w = question.lower().split("?")[0].split(" ")
avg_feats = []
for w in q_w:
if w in w2v:
avg_feats.append(w2v[w])
return np.average(avg_feats, axis=0)
def get_err_weight(p):
weight = (p/0.175)**4 # empirically defined by what looks good on the matplotlib colormap.
if weight>1:
weight=1.0
return weight
#get question features
ques_feats = torch.from_numpy(get_avg_w2v(Q,self.w2v))
ques_feats = ques_feats.cuda().float().unsqueeze(0)
#get failure prediction probability. Using this to weigh the error maps results in better visualization.
model_out = self.attention_refine_model(attn.cuda().view(1,-1), fv7x7.cuda(), ques_feats, answer_prob.cuda());
fail_pred = model_out['wrong_pred']
fail_pred = float(fail_pred.squeeze().detach().cpu())
weight = get_err_weight(fail_pred)
print(attn.shape,fv7x7.shape,ques_feats.shape,answer_prob.shape)
att_map, _ = self.gradcam([attn.cuda().view(1,-1), fv7x7.cuda(), ques_feats, answer_prob.cuda()])
actual_image = Ft.to_pil_image(I).resize((224,224))
actual_image=numpy.asarray(actual_image).astype(numpy.float32)
processed_img = cv2.resize(actual_image, (224,224))
att_map = att_map.reshape((7,7))
att_map = cv2.resize(att_map, (224,224))
epsilon = 1e-3
att_heatmap = cv2.applyColorMap(np.uint8(255 * att_map), cv2.COLORMAP_JET)
alpha = 0.5
output_image = (1 - alpha) * att_heatmap *weight + alpha * processed_img
errmap_im_file_name='./attn/%s_errormap.jpg'%key;
cv2.imwrite(errmap_im_file_name, output_image)
return errmap_im_file_name;
def explain_attention_map_average(self,table_vqa):
key=table_vqa['id'][0];
attn=table_vqa['attention'][0];
qtoken=table_vqa['qtoken'][0];
L=len(qtoken);
attn_sp=attn[-1,:,:L, 66:].mean(0).mean(0).view(7,7);
attn_fname='./attn/%s_spatial_average.jpg'%key;
self.write_spatial_attention(table_vqa['I'][0],attn_sp,attn_fname);
return attn_fname;
def explain_attention_map_all(self,table_vqa):
key=table_vqa['id'][0];
attn=table_vqa['attention'][0];
qtoken=table_vqa['qtoken'][0];
L=len(qtoken);
attn_fname=[];
for i in range(L):
attn_sp=attn[-1,:,i, 66:].mean(0).view(7,7);
attn_fname_i='./attn/%s_spatial_w%d.jpg'%(key,i);
self.write_spatial_attention(table_vqa['I'][0],attn_sp,attn_fname_i);
attn_fname.append(attn_fname_i);
return attn_fname;
def explain_object_attention_average(self,table_vqa):
key=table_vqa['id'][0];
attn=table_vqa['attention'][0];
bbox=table_vqa['bbox'][0];
qtoken=table_vqa['qtoken'][0];
L=len(qtoken);
attn_rpn=attn[-1,-1,:L,30:66].mean(0);
attn_fname='./attn/%s_object_average.jpg'%key;
self.write_object_attention(table_vqa['I'][0],attn_rpn,bbox,attn_fname)
return attn_fname;
def explain_object_attention_all(self,table_vqa):
key=table_vqa['id'][0];
attn=table_vqa['attention'][0];
bbox=table_vqa['bbox'][0];
qtoken=table_vqa['qtoken'][0];
L=len(qtoken);
attn_fname=[];
for i in range(L):
attn_rpn=attn[-1,-1,i,30:66];
attn_fname_i='./attn/%s_object_w%d.jpg'%(key,i);
self.write_object_attention(table_vqa['I'][0],attn_rpn,bbox,attn_fname_i)
attn_fname.append(attn_fname_i);
return attn_fname;
#def explain_attention_map_pairs(self,table_vqa):
def explain_top_answers(self,table_vqa,k=5):
n=len(table_vqa);
topk_answers=[];
topk_confidence=[];
for i in range(n):
use_model=table_vqa['model'][i];
s=table_vqa['scores'][i];
p=F.softmax(s,dim=0);
p,ind=p.sort(dim=0,descending=True);
p=p[:k].tolist();
ind=ind[:k].tolist();
a=[self.models[use_model].answer_dictionary[j] for j in ind];
topk_answers_i=[];
for j in range(len(a)):
topk_answers_i.append({'answer':a[j],'confidence':p[j]});
topk_answers.append(topk_answers_i);
return topk_answers;
def explain_related_qas(self,table_vqa,k=5):
n=len(table_vqa);
topk_qas=[];
for i in range(n):
#Compute vector for question
use_model=table_vqa['model'][i];
I=table_vqa['I'][i];
qtext=table_vqa['Q'][i]
q=self.question_vector_v0(qtext,batch=50,model=use_model);
#Query related question
precomputed_qfv=self.qfvs[use_model]['qfv'];
precomputed_q=self.qfvs[use_model]['q'];
s=torch.mm(precomputed_qfv,q.view(-1,1)).view(-1);
s,ind=s.sort(dim=0,descending=True);
ind=ind.tolist();
s=s.tolist();
#Read questions and call VQA
topk_qas_i=[];
for j in range(k):
topk_qas_i.append({'question':precomputed_q[ind[j]],'r':s[j]});
result=self.vqa([I]*k,[x['question'] for x in topk_qas_i],use_model=use_model);
for j in range(k):
topk_qas_i[j]['answer']=result['A'][j];
topk_qas.append(topk_qas_i);
#Call VQA in batch mode
return topk_qas;
#Question type as perceived by the model
def explain_qtype(self,table_vqa):
qac=qa_classifier();
qtype=[];
n=len(table_vqa);
for i in range(n):
question=table_vqa['Q'][i];
answer=table_vqa['A'][i];
qtype.append(qac.classify_qa(question=question,answer=answer))
return qtype;
def question_vector_v0(self,qtext,T=15,std=1e-3,batch=4,model=0):
def logmeanexp(inputs,dim=None,keepdim=False):
return (inputs-F.log_softmax(inputs,dim=dim).data).mean(dim,keepdim=keepdim)-math.log(inputs.size(dim));
seeds=[t*1000 for t in range(T)]; #Fix seeds across runs
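        # Question signature: run T stochastic forward passes (dropout stays active because
        # model2 is switched to train() mode) against a fixed bank of image features, then use
        # the log-mean-exp baseline below to turn the per-pass answer distributions into a
        # vector that explain_related_qas compares against precomputed question vectors.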
#Preprocess question
_,q=self.parse_question(qtext);
q=q.view(1,-1);
feature=self.qfvs_imkey['fv36'].cuda();
feature_7x7=self.qfvs_imkey['fv49'].cuda();
model2=copy.deepcopy(self.models[model]);
model2.train();
s=[];
for t in range(T):
st=[];
rng_state=torch.random.get_rng_state();
torch.random.manual_seed(seeds[t]);
#Run the model, pairing the q with each images
with torch.no_grad():
for j in range(0,feature.shape[0],batch):
r=min(j+batch,feature.shape[0]);
scores,_=model2(feature[j:r],feature_7x7[j:r],q.repeat(r-j,1));
scores=F.log_softmax(scores,dim=1).data;
st.append(scores);
torch.random.set_rng_state(rng_state);
st=torch.cat(st,dim=0);
s.append(st.data);
s=torch.stack(s,dim=0); #TxKx3129
savg=logmeanexp(s,dim=0,keepdim=True);
sdiff=s-savg;
s=s.permute(1,0,2);
sdiff=sdiff.permute(1,2,0);
v=torch.bmm(torch.exp(s),torch.exp(sdiff))/T;
return v.view(-1).cpu();
| [
"numpy.uint8",
"numpy.prod",
"numpy.copyto",
"sys.path.insert",
"numpy.sqrt",
"qa_classifier",
"cv2.rectangle",
"fast_rcnn.test.im_detect",
"torchvision.transforms.functional.to_pil_image",
"torch.LongTensor",
"numpy.hstack",
"numpy.log",
"time.sleep",
"torch.exp",
"numpy.argsort",
"to... | [((380, 424), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./bottom-up-attention/"""'], {}), "(0, './bottom-up-attention/')\n", (395, 424), False, 'import sys\n'), ((425, 482), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./bottom-up-attention/caffe/python/"""'], {}), "(0, './bottom-up-attention/caffe/python/')\n", (440, 482), False, 'import sys\n'), ((483, 531), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./bottom-up-attention/lib/"""'], {}), "(0, './bottom-up-attention/lib/')\n", (498, 531), False, 'import sys\n'), ((532, 582), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./bottom-up-attention/tools/"""'], {}), "(0, './bottom-up-attention/tools/')\n", (547, 582), False, 'import sys\n'), ((583, 612), 'sys.path.append', 'sys.path.append', (['"""./errorcam"""'], {}), "('./errorcam')\n", (598, 612), False, 'import sys\n'), ((627, 647), 'caffe.set_mode_gpu', 'caffe.set_mode_gpu', ([], {}), '()\n', (645, 647), False, 'import caffe\n'), ((796, 885), 'fast_rcnn.config.cfg_from_file', 'cfg_from_file', (['"""bottom-up-attention/experiments/cfgs/faster_rcnn_end2end_resnet.yml"""'], {}), "(\n 'bottom-up-attention/experiments/cfgs/faster_rcnn_end2end_resnet.yml')\n", (809, 885), False, 'from fast_rcnn.config import cfg, cfg_from_file\n'), ((1087, 1135), 'caffe.Net', 'caffe.Net', (['prototxt', 'caffe.TEST'], {'weights': 'weights'}), '(prototxt, caffe.TEST, weights=weights)\n', (1096, 1135), False, 'import caffe\n'), ((2169, 2187), 'lru_cache.new', 'lru_cache.new', (['(100)'], {}), '(100)\n', (2182, 2187), False, 'import lru_cache\n'), ((2688, 2738), 'pytorch_transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['"""bert-base-uncased"""'], {}), "('bert-base-uncased')\n", (2717, 2738), False, 'from pytorch_transformers import BertTokenizer\n'), ((3636, 3673), 'torch.load', 'torch.load', (['"""res/models/qfv_imkey.pt"""'], {}), "('res/models/qfv_imkey.pt')\n", (3646, 3673), False, 'import torch\n'), ((5221, 5257), 'errorcam.scripts.pytorchgradcam.gradcam.GradCam', 'GradCam', (['self.attention_refine_model'], {}), '(self.attention_refine_model)\n', (5228, 5257), False, 'from errorcam.scripts.pytorchgradcam.gradcam import GradCam\n'), ((11654, 11864), 'db.Table', 'db.Table', (["{'I': Is, 'Q': Qs, 'A': As, 'scores': scores, 'attention': attn, 'qtoken':\n qtokens, 'qtensor': q, 'features_7x7': fv7x7, 'features_fv36': fv36,\n 'bbox': boxes, 'model': [use_model for q in Qs]}"], {}), "({'I': Is, 'Q': Qs, 'A': As, 'scores': scores, 'attention': attn,\n 'qtoken': qtokens, 'qtensor': q, 'features_7x7': fv7x7, 'features_fv36':\n fv36, 'bbox': boxes, 'model': [use_model for q in Qs]})\n", (11662, 11864), False, 'import db\n'), ((12006, 12024), 'torchvision.transforms.functional.to_pil_image', 'Ft.to_pil_image', (['I'], {}), '(I)\n', (12021, 12024), True, 'import torchvision.transforms.functional as Ft\n'), ((12155, 12183), 'cv2.resize', 'cv2.resize', (['attn', '(224, 224)'], {}), '(attn, (224, 224))\n', (12165, 12183), False, 'import cv2\n'), ((12421, 12460), 'cv2.imwrite', 'cv2.imwrite', (['output_fname', 'output_image'], {}), '(output_fname, output_image)\n', (12432, 12460), False, 'import cv2\n'), ((19498, 19540), 'torch.nn.functional.softmax', 'F.softmax', (["table_vqa['scores'][0:1]"], {'dim': '(1)'}), "(table_vqa['scores'][0:1], dim=1)\n", (19507, 19540), True, 'import torch.nn.functional as F\n'), ((20969, 21005), 'cv2.resize', 'cv2.resize', (['actual_image', '(224, 224)'], {}), '(actual_image, (224, 224))\n', (20979, 21005), False, 'import cv2\n'), 
((21064, 21095), 'cv2.resize', 'cv2.resize', (['att_map', '(224, 224)'], {}), '(att_map, (224, 224))\n', (21074, 21095), False, 'import cv2\n'), ((21387, 21433), 'cv2.imwrite', 'cv2.imwrite', (['errmap_im_file_name', 'output_image'], {}), '(errmap_im_file_name, output_image)\n', (21398, 21433), False, 'import cv2\n'), ((25483, 25498), 'qa_classifier', 'qa_classifier', ([], {}), '()\n', (25496, 25498), True, 'import qa_classifier as qa_classifier\n'), ((26311, 26344), 'copy.deepcopy', 'copy.deepcopy', (['self.models[model]'], {}), '(self.models[model])\n', (26324, 26344), False, 'import copy\n'), ((27068, 27089), 'torch.stack', 'torch.stack', (['s'], {'dim': '(0)'}), '(s, dim=0)\n', (27079, 27089), False, 'import torch\n'), ((2324, 2339), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2337, 2339), False, 'import torch\n'), ((4797, 4815), 'numpy.prod', 'np.prod', (['atten_dim'], {}), '(atten_dim)\n', (4804, 4815), True, 'import numpy as np\n'), ((5135, 5202), 'torch.load', 'torch.load', (["('errorcam/checkpoints/' + exp_name + '/' + model_suffix)"], {}), "('errorcam/checkpoints/' + exp_name + '/' + model_suffix)\n", (5145, 5202), False, 'import torch\n'), ((5353, 5368), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (5363, 5368), False, 'import time\n'), ((6391, 6417), 'torch.LongTensor', 'torch.LongTensor', (['question'], {}), '(question)\n', (6407, 6417), False, 'import torch\n'), ((6537, 6552), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6550, 6552), False, 'import torch\n'), ((6797, 6819), 'torch.stack', 'torch.stack', (['It'], {'dim': '(0)'}), '(It, dim=0)\n', (6808, 6819), False, 'import torch\n'), ((7094, 7115), 'torch.cat', 'torch.cat', (['fvs'], {'dim': '(0)'}), '(fvs, dim=0)\n', (7103, 7115), False, 'import torch\n'), ((7244, 7264), 'caffe.set_mode_gpu', 'caffe.set_mode_gpu', ([], {}), '()\n', (7262, 7264), False, 'import caffe\n'), ((10842, 10862), 'torch.cat', 'torch.cat', (['fv'], {'dim': '(0)'}), '(fv, dim=0)\n', (10851, 10862), False, 'import torch\n'), ((10882, 10906), 'torch.cat', 'torch.cat', (['boxes_'], {'dim': '(0)'}), '(boxes_, dim=0)\n', (10891, 10906), False, 'import torch\n'), ((11261, 11276), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11274, 11276), False, 'import torch\n'), ((12300, 12323), 'numpy.uint8', 'numpy.uint8', (['(255 * attn)'], {}), '(255 * attn)\n', (12311, 12323), False, 'import numpy\n'), ((13021, 13071), 'numpy.repeat', 'numpy.repeat', (['mask[:, :, numpy.newaxis]', '(3)'], {'axis': '(2)'}), '(mask[:, :, numpy.newaxis], 3, axis=2)\n', (13033, 13071), False, 'import numpy\n'), ((13154, 13208), 'numpy.copyto', 'numpy.copyto', (['obj_image', 'actual_image'], {'where': '(mask == 1)'}), '(obj_image, actual_image, where=mask == 1)\n', (13166, 13208), False, 'import numpy\n'), ((13457, 13518), 'numpy.copyto', 'numpy.copyto', (['masked_image', 'obj_img_weighted'], {'where': '(mask == 1)'}), '(masked_image, obj_img_weighted, where=mask == 1)\n', (13469, 13518), False, 'import numpy\n'), ((14177, 14234), 'numpy.absolute', 'numpy.absolute', (['((box1[3] - box1[1]) * (box1[2] - box1[0]))'], {}), '((box1[3] - box1[1]) * (box1[2] - box1[0]))\n', (14191, 14234), False, 'import numpy\n'), ((14259, 14316), 'numpy.absolute', 'numpy.absolute', (['((box2[3] - box2[1]) * (box2[2] - box2[0]))'], {}), '((box2[3] - box2[1]) * (box2[2] - box2[0]))\n', (14273, 14316), False, 'import numpy\n'), ((14919, 14998), 'numpy.sqrt', 'numpy.sqrt', (['((cntr_box1_x - cntr_box2_x) ** 2 + (cntr_box1_y - cntr_box2_y) ** 2)'], {}), '((cntr_box1_x - 
cntr_box2_x) ** 2 + (cntr_box1_y - cntr_box2_y) ** 2)\n', (14929, 14998), False, 'import numpy\n'), ((18290, 18339), 'cv2.imwrite', 'cv2.imwrite', (['attn_fname', 'masked_image[:, :, ::-1]'], {}), '(attn_fname, masked_image[:, :, ::-1])\n', (18301, 18339), False, 'import cv2\n'), ((19818, 19847), 'numpy.average', 'np.average', (['avg_feats'], {'axis': '(0)'}), '(avg_feats, axis=0)\n', (19828, 19847), True, 'import numpy as np\n'), ((21168, 21191), 'numpy.uint8', 'np.uint8', (['(255 * att_map)'], {}), '(255 * att_map)\n', (21176, 21191), True, 'import numpy as np\n'), ((23731, 23750), 'torch.nn.functional.softmax', 'F.softmax', (['s'], {'dim': '(0)'}), '(s, dim=0)\n', (23740, 23750), True, 'import torch.nn.functional as F\n'), ((26452, 26480), 'torch.random.get_rng_state', 'torch.random.get_rng_state', ([], {}), '()\n', (26478, 26480), False, 'import torch\n'), ((26494, 26528), 'torch.random.manual_seed', 'torch.random.manual_seed', (['seeds[t]'], {}), '(seeds[t])\n', (26518, 26528), False, 'import torch\n'), ((26943, 26980), 'torch.random.set_rng_state', 'torch.random.set_rng_state', (['rng_state'], {}), '(rng_state)\n', (26969, 26980), False, 'import torch\n'), ((26997, 27017), 'torch.cat', 'torch.cat', (['st'], {'dim': '(0)'}), '(st, dim=0)\n', (27006, 27017), False, 'import torch\n'), ((2963, 2997), 'os.path.join', 'os.path.join', (["m['root']", '"""args.pt"""'], {}), "(m['root'], 'args.pt')\n", (2975, 2997), False, 'import os\n'), ((3143, 3189), 'os.path.join', 'os.path.join', (["m['root']", '"""model_checkpoint.pt"""'], {}), "(m['root'], 'model_checkpoint.pt')\n", (3155, 3189), False, 'import os\n'), ((3324, 3371), 'os.path.join', 'os.path.join', (["m['root']", '"""answer_dictionary.pt"""'], {}), "(m['root'], 'answer_dictionary.pt')\n", (3336, 3371), False, 'import os\n'), ((3478, 3511), 'os.path.join', 'os.path.join', (["m['root']", '"""qfv.pt"""'], {}), "(m['root'], 'qfv.pt')\n", (3490, 3511), False, 'import os\n'), ((4889, 4957), 'errorcam.models.attention_refine.atten_refine_network.uncertainatt_refinedatt_net_cam_bigger', 'att_refine.uncertainatt_refinedatt_net_cam_bigger', ([], {}), '(**model_init_args)\n', (4938, 4957), True, 'import errorcam.models.attention_refine.atten_refine_network as att_refine\n'), ((5853, 5868), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5866, 5868), False, 'import torch\n'), ((5895, 5923), 'torch.stack', 'torch.stack', (['question'], {'dim': '(0)'}), '(question, dim=0)\n', (5906, 5923), False, 'import torch\n'), ((6615, 6651), 'torch.nn.functional.adaptive_avg_pool2d', 'F.adaptive_avg_pool2d', (['I', '(224, 224)'], {}), '(I, (224, 224))\n', (6636, 6651), True, 'import torch.nn.functional as F\n'), ((6669, 6739), 'torchvision.transforms.functional.normalize', 'Ft.normalize', (['I'], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(I, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (6681, 6739), True, 'import torchvision.transforms.functional as Ft\n'), ((12067, 12083), 'numpy.asarray', 'numpy.asarray', (['I'], {}), '(I)\n', (12080, 12083), False, 'import numpy\n'), ((12678, 12776), 'numpy.where', 'numpy.where', (['(mask == 1)', '(image[:, :, c] * (1 - alpha) + alpha * color[c] * 255)', 'image[:, :, c]'], {}), '(mask == 1, image[:, :, c] * (1 - alpha) + alpha * color[c] * \n 255, image[:, :, c])\n', (12689, 12776), False, 'import numpy\n'), ((13094, 13124), 'numpy.ones', 'numpy.ones', (['actual_image.shape'], {}), '(actual_image.shape)\n', (13104, 13124), False, 'import numpy\n'), ((13248, 13278), 'numpy.ones', 
'numpy.ones', (['actual_image.shape'], {}), '(actual_image.shape)\n', (13258, 13278), False, 'import numpy\n'), ((16643, 16668), 'numpy.ones', 'numpy.ones', (['img_arr.shape'], {}), '(img_arr.shape)\n', (16653, 16668), False, 'import numpy\n'), ((16827, 16859), 'numpy.argsort', 'numpy.argsort', (['final_obj_weights'], {}), '(final_obj_weights)\n', (16840, 16859), False, 'import numpy\n'), ((17735, 17850), 'cv2.rectangle', 'cv2.rectangle', (['masked_image', '(origin_box[0], origin_box[1])', '(origin_box[2], origin_box[3])', '(100, 100, 100)', '(5)'], {}), '(masked_image, (origin_box[0], origin_box[1]), (origin_box[2],\n origin_box[3]), (100, 100, 100), 5)\n', (17748, 17850), False, 'import cv2\n'), ((20837, 20855), 'torchvision.transforms.functional.to_pil_image', 'Ft.to_pil_image', (['I'], {}), '(I)\n', (20852, 20855), True, 'import torchvision.transforms.functional as Ft\n'), ((20895, 20922), 'numpy.asarray', 'numpy.asarray', (['actual_image'], {}), '(actual_image)\n', (20908, 20922), False, 'import numpy\n'), ((26606, 26621), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (26619, 26621), False, 'import torch\n'), ((27253, 27265), 'torch.exp', 'torch.exp', (['s'], {}), '(s)\n', (27262, 27265), False, 'import torch\n'), ((27266, 27282), 'torch.exp', 'torch.exp', (['sdiff'], {}), '(sdiff)\n', (27275, 27282), False, 'import torch\n'), ((2509, 2535), 'torch.nn.DataParallel', 'nn.DataParallel', (['resnet152'], {}), '(resnet152)\n', (2524, 2535), True, 'import torch.nn as nn\n'), ((3017, 3052), 'model_7x7.simple_vqa_model', 'base_model.simple_vqa_model', (['args_m'], {}), '(args_m)\n', (3044, 3052), True, 'import model_7x7 as base_model\n'), ((3079, 3101), 'torch.nn.DataParallel', 'nn.DataParallel', (['model'], {}), '(model)\n', (3094, 3101), True, 'import torch.nn as nn\n'), ((7820, 7831), 'time.time', 'time.time', ([], {}), '()\n', (7829, 7831), False, 'import time\n'), ((7935, 7994), 'torch.stack', 'torch.stack', (['(im[:, :, 2], im[:, :, 1], im[:, :, 0])'], {'dim': '(2)'}), '((im[:, :, 2], im[:, :, 1], im[:, :, 0]), dim=2)\n', (7946, 7994), False, 'import torch\n'), ((8231, 8249), 'fast_rcnn.test.im_detect', 'im_detect', (['net', 'im'], {}), '(net, im)\n', (8240, 8249), False, 'from fast_rcnn.test import im_detect, _get_blobs\n'), ((8575, 8595), 'fast_rcnn.test._get_blobs', '_get_blobs', (['im', 'None'], {}), '(im, None)\n', (8585, 8595), False, 'from fast_rcnn.test import im_detect, _get_blobs\n'), ((9017, 9043), 'numpy.zeros', 'numpy.zeros', (['rois.shape[0]'], {}), '(rois.shape[0])\n', (9028, 9043), False, 'import numpy\n'), ((12203, 12218), 'numpy.min', 'numpy.min', (['attn'], {}), '(attn)\n', (12212, 12218), False, 'import numpy\n'), ((12225, 12240), 'numpy.max', 'numpy.max', (['attn'], {}), '(attn)\n', (12234, 12240), False, 'import numpy\n'), ((12241, 12256), 'numpy.min', 'numpy.min', (['attn'], {}), '(attn)\n', (12250, 12256), False, 'import numpy\n'), ((13669, 13688), 'numpy.asarray', 'numpy.asarray', (['box1'], {}), '(box1)\n', (13682, 13688), False, 'import numpy\n'), ((13730, 13749), 'numpy.asarray', 'numpy.asarray', (['box2'], {}), '(box2)\n', (13743, 13749), False, 'import numpy\n'), ((14546, 14565), 'numpy.asarray', 'numpy.asarray', (['box1'], {}), '(box1)\n', (14559, 14565), False, 'import numpy\n'), ((14607, 14626), 'numpy.asarray', 'numpy.asarray', (['box2'], {}), '(box2)\n', (14620, 14626), False, 'import numpy\n'), ((16248, 16277), 'torchvision.transforms.functional.to_pil_image', 'Ft.to_pil_image', (['actual_image'], {}), '(actual_image)\n', (16263, 16277), True, 
'import torchvision.transforms.functional as Ft\n'), ((16374, 16402), 'numpy.max', 'numpy.max', (['final_obj_weights'], {}), '(final_obj_weights)\n', (16383, 16402), False, 'import numpy\n'), ((16566, 16593), 'numpy.asarray', 'numpy.asarray', (['actual_image'], {}), '(actual_image)\n', (16579, 16593), False, 'import numpy\n'), ((17134, 17157), 'numpy.zeros', 'numpy.zeros', (['(256, 256)'], {}), '((256, 256))\n', (17145, 17157), False, 'import numpy\n'), ((17957, 18021), 'cv2.rectangle', 'cv2.rectangle', (['masked_image', '(x0, y0)', '(x1, y1)', '(50, 50, 50)', '(1)'], {}), '(masked_image, (x0, y0), (x1, y1), (50, 50, 50), 1)\n', (17970, 18021), False, 'import cv2\n'), ((18121, 18192), 'cv2.arrowedLine', 'cv2.arrowedLine', (['masked_image', 'pt1', 'pt2', '(100, 100, 100)', '(2)', '(8)', '(0)', '(0.05)'], {}), '(masked_image, pt1, pt2, (100, 100, 100), 2, 8, 0, 0.05)\n', (18136, 18192), False, 'import cv2\n'), ((9691, 9776), 'numpy.where', 'numpy.where', (['(cls_scores[keep] > max_conf[keep])', 'cls_scores[keep]', 'max_conf[keep]'], {}), '(cls_scores[keep] > max_conf[keep], cls_scores[keep], max_conf[keep]\n )\n', (9702, 9776), False, 'import numpy\n'), ((9826, 9862), 'numpy.where', 'numpy.where', (['(max_conf >= conf_thresh)'], {}), '(max_conf >= conf_thresh)\n', (9837, 9862), False, 'import numpy\n'), ((11463, 11487), 'torch.stack', 'torch.stack', (['attn'], {'dim': '(1)'}), '(attn, dim=1)\n', (11474, 11487), False, 'import torch\n'), ((15602, 15620), 'numpy.sum', 'numpy.sum', (['all_iou'], {}), '(all_iou)\n', (15611, 15620), False, 'import numpy\n'), ((26845, 26873), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['scores'], {'dim': '(1)'}), '(scores, dim=1)\n', (26858, 26873), True, 'import torch.nn.functional as F\n'), ((9625, 9648), 'fast_rcnn.nms_wrapper.nms', 'nms', (['dets', 'cfg.TEST.NMS'], {}), '(dets, cfg.TEST.NMS)\n', (9628, 9648), False, 'from fast_rcnn.nms_wrapper import nms\n'), ((10312, 10351), 'torch.from_numpy', 'torch.from_numpy', (['cls_boxes[keep_boxes]'], {}), '(cls_boxes[keep_boxes])\n', (10328, 10351), False, 'import torch\n'), ((10472, 10507), 'torch.from_numpy', 'torch.from_numpy', (['pool5[keep_boxes]'], {}), '(pool5[keep_boxes])\n', (10488, 10507), False, 'import torch\n'), ((16458, 16486), 'numpy.log', 'numpy.log', (['final_obj_weights'], {}), '(final_obj_weights)\n', (16467, 16486), False, 'import numpy\n'), ((9955, 9978), 'numpy.argsort', 'numpy.argsort', (['max_conf'], {}), '(max_conf)\n', (9968, 9978), False, 'import numpy\n'), ((10402, 10436), 'torch.Tensor', 'torch.Tensor', (['[imw, imh, imw, imh]'], {}), '([imw, imh, imw, imh])\n', (10414, 10436), False, 'import torch\n'), ((16499, 16527), 'numpy.max', 'numpy.max', (['final_obj_weights'], {}), '(final_obj_weights)\n', (16508, 16527), False, 'import numpy\n'), ((25912, 25942), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['inputs'], {'dim': 'dim'}), '(inputs, dim=dim)\n', (25925, 25942), True, 'import torch.nn.functional as F\n'), ((8152, 8163), 'time.time', 'time.time', ([], {}), '()\n', (8161, 8163), False, 'import time\n'), ((8293, 8304), 'time.time', 'time.time', ([], {}), '()\n', (8302, 8304), False, 'import time\n'), ((8639, 8650), 'time.time', 'time.time', ([], {}), '()\n', (8648, 8650), False, 'import time\n'), ((9229, 9284), 'numpy.hstack', 'numpy.hstack', (['(cls_boxes, cls_scores[:, numpy.newaxis])'], {}), '((cls_boxes, cls_scores[:, numpy.newaxis]))\n', (9241, 9284), False, 'import numpy\n'), ((10088, 10111), 'numpy.argsort', 'numpy.argsort', (['max_conf'], {}), '(max_conf)\n', 
(10101, 10111), False, 'import numpy\n'), ((10194, 10205), 'time.time', 'time.time', ([], {}), '()\n', (10203, 10205), False, 'import time\n'), ((10715, 10726), 'time.time', 'time.time', ([], {}), '()\n', (10724, 10726), False, 'import time\n'), ((9479, 9534), 'numpy.hstack', 'numpy.hstack', (['(cls_boxes, cls_scores[:, numpy.newaxis])'], {}), '((cls_boxes, cls_scores[:, numpy.newaxis]))\n', (9491, 9534), False, 'import numpy\n'), ((15794, 15812), 'numpy.sum', 'numpy.sum', (['all_iou'], {}), '(all_iou)\n', (15803, 15812), False, 'import numpy\n'), ((15709, 15731), 'numpy.asarray', 'numpy.asarray', (['all_iou'], {}), '(all_iou)\n', (15722, 15731), False, 'import numpy\n'), ((15745, 15771), 'numpy.asarray', 'numpy.asarray', (['all_weights'], {}), '(all_weights)\n', (15758, 15771), False, 'import numpy\n')] |
import cv2
import numpy as np
from pyautogui import screenshot
from pyautogui import size as get_screen_size
from core.screen.screen_rectangle import ScreenRectangle
class ScreenshotImage:
def __init__(self, in_region: ScreenRectangle = None):
screen_width, screen_height = get_screen_size()
region_coordinates = (0, 0, screen_width, screen_height)
if in_region is not None:
region_coordinates = (in_region.start_point.x, in_region.start_point.y, in_region.width, in_region.height)
screen_pil_image = screenshot(region=region_coordinates)
self._gray_array = cv2.cvtColor(np.array(screen_pil_image), cv2.COLOR_BGR2GRAY)
height, width = self._gray_array.shape
self._width = width
self._height = height
@property
def image_gray_array(self):
return self._gray_array
@property
def width(self) -> int:
return self._width
@property
def height(self) -> int:
return self._height
def binarize(self):
# img2 = cv2.adaptiveThreshold(self._gray_array, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 11, 2)
return cv2.threshold(self._gray_array, 0, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]
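# Illustrative usage (not part of the original module; requires a running display):
#   image = ScreenshotImage()     # full-screen grab
#   binary = image.binarize()     # numpy array thresholded with Otsu's method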
| [
"cv2.threshold",
"numpy.array",
"pyautogui.screenshot",
"pyautogui.size"
] | [((289, 306), 'pyautogui.size', 'get_screen_size', ([], {}), '()\n', (304, 306), True, 'from pyautogui import size as get_screen_size\n'), ((554, 591), 'pyautogui.screenshot', 'screenshot', ([], {'region': 'region_coordinates'}), '(region=region_coordinates)\n', (564, 591), False, 'from pyautogui import screenshot\n'), ((633, 659), 'numpy.array', 'np.array', (['screen_pil_image'], {}), '(screen_pil_image)\n', (641, 659), True, 'import numpy as np\n'), ((1167, 1243), 'cv2.threshold', 'cv2.threshold', (['self._gray_array', '(0)', '(255)', '(cv2.THRESH_BINARY | cv2.THRESH_OTSU)'], {}), '(self._gray_array, 0, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)\n', (1180, 1243), False, 'import cv2\n')] |
from typing import (List,
Tuple)
from tests.utils import (RawPointsList,
RawPolygon,
enum_to_values)
from wagyu.bound import Bound as PortedBound
from wagyu.box import Box as PortedBox
from wagyu.edge import Edge as PortedEdge
from wagyu.enums import (EdgeSide as PortedEdgeSide,
FillKind as PortedFillKind,
OperationKind as PortedOperationKind,
PolygonKind as PortedPolygonKind)
from wagyu.intersect_node import IntersectNode as PortedIntersectNode
from wagyu.linear_ring import LinearRing as PortedLinearRing
from wagyu.local_minimum import (LocalMinimum as PortedLocalMinimum,
LocalMinimumList as PortedLocalMinimumList)
from wagyu.point import Point as PortedPoint
from wagyu.polygon import (Multipolygon as PortedMultipolygon,
Polygon as PortedPolygon)
from wagyu.ring import Ring as PortedRing
from wagyu.ring_manager import RingManager as PortedRingManager
from wagyu.wagyu import Wagyu as PortedWagyu
PortedBound = PortedBound
PortedBox = PortedBox
PortedEdge = PortedEdge
PortedEdgeSide = PortedEdgeSide
PortedFillKind = PortedFillKind
PortedIntersectNode = PortedIntersectNode
PortedLinearRing = PortedLinearRing
PortedLinearRingWithPolygonKind = Tuple[PortedLinearRing, PortedPolygonKind]
PortedLocalMinimum = PortedLocalMinimum
PortedLocalMinimumList = PortedLocalMinimumList
PortedMultipolygon = PortedMultipolygon
PortedOperationKind = PortedOperationKind
PortedPoint = PortedPoint
PortedPolygon = PortedPolygon
PortedPolygonKind = PortedPolygonKind
PortedRing = PortedRing
PortedRingManager = PortedRingManager
PortedWagyu = PortedWagyu
ported_edges_sides = enum_to_values(PortedEdgeSide)
ported_fill_kinds = enum_to_values(PortedFillKind)
ported_operation_kinds = enum_to_values(PortedOperationKind)
ported_polygon_kinds = enum_to_values(PortedPolygonKind)
def to_ported_linear_rings_points(raw_points: RawPointsList
) -> List[PortedPoint]:
points = [PortedPoint(x, y) for x, y in raw_points]
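    # the returned ring is closed by repeating the first point at the end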
return points + [points[0]]
def to_ported_polygon_linear_rings(raw_polygon: RawPolygon
) -> List[PortedLinearRing]:
raw_border, raw_holes = raw_polygon
return ([PortedLinearRing(to_ported_linear_rings_points(raw_border))]
+ [PortedLinearRing(to_ported_linear_rings_points(raw_hole))
for raw_hole in raw_holes])
def to_ported_local_minimum_list(linear_rings_with_polygon_kinds
: List[PortedLinearRingWithPolygonKind]
) -> PortedLocalMinimumList:
result = PortedLocalMinimumList()
for linear_ring, polygon_kind in linear_rings_with_polygon_kinds:
result.add_linear_ring(linear_ring, polygon_kind)
return result
| [
"tests.utils.enum_to_values",
"wagyu.point.Point",
"wagyu.local_minimum.LocalMinimumList"
] | [((1781, 1811), 'tests.utils.enum_to_values', 'enum_to_values', (['PortedEdgeSide'], {}), '(PortedEdgeSide)\n', (1795, 1811), False, 'from tests.utils import RawPointsList, RawPolygon, enum_to_values\n'), ((1832, 1862), 'tests.utils.enum_to_values', 'enum_to_values', (['PortedFillKind'], {}), '(PortedFillKind)\n', (1846, 1862), False, 'from tests.utils import RawPointsList, RawPolygon, enum_to_values\n'), ((1888, 1923), 'tests.utils.enum_to_values', 'enum_to_values', (['PortedOperationKind'], {}), '(PortedOperationKind)\n', (1902, 1923), False, 'from tests.utils import RawPointsList, RawPolygon, enum_to_values\n'), ((1947, 1980), 'tests.utils.enum_to_values', 'enum_to_values', (['PortedPolygonKind'], {}), '(PortedPolygonKind)\n', (1961, 1980), False, 'from tests.utils import RawPointsList, RawPolygon, enum_to_values\n'), ((2759, 2783), 'wagyu.local_minimum.LocalMinimumList', 'PortedLocalMinimumList', ([], {}), '()\n', (2781, 2783), True, 'from wagyu.local_minimum import LocalMinimum as PortedLocalMinimum, LocalMinimumList as PortedLocalMinimumList\n'), ((2115, 2132), 'wagyu.point.Point', 'PortedPoint', (['x', 'y'], {}), '(x, y)\n', (2126, 2132), True, 'from wagyu.point import Point as PortedPoint\n')] |
#!/usr/bin/env python3
import glob
import json
import xml.dom.minidom as minidom
install = minidom.parse('build/install.rdf')
ta = install.getElementsByTagNameNS('*', 'targetApplication')[0]
with open('schema/supported.json') as f:
min_version = json.load(f)
for client, version in min_version.items():
client = {'zotero': '<EMAIL>', 'jurism': '<EMAIL>' }[client]
_id = next(node for node in ta.getElementsByTagNameNS('*', 'id') if node.firstChild.nodeValue == client)
for node in _id.parentNode.getElementsByTagNameNS('*', 'minVersion'):
node.firstChild.replaceWholeText(version)
print('minimum', client, 'version', version)
with open('build/install.rdf', 'w') as f:
install.writexml(f)
| [
"json.load",
"xml.dom.minidom.parse"
] | [((105, 139), 'xml.dom.minidom.parse', 'minidom.parse', (['"""build/install.rdf"""'], {}), "('build/install.rdf')\n", (118, 139), True, 'import xml.dom.minidom as minidom\n'), ((263, 275), 'json.load', 'json.load', (['f'], {}), '(f)\n', (272, 275), False, 'import json\n')] |
import urllib.request
import json
import sys
import os
data = ''
url = sys.argv[1]
output_folder = sys.argv[2]
file_name = sys.argv[3]
with urllib.request.urlopen(url) as response:
data = response.read().decode('utf-8')
index = 1
filename = output_folder + '/' + file_name + '.json'
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, "w") as output_file:
output_file.write(data)
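# Illustrative invocation (script name and argument values are placeholders):
#   python fetch_json.py "https://example.com/data" downloads result
# which writes the response body to downloads/result.json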
| [
"os.path.dirname"
] | [((301, 326), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (316, 326), False, 'import os\n')] |
from pprint import pprint
import argparse
def parse_args():
parser = argparse.ArgumentParser()
# Data input settings
parser.add_argument('--dataset', type=str, default='Semantic_Segmentation_Dataset/', help='name of dataset')
# Optimization: General
parser.add_argument('--bs', type=int, default = 8 )
parser.add_argument('--epochs', type=int,help='Number of epochs',default= 250)
parser.add_argument('--workers', type=int,help='Number of workers',default=4)
parser.add_argument('--model', help='model name',default='densenet')
parser.add_argument('--evalsplit', help='eval spolit',default='val')
parser.add_argument('--lr', type=float,default= 1e-3,help='Learning rate')
parser.add_argument('--save', help='save folder name',default='0try')
parser.add_argument('--seed', type=int, default=1111, help='random seed')
parser.add_argument('--load', type=str, default='best_model.pkl', help='load checkpoint file name')
parser.add_argument('--resume', action='store_true', help='resume train from load chkpoint')
parser.add_argument('--test', action='store_true', help='test only')
parser.add_argument('--savemodel',action='store_true',help='checkpoint save the model')
parser.add_argument('--testrun', action='store_true', help='test run with few dataset')
parser.add_argument('--expname', type=str, default='info', help='extra explanation of the method')
parser.add_argument('--useGPU', type=str, default=True, help='Set it as False if GPU is unavailable')
# parse
args = parser.parse_args()
opt = vars(args)
pprint('parsed input parameters:')
pprint(opt)
return args
if __name__ == '__main__':
opt = parse_args()
print('opt[\'dataset\'] is ', opt.dataset)
| [
"pprint.pprint",
"argparse.ArgumentParser"
] | [((75, 100), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (98, 100), False, 'import argparse\n'), ((1607, 1641), 'pprint.pprint', 'pprint', (['"""parsed input parameters:"""'], {}), "('parsed input parameters:')\n", (1613, 1641), False, 'from pprint import pprint\n'), ((1646, 1657), 'pprint.pprint', 'pprint', (['opt'], {}), '(opt)\n', (1652, 1657), False, 'from pprint import pprint\n')] |
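A quick way to see what parse_args() produces is to stage sys.argv before calling it; the flag values below are arbitrary and the module above is assumed to be importable as-is:
import sys

sys.argv = ['train.py', '--bs', '16', '--model', 'densenet', '--test']
opt = parse_args()   # pprints the parsed parameter dict as a side effect
assert opt.bs == 16 and opt.test and opt.dataset == 'Semantic_Segmentation_Dataset/'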
# Copyright 2016-2020 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Torque backend
#
# - Initial version submitted by <NAME>, <NAME> (VUB)
#
import re
import os
import time
import reframe.utility.os_ext as os_ext
from reframe.core.backends import register_scheduler
from reframe.core.exceptions import JobError, JobSchedulerError
from reframe.core.logging import getlogger
from reframe.core.schedulers.pbs import PbsJobScheduler, _run_strict
JOB_STATES = {
'Q': 'QUEUED',
'H': 'HELD',
'R': 'RUNNING',
'E': 'EXITING',
'T': 'MOVED',
'W': 'WAITING',
'S': 'SUSPENDED',
'C': 'COMPLETED',
}
@register_scheduler('torque')
class TorqueJobScheduler(PbsJobScheduler):
TASKS_OPT = '-l nodes={num_nodes}:ppn={num_cpus_per_node}'
def _update_nodelist(self, job, nodespec):
if job.nodelist is not None:
return
job._nodelist = [x.split('/')[0] for x in nodespec.split('+')]
job._nodelist.sort()
def poll(self, *jobs):
if jobs:
# Filter out non-jobs
jobs = [job for job in jobs if job is not None]
if not jobs:
return
completed = os_ext.run_command(
f'qstat -f {" ".join(job.jobid for job in jobs)}'
)
# Depending on the configuration, completed jobs will remain on the job
# list for a limited time, or be removed upon completion.
# If qstat cannot find any of the job IDs, it will return 153.
# Otherwise, it will return with return code 0 and print information
# only for the jobs it could find.
if completed.returncode == 153:
getlogger().debug(
'return code = 153: jobids not known by scheduler, '
'assuming all jobs completed'
)
for job in jobs:
job._state = 'COMPLETED'
return
if completed.returncode != 0:
raise JobSchedulerError(
f'qstat failed with exit code {completed.returncode} '
f'(standard error follows):\n{completed.stderr}'
)
# Store information for each job separately
jobinfo = {}
for job_raw_info in completed.stdout.split('\n\n'):
jobid_match = re.search(
r'^Job Id:\s*(?P<jobid>\S+)', job_raw_info, re.MULTILINE
)
if jobid_match:
jobid = jobid_match.group('jobid')
jobinfo[jobid] = job_raw_info
for job in jobs:
if job.jobid not in jobinfo:
getlogger().debug(
f'jobid {job.jobid} not known to scheduler, '
f'assuming job completed'
)
job._state = 'COMPLETED'
job._completed = True
continue
info = jobinfo[job.jobid]
state_match = re.search(
r'^\s*job_state = (?P<state>[A-Z])', info, re.MULTILINE
)
if not state_match:
getlogger().debug(
f'job state not found (job info follows):\n{info}'
)
continue
state = state_match.group('state')
job._state = JOB_STATES[state]
nodelist_match = re.search(
r'exec_host = (?P<nodespec>[\S\t\n]+)',
info, re.MULTILINE
)
if nodelist_match:
nodespec = nodelist_match.group('nodespec')
nodespec = re.sub(r'[\n\t]*', '', nodespec)
self._update_nodelist(job, nodespec)
if job.state == 'COMPLETED':
exitcode_match = re.search(
r'^\s*exit_status = (?P<code>\d+)',
info, re.MULTILINE,
)
if exitcode_match:
job._exitcode = int(exitcode_match.group('code'))
# We report a job as finished only when its stdout/stderr are
# written back to the working directory
stdout = os.path.join(job.workdir, job.stdout)
stderr = os.path.join(job.workdir, job.stderr)
out_ready = os.path.exists(stdout) and os.path.exists(stderr)
done = job.cancelled or out_ready
if done:
job._completed = True
elif (job.state in ['QUEUED', 'HELD', 'WAITING'] and
job.max_pending_time):
if (time.time() - job.submit_time >= job.max_pending_time):
self.cancel(job)
job._exception = JobError('maximum pending time exceeded')
| [
"reframe.core.exceptions.JobSchedulerError",
"os.path.exists",
"reframe.core.exceptions.JobError",
"os.path.join",
"reframe.core.logging.getlogger",
"re.sub",
"reframe.core.backends.register_scheduler",
"time.time",
"re.search"
] | [((753, 781), 'reframe.core.backends.register_scheduler', 'register_scheduler', (['"""torque"""'], {}), "('torque')\n", (771, 781), False, 'from reframe.core.backends import register_scheduler\n'), ((2071, 2202), 'reframe.core.exceptions.JobSchedulerError', 'JobSchedulerError', (['f"""qstat failed with exit code {completed.returncode} (standard error follows):\n{completed.stderr}"""'], {}), '(\n f"""qstat failed with exit code {completed.returncode} (standard error follows):\n{completed.stderr}"""\n )\n', (2088, 2202), False, 'from reframe.core.exceptions import JobError, JobSchedulerError\n'), ((2400, 2468), 're.search', 're.search', (['"""^Job Id:\\\\s*(?P<jobid>\\\\S+)"""', 'job_raw_info', 're.MULTILINE'], {}), "('^Job Id:\\\\s*(?P<jobid>\\\\S+)', job_raw_info, re.MULTILINE)\n", (2409, 2468), False, 'import re\n'), ((3024, 3090), 're.search', 're.search', (['"""^\\\\s*job_state = (?P<state>[A-Z])"""', 'info', 're.MULTILINE'], {}), "('^\\\\s*job_state = (?P<state>[A-Z])', info, re.MULTILINE)\n", (3033, 3090), False, 'import re\n'), ((3422, 3493), 're.search', 're.search', (['"""exec_host = (?P<nodespec>[\\\\S\\\\t\\\\n]+)"""', 'info', 're.MULTILINE'], {}), "('exec_host = (?P<nodespec>[\\\\S\\\\t\\\\n]+)', info, re.MULTILINE)\n", (3431, 3493), False, 'import re\n'), ((3656, 3689), 're.sub', 're.sub', (['"""[\\\\n\\\\t]*"""', '""""""', 'nodespec'], {}), "('[\\\\n\\\\t]*', '', nodespec)\n", (3662, 3689), False, 'import re\n'), ((3817, 3883), 're.search', 're.search', (['"""^\\\\s*exit_status = (?P<code>\\\\d+)"""', 'info', 're.MULTILINE'], {}), "('^\\\\s*exit_status = (?P<code>\\\\d+)', info, re.MULTILINE)\n", (3826, 3883), False, 'import re\n'), ((4207, 4244), 'os.path.join', 'os.path.join', (['job.workdir', 'job.stdout'], {}), '(job.workdir, job.stdout)\n', (4219, 4244), False, 'import os\n'), ((4270, 4307), 'os.path.join', 'os.path.join', (['job.workdir', 'job.stderr'], {}), '(job.workdir, job.stderr)\n', (4282, 4307), False, 'import os\n'), ((1776, 1787), 'reframe.core.logging.getlogger', 'getlogger', ([], {}), '()\n', (1785, 1787), False, 'from reframe.core.logging import getlogger\n'), ((4336, 4358), 'os.path.exists', 'os.path.exists', (['stdout'], {}), '(stdout)\n', (4350, 4358), False, 'import os\n'), ((4363, 4385), 'os.path.exists', 'os.path.exists', (['stderr'], {}), '(stderr)\n', (4377, 4385), False, 'import os\n'), ((2706, 2717), 'reframe.core.logging.getlogger', 'getlogger', ([], {}), '()\n', (2715, 2717), False, 'from reframe.core.logging import getlogger\n'), ((3169, 3180), 'reframe.core.logging.getlogger', 'getlogger', ([], {}), '()\n', (3178, 3180), False, 'from reframe.core.logging import getlogger\n'), ((4759, 4800), 'reframe.core.exceptions.JobError', 'JobError', (['"""maximum pending time exceeded"""'], {}), "('maximum pending time exceeded')\n", (4767, 4800), False, 'from reframe.core.exceptions import JobError, JobSchedulerError\n'), ((4629, 4640), 'time.time', 'time.time', ([], {}), '()\n', (4638, 4640), False, 'import time\n')] |
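The nodelist handling in _update_nodelist is easy to check on its own; the exec_host string below is made up but follows Torque's node/slot+node/slot format:
nodespec = 'node02/3+node02/2+node01/0'
nodelist = [x.split('/')[0] for x in nodespec.split('+')]
nodelist.sort()
print(nodelist)  # ['node01', 'node02', 'node02'] -- one entry per allocated slot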
import random
import logging
import numpy as np
import tensorflow as tf
class DeepQNetworkModel:
def __init__(self,
session,
layers_size,
memory,
default_batch_size=None,
default_learning_rate=None,
default_epsilon=None,
gamma=0.99,
min_samples_for_predictions=0,
double_dqn=False,
learning_procedures_to_q_target_switch=1000,
tau=1,
maximize_entropy=False,
var_scope_name=None):
"""
Create a new Deep Q Network model
:param session: a tf.Session to be used
:param layers_size: a list of numbers, representing the number of nodes in each layer of the network
:param memory: an instance of type memory_buffers.Memory
:param default_batch_size: the default batch size for training
:param default_learning_rate: the default learning rate for training
:param default_epsilon: the default epsilon to be used for the eps-greedy policy
:param gamma: the discount factor
:param min_samples_for_predictions: the minimum number of seen state-transitions required to make predictions.
                                             random numbers will be selected until this number has been reached
:param double_dqn: boolean, should a Double Deep Q Network should be used or not
:param learning_procedures_to_q_target_switch: how many learning procedures are required before the main network
is copied to the q-target network. relevant only if double_dqn = True.
:param tau: a number in the range [0,1] determining the mixture of the main network weights and q-target weights
which will be inserted to q-target. tau=1 copies the main network weights to the q-target network as
they are (as should be according to the original paper). tau=0 will keep q-target weights unchanged,
meaning no knowledge will be transferred.
relevant only if double_dqn = True.
        :param maximize_entropy: boolean, determining whether the network should try to maximize the entropy of the Q values
        :param var_scope_name: when more than one model is generated, each needs its own variable scope. If two
            or more models are supposed to share their weights, they should all have the same variable scope name.
This is irrelevant when only one instance of the model is used.
"""
self.output_size = layers_size[-1]
self.session = session
self.default_batch_size = default_batch_size
self.default_learning_rate = default_learning_rate
self.default_epsilon = default_epsilon
self.min_samples_for_predictions = min_samples_for_predictions
self.learning_procedures_to_q_target_switch = learning_procedures_to_q_target_switch
self.tau = tau
self.maximize_entropy = maximize_entropy
self.memory = memory
# print("Layers_size: ", layers_size)
# print("Output size: ", self.output_size)
# print("Input size: ", layers_size[0])
self.q_network = self.__create_q_network(input_size=layers_size[0], output_size=self.output_size,
hidden_layers_size=layers_size[1:-1], gamma=gamma,
maximize_entropy=maximize_entropy,
var_scope_name=var_scope_name,
layer_name_suffix='qnn')
if double_dqn:
self.target_q_network = self.__create_q_network(input_size=layers_size[0], output_size=self.output_size,
hidden_layers_size=layers_size[1:-1], gamma=gamma,
maximize_entropy=maximize_entropy,
var_scope_name=var_scope_name,
layer_name_suffix='qt')
else:
self.target_q_network = None
def __create_q_network(self, input_size, output_size, hidden_layers_size, gamma, maximize_entropy,
var_scope_name, layer_name_suffix):
scope_name = var_scope_name or tf.compat.v1.get_variable_scope().name
reuse = tf.compat.v1.AUTO_REUSE if var_scope_name else False
with tf.compat.v1.variable_scope(scope_name, reuse=reuse):
qnn = QNetwork(input_size=input_size, output_size=output_size, hidden_layers_size=hidden_layers_size,
gamma=gamma, maximize_entropy=maximize_entropy, layer_name_suffix=layer_name_suffix)
return qnn
def learn(self, learning_rate=None, batch_size=None):
"""
Initialize a learning attempt
:param learning_rate: a learning rate overriding default_learning_rate
:param batch_size: a batch_size overriding default_batch_size
:return: None if no learning was made, or the cost of learning if it did happen
"""
current_batch_size = batch_size if batch_size is not None else self.default_batch_size
if self.memory.counter % current_batch_size != 0 or self.memory.counter == 0:
logging.debug('Passing on learning procedure')
pass
else:
logging.debug('Starting learning procedure...')
batch = self.memory.sample(current_batch_size)
# print("batch: ", batch)
# print("batch.reshape(-1): ", batch.reshape(-1), " ", batch.reshape(-1).shape)
#print("self.target_q_network.states: ", self.target_q_network.states)
#print("self.__fetch_from_batch(batch, 'next_state'): ", self.__fetch_from_batch(batch, 'next_state'))
qt = self.session.run(self.target_q_network.output,
feed_dict={self.target_q_network.states: self.__fetch_from_batch(batch, 'next_state')})
#print(self.__fetch_from_batch(batch, 'is_terminal'))
terminals = self.__fetch_from_batch(batch, 'is_terminal')
for i in range(terminals.size):
if terminals[i]:
qt[i] = np.zeros(self.output_size)
lr = learning_rate if learning_rate is not None else self.default_learning_rate
_, cost = self.session.run([self.q_network.optimizer, self.q_network.cost],
feed_dict={self.q_network.states: self.__fetch_from_batch(batch, 'state'),
self.q_network.r: self.__fetch_from_batch(batch, 'reward'),
self.q_network.enumerated_actions: self.__fetch_from_batch(batch, 'action', enum=True),
self.q_network.q_target: qt,
self.q_network.learning_rate: lr})
logging.debug('Batch number: %s | Q-Network cost: %s | Learning rate: %s',
self.memory.counter // current_batch_size, cost, lr)
if self.target_q_network is not None and self.memory.counter % (self.learning_procedures_to_q_target_switch * current_batch_size) == 0:
logging.info('Copying Q-Network to Q-Target...')
tf_vars = tf.compat.v1.trainable_variables()
num_of_vars = len(tf_vars)
operations = []
for i, v in enumerate(tf_vars[0:num_of_vars // 2]):
operations.append(tf_vars[i + num_of_vars // 2].assign(
(v.value() * self.tau) + ((1 - self.tau) * tf_vars[i + num_of_vars // 2].value())))
self.session.run(operations)
return cost
def act(self, state, epsilon=None):
"""
Select an action for the given state
:param state: a Numpy array representing a state
:param epsilon: an epsilon value to be used for the eps-greedy policy, overriding default_epsilon
:return: a number representing the selected action
"""
eps = epsilon if epsilon is not None else self.default_epsilon
rnd = random.random()
if rnd < eps or self.memory.counter < self.min_samples_for_predictions:
action = random.randint(0, self.output_size - 1)
logging.debug("Choosing a random action: %s [Epsilon = %s]", action, eps)
else:
prediction = self.session.run(self.q_network.output,
feed_dict={self.q_network.states: np.expand_dims(state, axis=0)})
prediction = np.squeeze(prediction)
action = np.argmax(prediction)
logging.debug("Predicted action for state %s is %s (network output: %s) [Epsilon = %s]",
state, action, prediction, eps)
return action
def add_to_memory(self, state, action, reward, next_state, is_terminal_state):
"""
Add new state-transition to memory
:param state: a Numpy array representing a state
:param action: an integer representing the selected action
:param reward: a number representing the received reward
:param next_state: a Numpy array representing the state reached after performing the action
:param is_terminal_state: boolean. mark state as a terminal_state. next_state will have no effect.
"""
self.memory.append({'state': state, 'action': action, 'reward': reward,
'next_state': next_state, 'is_terminal': is_terminal_state})
def __fetch_from_batch(self, batch, key, enum=False):
# print("batch: ", batch)
if key == 'next_state' or key == 'state':
if enum:
return np.array(list(enumerate(map(lambda x: x[key].reshape(-1), batch))))
else:
return np.array(list(map(lambda x: x[key].reshape(-1), batch)))
else:
if enum:
return np.array(list(enumerate(map(lambda x: x[key], batch))))
else:
return np.array(list(map(lambda x: x[key], batch)))
class QNetwork:
"""
A Q-Network implementation
"""
def __init__(self, input_size, output_size, hidden_layers_size, gamma, maximize_entropy, layer_name_suffix):
self.q_target = tf.compat.v1.placeholder(shape=(None, output_size), dtype=tf.float32)
self.r = tf.compat.v1.placeholder(shape=None, dtype=tf.float32)
self.states = tf.compat.v1.placeholder(shape=(None, input_size), dtype=tf.float32)
self.enumerated_actions = tf.compat.v1.placeholder(shape=(None, 2), dtype=tf.int32)
self.learning_rate = tf.compat.v1.placeholder(shape=[], dtype=tf.float32)
layer = self.states
for i in range(len(hidden_layers_size)):
layer = tf.compat.v1.layers.dense(inputs=layer, units=hidden_layers_size[i], activation=tf.nn.relu,
name='{}_dense_layer_{}'.format(layer_name_suffix,i),
kernel_initializer=tf.compat.v1.keras.initializers.VarianceScaling(scale=1.0, mode="fan_avg", distribution="uniform"))
self.output = tf.compat.v1.layers.dense(inputs=layer, units=output_size,
name='{}_dense_layer_{}'.format(layer_name_suffix,len(hidden_layers_size)),
kernel_initializer=tf.compat.v1.keras.initializers.VarianceScaling(scale=1.0, mode="fan_avg", distribution="uniform"))
self.predictions = tf.gather_nd(self.output, indices=self.enumerated_actions)
if maximize_entropy:
self.future_q = tf.math.log(tf.reduce_sum(input_tensor=tf.exp(self.q_target), axis=1))
else:
self.future_q = tf.reduce_max(input_tensor=self.q_target, axis=1)
self.labels = self.r + (gamma * self.future_q)
self.cost = tf.reduce_mean(input_tensor=tf.compat.v1.losses.mean_squared_error(labels=self.labels, predictions=self.predictions))
self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.cost)
| [
"tensorflow.compat.v1.placeholder",
"tensorflow.compat.v1.variable_scope",
"logging.debug",
"tensorflow.compat.v1.get_variable_scope",
"tensorflow.compat.v1.train.AdamOptimizer",
"numpy.argmax",
"logging.info",
"numpy.squeeze",
"tensorflow.reduce_max",
"tensorflow.exp",
"numpy.zeros",
"tensorf... | [((8348, 8363), 'random.random', 'random.random', ([], {}), '()\n', (8361, 8363), False, 'import random\n'), ((10521, 10590), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'shape': '(None, output_size)', 'dtype': 'tf.float32'}), '(shape=(None, output_size), dtype=tf.float32)\n', (10545, 10590), True, 'import tensorflow as tf\n'), ((10608, 10662), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'shape': 'None', 'dtype': 'tf.float32'}), '(shape=None, dtype=tf.float32)\n', (10632, 10662), True, 'import tensorflow as tf\n'), ((10685, 10753), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'shape': '(None, input_size)', 'dtype': 'tf.float32'}), '(shape=(None, input_size), dtype=tf.float32)\n', (10709, 10753), True, 'import tensorflow as tf\n'), ((10788, 10845), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'shape': '(None, 2)', 'dtype': 'tf.int32'}), '(shape=(None, 2), dtype=tf.int32)\n', (10812, 10845), True, 'import tensorflow as tf\n'), ((10875, 10927), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'shape': '[]', 'dtype': 'tf.float32'}), '(shape=[], dtype=tf.float32)\n', (10899, 10927), True, 'import tensorflow as tf\n'), ((11741, 11799), 'tensorflow.gather_nd', 'tf.gather_nd', (['self.output'], {'indices': 'self.enumerated_actions'}), '(self.output, indices=self.enumerated_actions)\n', (11753, 11799), True, 'import tensorflow as tf\n'), ((4560, 4612), 'tensorflow.compat.v1.variable_scope', 'tf.compat.v1.variable_scope', (['scope_name'], {'reuse': 'reuse'}), '(scope_name, reuse=reuse)\n', (4587, 4612), True, 'import tensorflow as tf\n'), ((5410, 5456), 'logging.debug', 'logging.debug', (['"""Passing on learning procedure"""'], {}), "('Passing on learning procedure')\n", (5423, 5456), False, 'import logging\n'), ((5500, 5547), 'logging.debug', 'logging.debug', (['"""Starting learning procedure..."""'], {}), "('Starting learning procedure...')\n", (5513, 5547), False, 'import logging\n'), ((7107, 7239), 'logging.debug', 'logging.debug', (['"""Batch number: %s | Q-Network cost: %s | Learning rate: %s"""', '(self.memory.counter // current_batch_size)', 'cost', 'lr'], {}), "('Batch number: %s | Q-Network cost: %s | Learning rate: %s', \n self.memory.counter // current_batch_size, cost, lr)\n", (7120, 7239), False, 'import logging\n'), ((8465, 8504), 'random.randint', 'random.randint', (['(0)', '(self.output_size - 1)'], {}), '(0, self.output_size - 1)\n', (8479, 8504), False, 'import random\n'), ((8517, 8590), 'logging.debug', 'logging.debug', (['"""Choosing a random action: %s [Epsilon = %s]"""', 'action', 'eps'], {}), "('Choosing a random action: %s [Epsilon = %s]', action, eps)\n", (8530, 8590), False, 'import logging\n'), ((8803, 8825), 'numpy.squeeze', 'np.squeeze', (['prediction'], {}), '(prediction)\n', (8813, 8825), True, 'import numpy as np\n'), ((8847, 8868), 'numpy.argmax', 'np.argmax', (['prediction'], {}), '(prediction)\n', (8856, 8868), True, 'import numpy as np\n'), ((8881, 9010), 'logging.debug', 'logging.debug', (['"""Predicted action for state %s is %s (network output: %s) [Epsilon = %s]"""', 'state', 'action', 'prediction', 'eps'], {}), "(\n 'Predicted action for state %s is %s (network output: %s) [Epsilon = %s]',\n state, action, prediction, eps)\n", (8894, 9010), False, 'import logging\n'), ((11970, 12019), 'tensorflow.reduce_max', 'tf.reduce_max', ([], {'input_tensor': 'self.q_target', 'axis': '(1)'}), '(input_tensor=self.q_target, axis=1)\n', 
(11983, 12019), True, 'import tensorflow as tf\n'), ((4439, 4472), 'tensorflow.compat.v1.get_variable_scope', 'tf.compat.v1.get_variable_scope', ([], {}), '()\n', (4470, 4472), True, 'import tensorflow as tf\n'), ((7425, 7473), 'logging.info', 'logging.info', (['"""Copying Q-Network to Q-Target..."""'], {}), "('Copying Q-Network to Q-Target...')\n", (7437, 7473), False, 'import logging\n'), ((7500, 7534), 'tensorflow.compat.v1.trainable_variables', 'tf.compat.v1.trainable_variables', ([], {}), '()\n', (7532, 7534), True, 'import tensorflow as tf\n'), ((11614, 11716), 'tensorflow.compat.v1.keras.initializers.VarianceScaling', 'tf.compat.v1.keras.initializers.VarianceScaling', ([], {'scale': '(1.0)', 'mode': '"""fan_avg"""', 'distribution': '"""uniform"""'}), "(scale=1.0, mode='fan_avg',\n distribution='uniform')\n", (11661, 11716), True, 'import tensorflow as tf\n'), ((12123, 12216), 'tensorflow.compat.v1.losses.mean_squared_error', 'tf.compat.v1.losses.mean_squared_error', ([], {'labels': 'self.labels', 'predictions': 'self.predictions'}), '(labels=self.labels, predictions=self\n .predictions)\n', (12161, 12216), True, 'import tensorflow as tf\n'), ((12238, 12304), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': 'self.learning_rate'}), '(learning_rate=self.learning_rate)\n', (12270, 12304), True, 'import tensorflow as tf\n'), ((6362, 6388), 'numpy.zeros', 'np.zeros', (['self.output_size'], {}), '(self.output_size)\n', (6370, 6388), True, 'import numpy as np\n'), ((11262, 11364), 'tensorflow.compat.v1.keras.initializers.VarianceScaling', 'tf.compat.v1.keras.initializers.VarianceScaling', ([], {'scale': '(1.0)', 'mode': '"""fan_avg"""', 'distribution': '"""uniform"""'}), "(scale=1.0, mode='fan_avg',\n distribution='uniform')\n", (11309, 11364), True, 'import tensorflow as tf\n'), ((8746, 8775), 'numpy.expand_dims', 'np.expand_dims', (['state'], {'axis': '(0)'}), '(state, axis=0)\n', (8760, 8775), True, 'import numpy as np\n'), ((11896, 11917), 'tensorflow.exp', 'tf.exp', (['self.q_target'], {}), '(self.q_target)\n', (11902, 11917), True, 'import tensorflow as tf\n')] |
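The tau-weighted copy described in the constructor docstring (and implemented with TF variable assignments inside learn()) is just a convex mix of the two weight sets; a NumPy sketch of that update rule:
import numpy as np

tau = 0.1
main_w = np.array([1.0, 2.0, 3.0])    # main Q-network weights
target_w = np.array([0.0, 0.0, 0.0])  # q-target weights
target_w = tau * main_w + (1 - tau) * target_w
print(target_w)  # [0.1 0.2 0.3]; tau=1 copies main_w outright, tau=0 leaves target_w unchanged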
"""
Training utilities.
Author: <NAME> (<EMAIL>)
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import glob
import os
import sys
import tensorflow as tf
import time
from google.protobuf.text_format import Merge, MessageToString
from fewshot.data.data_factory import get_dataset
class ExperimentLogger():
def __init__(self, writer):
self._writer = writer
def log(self, name, niter, value, family=None):
tf.summary.scalar(name, float(value), step=niter)
def flush(self):
"""Flushes results to disk."""
self._writer.flush()
def close(self):
"""Closes writer."""
self._writer.close()
def save_config(config, save_folder):
"""Saves configuration to a file."""
if not os.path.isdir(save_folder):
os.makedirs(save_folder)
config_file = os.path.join(save_folder, "config.prototxt")
with open(config_file, "w") as f:
f.write(MessageToString(config))
cmd_file = os.path.join(save_folder, "cmd-{}.txt".format(int(time.time())))
if not os.path.exists(cmd_file):
with open(cmd_file, "w") as f:
f.write(' '.join(sys.argv))
def get_config(config_file, config_cls):
"""Reads configuration."""
config = config_cls()
Merge(open(config_file).read(), config)
return config
def get_data_fs(env_config, load_train=False):
"""Gets few-shot dataset."""
train_split = env_config.train_fs_split
if train_split is None or (train_split == env_config.train_split and
not load_train):
data_train_fs = None
else:
data_train_fs = get_dataset(env_config.dataset, env_config.data_folder,
env_config.train_fs_split)
if env_config.val_fs_split is None:
data_val_fs = None
else:
data_val_fs = get_dataset(env_config.dataset, env_config.data_folder,
env_config.val_fs_split)
if env_config.test_fs_split is None:
data_test_fs = None
else:
data_test_fs = get_dataset(env_config.dataset, env_config.data_folder,
env_config.test_fs_split)
return {
'train_fs': data_train_fs,
'val_fs': data_val_fs,
'test_fs': data_test_fs,
'metadata': env_config
}
def latest_file(folder, prefix):
"""Query the most recent checkpoint."""
list_of_files = glob.glob(os.path.join(folder, prefix + '*'))
if len(list_of_files) == 0:
return None
latest_file = max(list_of_files, key=lambda f: int(f.split('-')[-1]))
return latest_file
| [
"os.path.exists",
"os.makedirs",
"os.path.join",
"os.path.isdir",
"google.protobuf.text_format.MessageToString",
"time.time",
"fewshot.data.data_factory.get_dataset"
] | [((849, 893), 'os.path.join', 'os.path.join', (['save_folder', '"""config.prototxt"""'], {}), "(save_folder, 'config.prototxt')\n", (861, 893), False, 'import os\n'), ((776, 802), 'os.path.isdir', 'os.path.isdir', (['save_folder'], {}), '(save_folder)\n', (789, 802), False, 'import os\n'), ((808, 832), 'os.makedirs', 'os.makedirs', (['save_folder'], {}), '(save_folder)\n', (819, 832), False, 'import os\n'), ((1054, 1078), 'os.path.exists', 'os.path.exists', (['cmd_file'], {}), '(cmd_file)\n', (1068, 1078), False, 'import os\n'), ((1595, 1682), 'fewshot.data.data_factory.get_dataset', 'get_dataset', (['env_config.dataset', 'env_config.data_folder', 'env_config.train_fs_split'], {}), '(env_config.dataset, env_config.data_folder, env_config.\n train_fs_split)\n', (1606, 1682), False, 'from fewshot.data.data_factory import get_dataset\n'), ((1797, 1882), 'fewshot.data.data_factory.get_dataset', 'get_dataset', (['env_config.dataset', 'env_config.data_folder', 'env_config.val_fs_split'], {}), '(env_config.dataset, env_config.data_folder, env_config.val_fs_split\n )\n', (1808, 1882), False, 'from fewshot.data.data_factory import get_dataset\n'), ((1998, 2084), 'fewshot.data.data_factory.get_dataset', 'get_dataset', (['env_config.dataset', 'env_config.data_folder', 'env_config.test_fs_split'], {}), '(env_config.dataset, env_config.data_folder, env_config.\n test_fs_split)\n', (2009, 2084), False, 'from fewshot.data.data_factory import get_dataset\n'), ((2353, 2387), 'os.path.join', 'os.path.join', (['folder', "(prefix + '*')"], {}), "(folder, prefix + '*')\n", (2365, 2387), False, 'import os\n'), ((942, 965), 'google.protobuf.text_format.MessageToString', 'MessageToString', (['config'], {}), '(config)\n', (957, 965), False, 'from google.protobuf.text_format import Merge, MessageToString\n'), ((1030, 1041), 'time.time', 'time.time', ([], {}), '()\n', (1039, 1041), False, 'import time\n')] |
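latest_file simply picks the checkpoint whose name ends with the largest step number; a small self-contained check (the file names here are invented):
import os
import tempfile

tmp = tempfile.mkdtemp()
for step in (100, 2000, 500):
    open(os.path.join(tmp, 'weights-%d' % step), 'w').close()
print(latest_file(tmp, 'weights'))  # prints the path ending in 'weights-2000'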
# The original GA algorithm is here:
import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt
import math
class City:
def __init__(self, x, y):
self.x = x
self.y = y
def distance(self, city):
xDis = abs(self.x - city.x)
yDis = abs(self.y - city.y)
distance = np.sqrt((xDis ** 2) + (yDis ** 2))
return distance
def __repr__(self):
return "(" + str(self.x) + "," + str(self.y) + ")"
class Fitness:
def __init__(self, route):
self.route = route
self.distance = 0
self.fitness = 0.0
def routeDistance(self):
if self.distance == 0:
pathDistance = 0
for i in range(0, len(self.route)):
fromCity = self.route[i]
toCity = None
if i + 1 < len(self.route):
toCity = self.route[i + 1]
else:
toCity = self.route[0]
pathDistance += fromCity.distance(toCity)
self.distance = pathDistance
return self.distance
def routeFitness(self):
if self.fitness == 0:
dis = self.routeDistance()
self.fitness = dis
return self.fitness
def createRoute(cityList):
route = random.sample(cityList, len(cityList))
return route
def initialPopulation(popSize, cityList):
population = []
for i in range(0, popSize):
population.append(createRoute(cityList))
return population
def rankRoutes(population):
fitnessResults = {}
for i in range(0, len(population)):
fitnessResults[i] = Fitness(population[i]).routeFitness()
return sorted(fitnessResults.items(), key=operator.itemgetter(1), reverse=False)
def selection(popRanked, eliteSize):
selectionResults = []
for i in range(0, eliteSize):
selectionResults.append(popRanked[i][0])
popRanked_pre = popRanked[:len(popRanked)]
for i in range(0, len(popRanked) - eliteSize):
c1 = random.sample(popRanked_pre, 1)
c2 = random.sample(popRanked_pre, 1)
winner = None
        if c1[0][1] < c2[0][1]:  # fitness here stores route distance, so the shorter route wins the tournament
winner = c1
else:
winner = c2
selectionResults.append(winner[0][0])
return selectionResults
def matingPool(population, selectionResults):
matingpool = []
for i in range(0, len(selectionResults)):
index = selectionResults[i]
matingpool.append(population[index])
return matingpool
def breed(parent1, parent2):
child = []
childP1 = []
childP2 = []
geneA = int(random.random() * len(parent1))
geneB = int(random.random() * len(parent1))
startGene = min(geneA, geneB)
endGene = max(geneA, geneB)
for i in range(startGene, endGene):
childP1.append(parent1[i])
childP2 = [item for item in parent2 if item not in childP1]
child = childP1 + childP2
return child
def breedPopulation(matingpool, eliteSize):
children = []
length = len(matingpool) - eliteSize
pool = random.sample(matingpool, len(matingpool))
for i in range(0, eliteSize):
children.append(matingpool[i])
for i in range(0, length):
child = breed(pool[i], pool[len(matingpool) - i - 1])
children.append(child)
return children
def mutate(individual, mutationRate):
for swapped in range(len(individual)):
if (random.random() < mutationRate):
swapWith = int(random.random() * len(individual))
city1 = individual[swapped]
city2 = individual[swapWith]
individual[swapped] = city2
individual[swapWith] = city1
return individual
def mutatePopulation(population, mutationRate):
mutatedPop = []
for ind in range(0, len(population)):
mutatedInd = mutate(population[ind], mutationRate)
mutatedPop.append(mutatedInd)
return mutatedPop
def nextGeneration(currentGen, eliteSize, mutationRate):
popRanked = rankRoutes(currentGen)
selectionResults = selection(popRanked, eliteSize)
matingpool = matingPool(currentGen, selectionResults)
children = breedPopulation(matingpool, eliteSize)
    nextGen = mutatePopulation(children, mutationRate)
    return nextGen
def geneticAlgorithm(population, popSize, eliteSize, mutationRate, generations):
pop = initialPopulation(popSize, population)
print("Initial distance: " + str(1 / rankRoutes(pop)[0][1]))
for i in range(0, generations):
pop = nextGeneration(pop, eliteSize, mutationRate)
print("Final distance: " + str(1 / rankRoutes(pop)[0][1]))
bestRouteIndex = rankRoutes(pop)[0][0]
bestRoute = pop[bestRouteIndex]
return bestRoute
def plotting():
l1 = list()
for c in best:
l1.append([c.x, c.y])
l = np.asarray(l1)
plt.clf()
plt.scatter(l[:, 0].T, l[:, 1].T, s=10, c='k')
l1.append(l1[0])
l = np.asarray(l1)
plt.plot(l[:, 0].T, l[:, 1].T, 'r-')
# plt.show()
plt.savefig("berlin52_route.png")
def read_line(s):
l = s.split(' ')
return float(l[0]), float(l[1]), float(l[2])
def geneticAlgorithmPlot(population, popSize, eliteSize, mutationRate, generations):
pop = initialPopulation(popSize, population)
progress = []
progress.append(rankRoutes(pop)[0][1])
for i in range(0, generations):
pop = nextGeneration(pop, eliteSize, mutationRate)
print(i)
progress.append(rankRoutes(pop)[0][1])
plt.clf()
plt.plot(progress)
plt.ylabel('Distance')
plt.xlabel('Generation')
# plt.show()
plt.savefig("berlin52_distance.png")
print("Final distance: " + str(rankRoutes(pop)[0][1]))
bestRouteIndex = rankRoutes(pop)[0][0]
bestRoute = pop[bestRouteIndex]
return bestRoute
cityList = []
with open('./TSP_data', 'rt') as f:
for line in f:
a, b, c = read_line(line)
cityList.append(City(x=b, y=c))
best = geneticAlgorithmPlot(population=cityList, popSize=2000, eliteSize=1000, mutationRate=0.01, generations=2000)
plotting()
| [
"random.sample",
"matplotlib.pyplot.savefig",
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.clf",
"numpy.asarray",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.scatter",
"operator.itemgetter",
"random.random"
] | [((4760, 4774), 'numpy.asarray', 'np.asarray', (['l1'], {}), '(l1)\n', (4770, 4774), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((4779, 4788), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4786, 4788), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((4793, 4839), 'matplotlib.pyplot.scatter', 'plt.scatter', (['l[:, 0].T', 'l[:, 1].T'], {'s': '(10)', 'c': '"""k"""'}), "(l[:, 0].T, l[:, 1].T, s=10, c='k')\n", (4804, 4839), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((4869, 4883), 'numpy.asarray', 'np.asarray', (['l1'], {}), '(l1)\n', (4879, 4883), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((4888, 4924), 'matplotlib.pyplot.plot', 'plt.plot', (['l[:, 0].T', 'l[:, 1].T', '"""r-"""'], {}), "(l[:, 0].T, l[:, 1].T, 'r-')\n", (4896, 4924), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((4946, 4979), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""berlin52_route.png"""'], {}), "('berlin52_route.png')\n", (4957, 4979), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((5432, 5441), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5439, 5441), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((5446, 5464), 'matplotlib.pyplot.plot', 'plt.plot', (['progress'], {}), '(progress)\n', (5454, 5464), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((5469, 5491), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Distance"""'], {}), "('Distance')\n", (5479, 5491), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((5496, 5520), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Generation"""'], {}), "('Generation')\n", (5506, 5520), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((5542, 5578), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""berlin52_distance.png"""'], {}), "('berlin52_distance.png')\n", (5553, 5578), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((332, 362), 'numpy.sqrt', 'np.sqrt', (['(xDis ** 2 + yDis ** 2)'], {}), '(xDis ** 2 + yDis ** 2)\n', (339, 362), True, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((2023, 2054), 'random.sample', 'random.sample', (['popRanked_pre', '(1)'], {}), '(popRanked_pre, 1)\n', (2036, 2054), False, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((2068, 2099), 'random.sample', 'random.sample', (['popRanked_pre', '(1)'], {}), '(popRanked_pre, 1)\n', (2081, 2099), False, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((1724, 1746), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (1743, 1746), False, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((2605, 2620), 'random.random', 'random.random', ([], {}), '()\n', (2618, 2620), False, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((2653, 2668), 'random.random', 'random.random', ([], {}), '()\n', (2666, 2668), False, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n'), ((3414, 3429), 'random.random', 'random.random', ([], {}), '()\n', (3427, 3429), False, 'import numpy as np, random, operator, pandas as pd, 
matplotlib.pyplot as plt\n'), ((3474, 3489), 'random.random', 'random.random', ([], {}), '()\n', (3487, 3489), False, 'import numpy as np, random, operator, pandas as pd, matplotlib.pyplot as plt\n')] |
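A self-contained run of the same GA on a handful of random cities, reusing the City and geneticAlgorithm definitions above, with deliberately small settings so it finishes quickly (the berlin52 run uses much larger values):
import random

cities = [City(x=random.random() * 100, y=random.random() * 100) for _ in range(8)]
best_route = geneticAlgorithm(population=cities, popSize=50, eliteSize=10,
                             mutationRate=0.01, generations=50)
print(best_route)  # list of City objects in visiting order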
import sys
import yahooscraper as ys
from datetime import datetime, date
from urllib.parse import urljoin
# Environment variables
USERNAME_ENV = 'YAHOO_USERNAME'
PASSWORD_ENV = '<PASSWORD>'
# Command-line args
REQUIRED_ARGS = [
'<league_id>',
'<team_id>'
]
OPTIONAL_ARGS = []
# Error messages
LOGIN_ERROR_MSG = 'Failed to log in'
def usage():
"""
Print usage and exit
"""
msg_lines = [
' '.join((
'Usage: python',
sys.argv[0],
' '.join(REQUIRED_ARGS),
' '.join(OPTIONAL_ARGS))),
'Environment variables %s and %s must also be set' % (
USERNAME_ENV,
PASSWORD_ENV)]
sys.exit('\n\n'.join(msg_lines))
def required_num_args():
min_args = len(REQUIRED_ARGS) + 1
max_args = min_args + len(OPTIONAL_ARGS)
return range(min_args, max_args + 1)
def parsed_and_bounded_arg(i, max, min, parse):
"""
Returns parsed and bounded arg from argv.
The `parse` parameter is a single-argument function which is called with
the arg. The output of this function is only returned if it is between
min and max.
If parse fails or arg is not within bounds, None is returned.
"""
if len(sys.argv) > i:
try:
parsed_arg = parse(sys.argv[i])
return parsed_arg if min <= parsed_arg <= max else None
except:
return None
else:
return None
def date_from_argv(i, max, min=date.today()):
return parsed_and_bounded_arg(
i, max, min,
lambda arg: datetime.strptime(arg, '%Y-%m-%d').date())
def int_from_argv(i, max, min=1):
return parsed_and_bounded_arg(i, max, min, lambda arg: int(arg))
def output_team_info(session, league_id, team_id):
"""
Output team name and league
"""
response = session.get(ys.fantasy.team.url('nba', league_id, team_id))
league = ys.fantasy.team.league(response.text)
team = ys.fantasy.team.team(response.text)
print('%s - %s:\n' % (league, team))
| [
"datetime.datetime.strptime",
"yahooscraper.fantasy.team.league",
"yahooscraper.fantasy.team.url",
"yahooscraper.fantasy.team.team",
"datetime.date.today"
] | [((1473, 1485), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1483, 1485), False, 'from datetime import datetime, date\n'), ((1901, 1938), 'yahooscraper.fantasy.team.league', 'ys.fantasy.team.league', (['response.text'], {}), '(response.text)\n', (1923, 1938), True, 'import yahooscraper as ys\n'), ((1950, 1985), 'yahooscraper.fantasy.team.team', 'ys.fantasy.team.team', (['response.text'], {}), '(response.text)\n', (1970, 1985), True, 'import yahooscraper as ys\n'), ((1840, 1886), 'yahooscraper.fantasy.team.url', 'ys.fantasy.team.url', (['"""nba"""', 'league_id', 'team_id'], {}), "('nba', league_id, team_id)\n", (1859, 1886), True, 'import yahooscraper as ys\n'), ((1564, 1598), 'datetime.datetime.strptime', 'datetime.strptime', (['arg', '"""%Y-%m-%d"""'], {}), "(arg, '%Y-%m-%d')\n", (1581, 1598), False, 'from datetime import datetime, date\n')] |
import sys
import os
from .gdb import Gdb
from .oocd import Oocd
from .hw_specific import *
# useful if there is some variety of naming
hw_names = {
"esp32s2beta": "Esp32_S2",
"esp32s2_beta": "Esp32_S2",
"esp32_s2beta": "Esp32_S2",
"esp32_s2_beta": "Esp32_S2",
"esp32s2": "Esp32_S2",
"esp32-s2": "Esp32_S2",
"esp32-s2beta": "Esp32_S2",
"esp32-s2-beta": "Esp32_S2",
"esp32s2-beta": "Esp32_S2",
"esp_32": "Esp32",
"esp-32": "Esp32",
}
def get_hw_list():
hw_list = []
p = os.path.dirname(__file__)
files = os.listdir(os.path.join(p, "hw_specific"))
for f in files:
hw_list.append(os.path.splitext(f)[0])
return hw_list
def _str_to_class(classname):
return getattr(sys.modules[__name__], classname)
def get_good_name(some_name):
"""
Parameters
----------
some_name str - some name to check
Returns
-------
str - good name, recognizable by the Backend
"""
good_name = "" # empty string by default
    if (some_name is None) or (not len(some_name)):  # bail out if some_name is missing or empty
return good_name
better_name = hw_names.get(some_name.strip().lower(), some_name) # if there is no conversion - keep it
hw_list = get_hw_list()
for hw in hw_list:
if better_name.lower() == hw.lower(): # lower for being case insensitive
good_name = hw
break
# if nothing was found - stays ""
return good_name
def get_gdb(chip_name=None,
gdb_path=None,
log_level=None,
log_stream_handler=None,
log_file_handler=None,
log_gdb_proc_file=None,
remote_target=None,
remote_address=None,
remote_port=None, **kwargs):
"""
set to != None value to redefine get_gdb logic
Parameters
----------
chip_name : Any(None, str)
gdb_path : Any(None, str)
log_level : Any(None, str)
log_stream_handler : Any(None, str)
log_file_handler : Any(None, str)
log_gdb_proc_file : Any(None, str)
remote_target : Any(None, str)
remote_address : Any(None, str)
remote_port : Any(None, str)
Returns
-------
Gdb
"""
_gdb = _str_to_class("Gdb" + get_good_name(chip_name))
return _gdb(gdb_path=gdb_path,
log_level=log_level,
log_stream_handler=log_stream_handler,
log_file_handler=log_file_handler,
log_gdb_proc_file=log_gdb_proc_file,
remote_target=remote_target,
remote_address=remote_address,
remote_port=remote_port, **kwargs)
def get_oocd(chip_name=None,
oocd_exec=None,
oocd_scripts=None,
oocd_args=None,
ip=None,
log_level=None,
log_stream_handler=None,
log_file_handler=None,
**kwargs):
"""
    set to != None value to redefine get_oocd logic
Parameters
----------
chip_name : Any(None, str)
oocd_exec : Any(None, str)
oocd_scripts : Any(None, str)
oocd_args : Any(None, str)
ip : Any(None, str)
log_level : Any(None, str)
log_stream_handler : Any(None, str)
log_file_handler : Any(None, str)
Returns
-------
Any
"""
_oocd = _str_to_class("Oocd" + get_good_name(chip_name))
return _oocd(chip_name=chip_name,
oocd_exec=oocd_exec,
oocd_scripts=oocd_scripts,
oocd_args=oocd_args,
ip=ip,
log_level=log_level,
log_stream_handler=log_stream_handler,
log_file_handler=log_file_handler, **kwargs)
| [
"os.path.dirname",
"os.path.splitext",
"os.path.join"
] | [((525, 550), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (540, 550), False, 'import os\n'), ((574, 604), 'os.path.join', 'os.path.join', (['p', '"""hw_specific"""'], {}), "(p, 'hw_specific')\n", (586, 604), False, 'import os\n'), ((649, 668), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (665, 668), False, 'import os\n')] |
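get_good_name is deliberately forgiving about spelling and case; a few illustrative lookups, assuming hw_specific/ ships modules named Esp32.py and Esp32_S2.py:
for name in ('ESP32-S2beta', 'esp_32', 'unknown-chip'):
    print(repr(name), '->', repr(get_good_name(name)))
# expected results: 'Esp32_S2', 'Esp32' and '' (empty string means "unrecognised")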
import FWCore.ParameterSet.Config as cms
from RecoJets.JetProducers.PFClusterJetParameters_cfi import *
from RecoJets.JetProducers.AnomalousCellParameters_cfi import *
ak4PFClusterJets = cms.EDProducer(
"FastjetJetProducer",
PFClusterJetParameters,
AnomalousCellParameters,
jetAlgorithm = cms.string("AntiKt"),
rParam = cms.double(0.4)
)
| [
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.double"
] | [((307, 327), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""AntiKt"""'], {}), "('AntiKt')\n", (317, 327), True, 'import FWCore.ParameterSet.Config as cms\n'), ((348, 363), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(0.4)'], {}), '(0.4)\n', (358, 363), True, 'import FWCore.ParameterSet.Config as cms\n')] |
#!/usr/bin/env python3
import argparse
import os
import subprocess
import sys
from shutil import copyfile
from parser import parse, generate_ast, apply_transformations, ASTDump
from parse_debug import process_debug_info
def parse_args(argv):
parser = argparse.ArgumentParser(description='Simplified CC1 frontend')
parser.add_argument('--qinclude', action='append', help='Include Paths for iquote', required=False)
parser.add_argument('--binclude', action='append', help='Include Paths for Block Include', required=False)
parser.add_argument('--cc1', help='<Required> cc1 Path', required=False)
parser.add_argument('--version', help='Get Version String of cc1', required=False)
parser.add_argument('--preproc', help='preproc path', required=False)
parser.add_argument('--charmap', help='preproc charmap', required=False)
parser.add_argument('-S', action='store_true', help='Ignore parameter as agbcc does not know it', required=False)
parser.add_argument('-o', help='Output Assembly file', required=False, dest='destination')
parser.add_argument('--no-parse', action='store_true', help='disable parsing of agbcc output (debug option)',
required=False)
return parser.parse_known_args(argv)
def compile(source, output_filename, args, remainder):
cpp_args = ["cpp", "-nostdinc", "-undef"]
# Add Block Includes and Quote Includes
if args.qinclude:
for q in args.qinclude:
cpp_args += ["-iquote", q]
if args.binclude:
for b in args.binclude:
cpp_args += ["-I", b]
cpp_args += [source, "-o", source + ".i"]
subprocess.call(cpp_args)
if args.preproc and args.charmap:
pprocess = subprocess.Popen([args.preproc, source + '.i', args.charmap], stdout=subprocess.PIPE)
subprocess.call([args.cc1] + ['-o', output_filename] + remainder, stdin=pprocess.stdout)
else:
with open(source + '.i', 'r') as a:
subprocess.call([args.cc1] + ['-o', output_filename] + remainder, stdin=a)
def process_asm(input_filename, output_filename):
tree, success = parse(input_filename)
if not success:
raise ValueError('could not parse file')
ast = generate_ast(tree)
apply_transformations(ast)
with open(output_filename, 'w') as destination_file:
ASTDump(destination_file).visit(ast)
def cleanup(args, source):
for file in [f'{source}.i', f'{args.destination}.tmp']:
if os.path.exists(file):
os.remove(file)
def main(argv):
status_code = 0
args, remainder = parse_args(argv)
if args.version:
git_proc = subprocess.run(['git', '--git-dir=' + args.version + '/.git', 'rev-parse', '--short', 'HEAD'],
stdout=subprocess.PIPE)
print("pycc frontend for agbcc1 " + os.path.basename(args.version) + "@" + git_proc.stdout.decode('utf-8'))
exit(0)
source = remainder.pop(-1)
try:
if source.endswith('.c'):
asm_file = args.destination + '.tmp'
compile(source, asm_file, args, remainder)
process_debug_info(asm_file)
else:
asm_file = source
if not args.no_parse:
try:
process_asm(asm_file, args.destination)
except Exception as e:
print(f'error cleaning assembly code: {e}\nOutputting unprocessed assembly', file=sys.stderr)
copyfile(asm_file, args.destination)
status_code = 1
else:
copyfile(asm_file, args.destination)
finally:
cleanup(args, source)
exit(status_code)
if __name__ == '__main__':
main(sys.argv[1:])
| [
"os.path.exists",
"parser.ASTDump",
"argparse.ArgumentParser",
"parser.parse",
"subprocess.Popen",
"subprocess.run",
"os.remove",
"shutil.copyfile",
"subprocess.call",
"parse_debug.process_debug_info",
"os.path.basename",
"parser.apply_transformations",
"parser.generate_ast"
] | [((258, 320), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Simplified CC1 frontend"""'}), "(description='Simplified CC1 frontend')\n", (281, 320), False, 'import argparse\n'), ((1640, 1665), 'subprocess.call', 'subprocess.call', (['cpp_args'], {}), '(cpp_args)\n', (1655, 1665), False, 'import subprocess\n'), ((2119, 2140), 'parser.parse', 'parse', (['input_filename'], {}), '(input_filename)\n', (2124, 2140), False, 'from parser import parse, generate_ast, apply_transformations, ASTDump\n'), ((2220, 2238), 'parser.generate_ast', 'generate_ast', (['tree'], {}), '(tree)\n', (2232, 2238), False, 'from parser import parse, generate_ast, apply_transformations, ASTDump\n'), ((2243, 2269), 'parser.apply_transformations', 'apply_transformations', (['ast'], {}), '(ast)\n', (2264, 2269), False, 'from parser import parse, generate_ast, apply_transformations, ASTDump\n'), ((1723, 1813), 'subprocess.Popen', 'subprocess.Popen', (["[args.preproc, source + '.i', args.charmap]"], {'stdout': 'subprocess.PIPE'}), "([args.preproc, source + '.i', args.charmap], stdout=\n subprocess.PIPE)\n", (1739, 1813), False, 'import subprocess\n'), ((1817, 1910), 'subprocess.call', 'subprocess.call', (["([args.cc1] + ['-o', output_filename] + remainder)"], {'stdin': 'pprocess.stdout'}), "([args.cc1] + ['-o', output_filename] + remainder, stdin=\n pprocess.stdout)\n", (1832, 1910), False, 'import subprocess\n'), ((2472, 2492), 'os.path.exists', 'os.path.exists', (['file'], {}), '(file)\n', (2486, 2492), False, 'import os\n'), ((2639, 2761), 'subprocess.run', 'subprocess.run', (["['git', '--git-dir=' + args.version + '/.git', 'rev-parse', '--short', 'HEAD']"], {'stdout': 'subprocess.PIPE'}), "(['git', '--git-dir=' + args.version + '/.git', 'rev-parse',\n '--short', 'HEAD'], stdout=subprocess.PIPE)\n", (2653, 2761), False, 'import subprocess\n'), ((1972, 2046), 'subprocess.call', 'subprocess.call', (["([args.cc1] + ['-o', output_filename] + remainder)"], {'stdin': 'a'}), "([args.cc1] + ['-o', output_filename] + remainder, stdin=a)\n", (1987, 2046), False, 'import subprocess\n'), ((2506, 2521), 'os.remove', 'os.remove', (['file'], {}), '(file)\n', (2515, 2521), False, 'import os\n'), ((3114, 3142), 'parse_debug.process_debug_info', 'process_debug_info', (['asm_file'], {}), '(asm_file)\n', (3132, 3142), False, 'from parse_debug import process_debug_info\n'), ((3547, 3583), 'shutil.copyfile', 'copyfile', (['asm_file', 'args.destination'], {}), '(asm_file, args.destination)\n', (3555, 3583), False, 'from shutil import copyfile\n'), ((2335, 2360), 'parser.ASTDump', 'ASTDump', (['destination_file'], {}), '(destination_file)\n', (2342, 2360), False, 'from parser import parse, generate_ast, apply_transformations, ASTDump\n'), ((3452, 3488), 'shutil.copyfile', 'copyfile', (['asm_file', 'args.destination'], {}), '(asm_file, args.destination)\n', (3460, 3488), False, 'from shutil import copyfile\n'), ((2836, 2866), 'os.path.basename', 'os.path.basename', (['args.version'], {}), '(args.version)\n', (2852, 2866), False, 'import os\n')] |
# AUTOGENERATED FILE - DO NOT MODIFY!
# This file generated by Djinni from constants.djinni
from djinni.support import MultiSet # default imported in all files
from djinni.exception import CPyException # default imported in all files
from djinni.pycffi_marshal import CPyPrimitive, CPyRecord, CPyString
from PyCFFIlib_cffi import ffi, lib
from djinni import exception # this forces run of __init__.py which gives cpp option to call back into py to create exception
class ConstantRecord:
""" Record for use in constants """
c_data_set = MultiSet()
@staticmethod
def check_c_data_set_empty():
assert len(ConstantRecord.c_data_set) == 0
def __init__(self, some_integer, some_string):
self.some_integer = some_integer
self.some_string = some_string
| [
"djinni.support.MultiSet"
] | [((548, 558), 'djinni.support.MultiSet', 'MultiSet', ([], {}), '()\n', (556, 558), False, 'from djinni.support import MultiSet\n')] |
import boto3
from botocore.config import Config
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from rest_framework import status
from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Organization, Tag, WelcomeMessage
from misc.models import File
from .serializers import BaseOrganizationSerializer, DetailOrganizationSerializer, \
WelcomeMessageSerializer, ExportSerializer
from misc.serializers import FileSerializer
from users.permissions import NewHirePermission, AdminPermission
from django.core import management
from sequences.models import Sequence
def home(request):
return render(request, 'index.html')
class OrgView(APIView):
permission_classes = (IsAuthenticatedOrReadOnly,)
def get(self, request):
org = BaseOrganizationSerializer(Organization.object.get())
return Response(org.data)
class OrgDetailView(APIView):
def get(self, request):
org = DetailOrganizationSerializer(Organization.object.get())
return Response(org.data)
def patch(self, request):
serializer = DetailOrganizationSerializer(Organization.object.get(), data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
Sequence.objects.all().update(auto_add=False)
if 'auto_add_sequence' in request.data:
for i in request.data['auto_add_sequence']:
seq = Sequence.objects.get(id=i)
seq.auto_add = True
seq.save()
serializer.save()
return Response(serializer.data)
class WelcomeMessageView(APIView):
permission_classes = (IsAuthenticatedOrReadOnly,)
def get(self, request):
welcome_messages = WelcomeMessage.objects.all()
serializer = WelcomeMessageSerializer(welcome_messages, many=True)
return Response(serializer.data)
def post(self, request):
serializer = WelcomeMessageSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
welcome_message = WelcomeMessage.objects.get(language=serializer.data['language'], message_type=serializer.data['message_type'])
welcome_message.message = serializer.data['message']
welcome_message.save()
return Response(serializer.data)
class TagView(APIView):
permission_classes = (IsAuthenticatedOrReadOnly,)
def get(self, request):
tags = [i.name for i in Tag.objects.all()]
return Response(tags)
class CSRFTokenView(APIView):
permission_classes = (AllowAny,)
@method_decorator(ensure_csrf_cookie)
def get(self, request):
return HttpResponse()
class FileView(APIView):
permission_classes = (AdminPermission, NewHirePermission)
def get(self, request, id, uuid):
file = get_object_or_404(File, uuid=uuid, id=id)
url = file.get_url()
return Response(url)
def post(self, request):
serializer = FileSerializer(data={'name': request.data['name'], 'ext': request.data['name'].split('.')[1]})
serializer.is_valid(raise_exception=True)
f = serializer.save()
key = str(f.id) + '-' + request.data['name'].split('.')[0] + '/' + request.data['name']
f.key = key
f.save()
s3 = boto3.client('s3',
settings.AWS_REGION,
endpoint_url=settings.AWS_S3_ENDPOINT_URL,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version='s3v4')
)
url = s3.generate_presigned_url(ClientMethod='put_object', ExpiresIn=3600,
Params={'Bucket': settings.AWS_STORAGE_BUCKET_NAME, 'Key': key})
return Response({'url': url, 'id': f.id})
def put(self, request, id):
file = get_object_or_404(File, pk=id)
file.active = True
file.save()
return Response(FileSerializer(file).data)
def delete(self, request, id):
if request.user.role == 1:
file = get_object_or_404(File, pk=id)
file.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class LogoView(APIView):
def put(self, request, id):
file = get_object_or_404(File, pk=id)
file.active = True
file.save()
org = Organization.object.get()
org.logo = file
org.save()
return Response(FileSerializer(file).data)
class ExportView(APIView):
def post(self, request):
from io import StringIO
import json
from django.core.files.base import ContentFile
buf = StringIO()
serializer = ExportSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
management.call_command('dumpdata', serializer.data['export_model'], stdout=buf, natural_foreign=True)
buf.seek(0)
return Response(json.loads(buf.read()))
| [
"django.shortcuts.render",
"misc.serializers.FileSerializer",
"django.core.management.call_command",
"sequences.models.Sequence.objects.get",
"django.http.HttpResponse",
"botocore.config.Config",
"django.shortcuts.get_object_or_404",
"django.utils.decorators.method_decorator",
"rest_framework.respon... | [((911, 940), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (917, 940), False, 'from django.shortcuts import get_object_or_404, render\n'), ((2818, 2854), 'django.utils.decorators.method_decorator', 'method_decorator', (['ensure_csrf_cookie'], {}), '(ensure_csrf_cookie)\n', (2834, 2854), False, 'from django.utils.decorators import method_decorator\n'), ((1133, 1151), 'rest_framework.response.Response', 'Response', (['org.data'], {}), '(org.data)\n', (1141, 1151), False, 'from rest_framework.response import Response\n'), ((1298, 1316), 'rest_framework.response.Response', 'Response', (['org.data'], {}), '(org.data)\n', (1306, 1316), False, 'from rest_framework.response import Response\n'), ((1820, 1845), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1828, 1845), False, 'from rest_framework.response import Response\n'), ((2112, 2137), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (2120, 2137), False, 'from rest_framework.response import Response\n'), ((2527, 2552), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (2535, 2552), False, 'from rest_framework.response import Response\n'), ((2728, 2742), 'rest_framework.response.Response', 'Response', (['tags'], {}), '(tags)\n', (2736, 2742), False, 'from rest_framework.response import Response\n'), ((2898, 2912), 'django.http.HttpResponse', 'HttpResponse', ([], {}), '()\n', (2910, 2912), False, 'from django.http import HttpResponse\n'), ((3056, 3097), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['File'], {'uuid': 'uuid', 'id': 'id'}), '(File, uuid=uuid, id=id)\n', (3073, 3097), False, 'from django.shortcuts import get_object_or_404, render\n'), ((3142, 3155), 'rest_framework.response.Response', 'Response', (['url'], {}), '(url)\n', (3150, 3155), False, 'from rest_framework.response import Response\n'), ((4113, 4147), 'rest_framework.response.Response', 'Response', (["{'url': url, 'id': f.id}"], {}), "({'url': url, 'id': f.id})\n", (4121, 4147), False, 'from rest_framework.response import Response\n'), ((4196, 4226), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['File'], {'pk': 'id'}), '(File, pk=id)\n', (4213, 4226), False, 'from django.shortcuts import get_object_or_404, render\n'), ((4487, 4530), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_204_NO_CONTENT'}), '(status=status.HTTP_204_NO_CONTENT)\n', (4495, 4530), False, 'from rest_framework.response import Response\n'), ((4606, 4636), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['File'], {'pk': 'id'}), '(File, pk=id)\n', (4623, 4636), False, 'from django.shortcuts import get_object_or_404, render\n'), ((4998, 5008), 'io.StringIO', 'StringIO', ([], {}), '()\n', (5006, 5008), False, 'from io import StringIO\n'), ((5124, 5231), 'django.core.management.call_command', 'management.call_command', (['"""dumpdata"""', "serializer.data['export_model']"], {'stdout': 'buf', 'natural_foreign': '(True)'}), "('dumpdata', serializer.data['export_model'], stdout\n =buf, natural_foreign=True)\n", (5147, 5231), False, 'from django.core import management\n'), ((4415, 4445), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['File'], {'pk': 'id'}), '(File, pk=id)\n', (4432, 4445), False, 'from django.shortcuts import get_object_or_404, render\n'), ((1516, 1538), 
'sequences.models.Sequence.objects.all', 'Sequence.objects.all', ([], {}), '()\n', (1536, 1538), False, 'from sequences.models import Sequence\n'), ((1688, 1714), 'sequences.models.Sequence.objects.get', 'Sequence.objects.get', ([], {'id': 'i'}), '(id=i)\n', (1708, 1714), False, 'from sequences.models import Sequence\n'), ((3849, 3881), 'botocore.config.Config', 'Config', ([], {'signature_version': '"""s3v4"""'}), "(signature_version='s3v4')\n", (3855, 3881), False, 'from botocore.config import Config\n'), ((4298, 4318), 'misc.serializers.FileSerializer', 'FileSerializer', (['file'], {}), '(file)\n', (4312, 4318), False, 'from misc.serializers import FileSerializer\n'), ((4791, 4811), 'misc.serializers.FileSerializer', 'FileSerializer', (['file'], {}), '(file)\n', (4805, 4811), False, 'from misc.serializers import FileSerializer\n')] |
import torch
import torch.nn.functional as F
def focal_loss(input: torch.Tensor, target: torch.Tensor, alpha: float = 0.25, gamma: float = 2, reduction: str = 'none'):
    # Focal loss on raw class logits: down-weights well-classified examples by the factor (1 - p_t)^gamma.
    pt = F.softmax(input, dim=-1)
    log_pt = F.log_softmax(input, dim=-1)
    # nll_loss picks out the target-class entry and negates it, giving -alpha * (1 - p_t)^gamma * log(p_t).
    loss = F.nll_loss(alpha * (1 - pt).pow(gamma) * log_pt, target, reduction=reduction)
    return loss
def iou_loss_with_distance(input: torch.Tensor, target: torch.Tensor, reduction: str = 'none'):
    # Boxes appear to be encoded as per-side distances from a reference point:
    # columns (0, 1) and (2, 3) form the two opposite side pairs, so area = (d0 + d1) * (d2 + d3).
    eps = 1e-8
    def _calc_area(t):
        return (t[:, 1] + t[:, 0]) * (t[:, 3] + t[:, 2])
    # The element-wise minimum of the two distance vectors describes the intersection box.
    inter = _calc_area(torch.minimum(input, target))
    union = _calc_area(input) + _calc_area(target) - inter
    iou = inter / union.clamp(min=eps)
    loss = -iou.log()
if reduction == 'sum':
return loss.sum()
elif reduction == 'mean':
return loss.mean()
else:
return loss
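# --- Illustrative usage sketch (added; not part of the original snippet) ---
# Shapes are assumptions: class logits of shape (N, C) for focal_loss, and boxes
# encoded as four per-side distances with shape (N, 4) for the IoU loss.
if __name__ == '__main__':
    logits = torch.randn(8, 5)
    labels = torch.randint(0, 5, (8,))
    print(focal_loss(logits, labels, reduction='mean'))
    pred_boxes = torch.rand(8, 4)
    gt_boxes = torch.rand(8, 4)
    print(iou_loss_with_distance(pred_boxes, gt_boxes, reduction='mean'))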
| [
"torch.minimum",
"torch.nn.functional.softmax",
"torch.nn.functional.log_softmax"
] | [((179, 203), 'torch.nn.functional.softmax', 'F.softmax', (['input'], {'dim': '(-1)'}), '(input, dim=-1)\n', (188, 203), True, 'import torch.nn.functional as F\n'), ((217, 245), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['input'], {'dim': '(-1)'}), '(input, dim=-1)\n', (230, 245), True, 'import torch.nn.functional as F\n'), ((569, 597), 'torch.minimum', 'torch.minimum', (['input', 'target'], {}), '(input, target)\n', (582, 597), False, 'import torch\n')] |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import itertools
import operator
import autograd.extend as ag_extend
import autograd.numpy as np
import autograd.numpy.numpy_vspaces as numpy_vspaces
import autograd.tracer as ag_tracer
import funcsigs
from .patterns import (Subtract, Add, Dot, Multiply, Divide, TrueDivide, Node,
Val, Einsum, Str, Choice, Segment, Log, Sum, Tuple,
VSpaceAdd, Any, Power, Scalar, OneHot, Transpose, Inv,
Logdet, AddN, Star)
from .tracers import add_n
from .tracers import logdet
from .tracers import make_dummy
from .tracers import subvals
from .util import split_einsum_formula
from . import matchers
from . import patterns
from . import tracers
_einsum_range = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
_einsum_index_set = frozenset(_einsum_range)
### eager rewrites replace individual functions with constant-folding versions
def is_constant(x):
return not ag_tracer.isbox(x)
def _is_constant_val(x, val):
return is_constant(x) and np.all(x == val)
_is_constant_zero = functools.partial(_is_constant_val, val=0.)
_is_constant_one = functools.partial(_is_constant_val, val=1.)
def _multiply_as_einsum(x, y):
x_arr, y_arr = np.array(x), np.array(y)
new_shape = np.broadcast(x_arr, y_arr).shape
out_formula = _einsum_range[:len(new_shape)]
next_index = iter(_einsum_range[len(new_shape):])
def _make_broadcast_formula(z):
offset = len(new_shape) - len(z.shape)
return ''.join([out_formula[offset + i]
if z.shape[i] == new_shape[offset + i]
else next_index.next()
for i in range(len(z.shape))])
new_formula = '{},{}->{}'.format(_make_broadcast_formula(x_arr),
_make_broadcast_formula(y_arr),
out_formula)
return np.einsum(new_formula, x, y)
def maybe_multiply(x, y):
if _is_constant_zero(x) or _is_constant_zero(y):
return np.zeros(np.broadcast(x, y).shape, dtype=np.result_type(x, y))
if _is_constant_one(x) and np.shape(y) == np.broadcast(x, y).shape:
return y
if _is_constant_one(y) and np.shape(x) == np.broadcast(x, y).shape:
return x
return _multiply_as_einsum(x, y)
def maybe_add(x, y):
if _is_constant_zero(x) and np.shape(y) == np.broadcast(x, y).shape:
return y
if _is_constant_zero(y) and np.shape(x) == np.broadcast(x, y).shape:
return x
return add_n(x, y)
def maybe_subtract(x, y):
if _is_constant_zero(y) and np.shape(x) == np.broadcast(x, y).shape:
return x
return add_n(x, _multiply_as_einsum(-1, y))
def maybe_getitem(x, idx):
if isinstance(idx, slice):
return list(x)[idx]
else:
return x[idx]
def dot_as_einsum(x, y):
if x.ndim == 0 or y.ndim == 0: return np.einsum(',->', x, y)
if x.ndim == y.ndim == 1: return np.einsum('i,i->', x, y)
if x.ndim == 2 and y.ndim == 1: return np.einsum('ij,j->i', x, y)
if x.ndim == 1 and y.ndim == 2: return np.einsum('i,ij->j', x, y)
return np.einsum('{}ab,{}bc->{}ac'.format(
_einsum_range[:x.ndim-2][::-1], _einsum_range[:y.ndim-2][::-1],
_einsum_range[:max([x.ndim, y.ndim])-2][::-1]), x, y)
def maybe_divide(x, y):
if _is_constant_one(y) and np.shape(x) == np.broadcast(x, y).shape:
return x
elif _is_constant_one(x) and np.shape(y) == np.broadcast(x, y).shape:
return y ** -1
return _multiply_as_einsum(x, y ** -1)
# TODO(mhoffman): Consider exponent == 0. E.g., what if base could also be 0?
def maybe_power(base, exponent):
if exponent == 1:
return base
elif exponent == 0:
return 1
elif isinstance(exponent, int) and exponent > 0 and exponent < 10:
formula = ''.join([_einsum_range[i] for i in range(len(base.shape))])
in_formulas = [formula for _ in range(exponent)]
out_formula = formula
formula = _reconstitute_einsum_formula(in_formulas, out_formula)
args = [base for _ in range(exponent)]
return np.einsum(formula, *args)
else:
return base ** exponent
def _rename_formula_indices(formula):
"""Renames einsum formula indices to be in a canonical order."""
# First, ensure that indices are packed.
translation_dict = {index: _einsum_range[i] for i, index in
enumerate(np.unique([index for index in formula
if index in _einsum_index_set]))}
translator = lambda x: translation_dict[x] if x in translation_dict else x
formula = [translator(i) for i in formula]
# Next, ensure that they're alphabetical in order of appearance.
translation_dict = {}
for index in formula:
if index not in translation_dict and index in _einsum_index_set:
translation_dict[index] = _einsum_range[len(translation_dict)]
return ''.join([translator(i) for i in formula])
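# Illustrative example (added, not from the original source): indices are repacked
# and renamed in order of first appearance, so a formula such as
#   _rename_formula_indices('kj,ji->ki')
# should come out as 'ab,bc->ac'.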
def debroadcast_formula(formula, *arg_ndims):
"""Given an einsum's formula string and the dimensions of the arguments
provided to the einsum, converts any broadcasting ellipses into appropriate
letters.
"""
formula = _rename_formula_indices(formula)
num_chars = len(_einsum_index_set.intersection(set(formula)))
remaining_letters = _einsum_range[num_chars:]
in_formulas, out_formula = split_einsum_formula(formula)
max_ellipsis_dims = -float('inf')
for i, in_formula in enumerate(in_formulas):
in_formula = decompose_formula(in_formula)
if '...' in in_formula:
num_ellipsis_dims = arg_ndims[i]-len(in_formula)+1
max_ellipsis_dims = max(max_ellipsis_dims, num_ellipsis_dims)
ellipsis_idx = in_formula.index('...')
in_formula[ellipsis_idx] = remaining_letters[:num_ellipsis_dims][::-1]
in_formulas[i] = ''.join(in_formula)
if '...' in out_formula:
out_formula = out_formula.replace(
'...', remaining_letters[:max_ellipsis_dims][::-1])
new_formula = _reconstitute_einsum_formula(in_formulas, out_formula)
return _rename_formula_indices(new_formula)
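# Illustrative example (added, not from the original source): with two 3-D
# arguments the ellipses expand to concrete letters, e.g.
#   debroadcast_formula('...i,...i->...', 3, 3)
# should come out roughly as 'abc,abc->ab'.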
def _zeros_like_einsum(formula, args1, args2):
args = args1 + args2
input_formulas, output_formula = split_einsum_formula(formula)
output_formula = decompose_formula(output_formula)
input_formulas = input_formulas[:len(args1)] + input_formulas[len(args1)+1:]
input_formulas = [decompose_formula(input_formula) for
input_formula in input_formulas]
out_shape = []
for output_index in output_formula:
for i, input_formula in enumerate(input_formulas):
position = input_formula.index(output_index)
if position != -1 and output_index != '...':
out_shape.append(args[i].shape[position])
break
elif position != -1 and output_index == '...':
for offset in range(args[i].ndim-len(input_formula)+1):
out_shape.append(args[i].shape[position+offset])
return np.zeros(out_shape, dtype=np.result_type(*args))
def maybe_einsum(formula, *args):
formula = debroadcast_formula(formula, *[np.ndim(arg) for arg in args])
if any(_is_constant_zero(arg) for arg in args):
return _zeros_like_einsum(formula, args, ())
if len(args) == 1:
input_formulas, output_formula = split_einsum_formula(formula)
if input_formulas[0] == output_formula:
return args[0]
return constant_folding_einsum(formula, *args)
def maybe_vspace_add(vs, x_prev, x_new):
if x_prev is None:
return x_new
if isinstance(vs, numpy_vspaces.ArrayVSpace):
return maybe_add(x_prev, x_new)
return vs.add(x_prev, x_new)
def swapaxes(x, axis1, axis2):
"""Implements np.swapaxes as an np.einsum."""
in_formula = _einsum_range[:len(x.shape)]
out_formula = list(in_formula)
out_formula[axis1] = in_formula[axis2]
out_formula[axis2] = in_formula[axis1]
return np.einsum('{}->{}'.format(in_formula, ''.join(out_formula)), x)
### rewrite rules replace whole subgraphs with other subgraphs
class Rule(collections.namedtuple('BasicRule',
['pattern', 'rewriter', 'preds'])):
def __new__(cls, pattern, rewriter, preds=()):
return super(Rule, cls).__new__(cls, pattern, rewriter, preds)
_add_pattern = Choice((Add, Val('x'), (Add, Val('y'), Val('z'))),
(Add, (Add, Val('x'), Val('y')), Val('z')))
replace_add = Rule(_add_pattern, lambda x, y, z: add_n(x, y, z))
_add_addn_pattern = Choice((Add, Val('x'), (AddN, Segment('args'))),
(Add, (AddN, Segment('args')), Val('x')))
replace_add_addn = Rule(_add_addn_pattern,
lambda x, args: add_n(x, *args))
_addn_addn_pattern = (AddN,
Segment('args1'),
(AddN, Segment('parent_args')),
Segment('args2'))
replace_addn_addn = Rule(
_addn_addn_pattern,
lambda args1, parent_args, args2: add_n(*(parent_args + args1 + args2)))
def _duplicated_addn(x, args1, args2, args3):
return add_n(2 * x, *(args1 + args2 + args3))
_duplicated_addn_pattern = (AddN,
Segment('args1'),
Val('x'),
Segment('args2'),
Val('x'),
Segment('args3'))
replace_duplicated_addn = Rule(_duplicated_addn_pattern, _duplicated_addn)
# TODO(mattjj): figure out why we want sums as einsums, since not multiplies
_sum_pat = Choice((Sum, Node('x'), Choice(Val('axis'), Tuple('axis'), None)),
(Sum, Node('x')))
def _sum_as_einsum(x, axis=None):
if axis is None:
return np.einsum('{}->'.format(_einsum_range[:x.ndim]), x)
axis = axis if isinstance(axis, (tuple, list)) else [axis]
input_formula = _einsum_range[:x.ndim]
axis = [i % x.ndim for i in axis]
output_formula = ''.join([input_formula[i] for i in range(x.ndim)
if i not in axis])
return np.einsum('{}->{}'.format(input_formula, output_formula), x)
replace_sum = Rule(_sum_pat, _sum_as_einsum)
## move log behind an einsum if the other argument is a onehot
_log_oneh_einsum_pat = (Log,
(Einsum, Str('formula'),
(OneHot, Node('x'), Scalar('depth')),
Val('y')))
def _log_behind_onehot_einsum_pred(formula, x, depth, y):
"""Confirms sum is only over index added by one_hot."""
# TODO(matthewjmackay): broadcasting support might be needed here
if '...' in formula:
return False
in_formulas, out_formula = split_einsum_formula(formula)
oneh_index = in_formulas[0][-1]
other_indices = set([ch for in_formula in in_formulas
for ch in in_formula])
other_indices.remove(oneh_index)
out_indices = set(out_formula)
return other_indices == out_indices
def _log_behind_onehot_einsum(formula, x, depth, y):
return np.einsum(formula, tracers.one_hot(x, depth), np.log(y))
log_behind_onehot_einsum = Rule(_log_oneh_einsum_pat, _log_behind_onehot_einsum,
(_log_behind_onehot_einsum_pred,))
## move log-add behind an einsum if the other argument is a onehot
_log_addn_oneh_einsum_pat = (Log,
(AddN, Val('x'),
(Einsum, Str('formula'), Scalar('scale'),
(OneHot, Node('y'), Scalar('depth')),
Val('z'))))
def _log_addn_behind_onehot_einsum_pred(x, formula, scale, y, depth, z):
"""Confirms sum is only over index added by one_hot"""
# TODO(matthewjmackay): broadcasting support might be needed here
if '...' in formula:
return False
in_formulas, out_formula = split_einsum_formula(formula)
oneh_index = in_formulas[1][-1]
other_indices = set([ch for in_formula in in_formulas
for ch in in_formula])
other_indices.remove(oneh_index)
out_indices = set(out_formula)
return other_indices == out_indices
def _log_addn_behind_onehot_einsum(x, formula, scale, y, depth, z):
in_formulas, out_formula = split_einsum_formula(formula)
in_formulas = in_formulas[1:]
formula = _reconstitute_einsum_formula(in_formulas, out_formula)
return np.einsum(formula,
tracers.one_hot(y, depth),
np.log(add_n(x, scale*z)))
log_addn_behind_onehot_einsum = Rule(_log_addn_oneh_einsum_pat,
_log_addn_behind_onehot_einsum,
(_log_addn_behind_onehot_einsum_pred,))
## canonicalizing einsums
_einsum_distribute_pat = \
(Einsum, Str('formula'),
Segment('args1'),
(AddN('op'), Segment('add_args')),
Segment('args2'))
def _distribute_einsum(formula, op, add_args, args1, args2):
# Make sure any implicit broadcasting isn't lost.
broadcast_shape = np.broadcast(*add_args).shape
dtype = np.result_type(*add_args)
add_args = [arg * np.ones(broadcast_shape, dtype=dtype)
if not hasattr(arg, 'shape') or broadcast_shape != arg.shape
else arg
for arg in add_args]
return op(*[np.einsum(formula, *(args1 + (arg,) + args2))
for arg in add_args])
distribute_einsum = Rule(_einsum_distribute_pat, _distribute_einsum)
_einsum_transpose_pat = \
(Einsum, Str('formula'),
Segment('args1'),
(Transpose, Val('x')),
Segment('args2'))
def _transpose_inside_einsum(formula, args1, x, args2):
in_formulas, out_formula = split_einsum_formula(formula)
i = len(args1)
new_formula = _reconstitute_einsum_formula(
in_formulas[:i] + [in_formulas[i][::-1]] + in_formulas[i+1:],
out_formula)
new_args = args1 + (x,) + args2
return np.einsum(new_formula, *new_args)
transpose_inside_einsum = Rule(_einsum_transpose_pat, _transpose_inside_einsum)
def _remove_list_elements(list_to_thin, indices_to_remove):
return [item for i, item in enumerate(list_to_thin)
if i not in indices_to_remove]
def _remove_einsum_arg(formula, args1, args2):
in_formulas, out_formula = split_einsum_formula(formula)
new_formula = _reconstitute_einsum_formula(
_remove_list_elements(in_formulas, [len(args1)]), out_formula)
return np.einsum(new_formula, *(args1 + args2))
# Matches things like add_n(x*a, x*b) that can be rewritten as x * add_n(a, b).
_gatherable_add_n_einsum_pat = (
AddN, Star((Einsum, Str('formula'),
Segment('args1'), Scalar('x'), Segment('args2')),
accumulate=['formula', 'args1', 'args2']))
def _add_n_remaining_einsums(formula, args1, args2):
return add_n(*[_remove_einsum_arg(formula_i, args1_i, args2_i)
for formula_i, args1_i, args2_i in zip(formula, args1, args2)])
def _gather_log_add_n_einsum(x, formula, args1, args2):
return add_n(np.log(x), np.log(_add_n_remaining_einsums(formula, args1, args2)))
gather_log_add_einsum = Rule((Log, _gatherable_add_n_einsum_pat),
_gather_log_add_n_einsum)
def _gather_pow_add_n_einsum(x, formula, args1, args2, exponent):
return (np.power(x, exponent) *
np.power(_add_n_remaining_einsums(formula, args1, args2), exponent))
gather_pow_add_einsum = Rule(
(Power, _gatherable_add_n_einsum_pat, Scalar('exponent')),
_gather_pow_add_n_einsum)
def _gather_inv_add_einsum(x, formula, args1, args2):
return np.power(x, -1) * np.linalg.inv(_add_n_remaining_einsums(formula, args1, args2))
gather_inv_add_einsum = Rule((Inv, _gatherable_add_n_einsum_pat),
_gather_inv_add_einsum)
def _gather_logdet_add_einsum(x, formula, args1, args2):
new_sum = _add_n_remaining_einsums(formula, args1, args2)
return new_sum.shape[-1] * np.log(x) + logdet(new_sum)
gather_logdet_add_einsum = Rule((Logdet, _gatherable_add_n_einsum_pat),
_gather_logdet_add_einsum)
def _add_powers_within_einsum(formula, x, args1, args2, args3, exponent1,
exponent2):
in_formulas, out_formula = split_einsum_formula(formula)
new_formula = _reconstitute_einsum_formula(
_remove_list_elements(in_formulas, [len(args1) + 1 + len(args2)]),
out_formula)
return np.einsum(new_formula, *(args1 + (x ** (exponent1 + exponent2),)
+ args2 + args3))
def _add_powers_within_einsum_pred(formula, x, args1, args2, args3, exponent1=1,
exponent2=1):
in_formulas, out_formula = split_einsum_formula(formula)
x_indices = [len(args1), len(args1) + 1 + len(args2)]
if in_formulas[x_indices[0]] != in_formulas[x_indices[1]]:
return False
x_index_names = frozenset(in_formulas[x_indices[0]] +
in_formulas[x_indices[1]])
if any([not frozenset(in_formula).isdisjoint(x_index_names)
for i, in_formula in enumerate(in_formulas) if i not in x_indices]):
return False
return True
add_powers_within_einsum = Rule((Einsum, Str('formula'), Segment('args1'),
(Power, Val('x'), Scalar('exponent1')),
Segment('args2'),
(Power, Val('x'), Scalar('exponent2')),
Segment('args3')),
_add_powers_within_einsum,
(_add_powers_within_einsum_pred,))
def _increment_negative_power_in_einsum_r(formula, x, exponent,
args1, args2, args3):
in_formulas, out_formula = split_einsum_formula(formula)
new_formula = _reconstitute_einsum_formula(
in_formulas[:len(args1) + 1 + len(args2)] +
in_formulas[len(args1) + 2 + len(args2):], out_formula)
return np.einsum(new_formula,
*(args1 + (x ** (exponent + 1),) + args2 + args3))
# TODO(mhoffman): Add predicates that make sure formulas match.
increment_negative_power_in_einsum_r = Rule(
(Einsum, Str('formula'), Segment('args1'),
(Power, Node('x'), Scalar('exponent', lambda exponent: exponent < 0)),
Segment('args2'), Node('x'), Segment('args3')),
_increment_negative_power_in_einsum_r)
# TODO(mhoffman): Figure out cleaner way of dealing with commuting args.
def _increment_negative_power_in_einsum_l(formula, x, exponent,
args1, args2, args3):
in_formulas, out_formula = split_einsum_formula(formula)
new_formula = _reconstitute_einsum_formula(
in_formulas[:len(args1)] + in_formulas[len(args1) + 1:], out_formula)
return np.einsum(new_formula,
*(args1 + args2 + (x ** (exponent + 1),) + args3))
# TODO(mhoffman): Add predicates that make sure formulas match.
increment_negative_power_in_einsum_l = Rule(
(Einsum, Str('formula'), Segment('args1'),
Node('x'), Segment('args2'),
(Power, Node('x'), Scalar('exponent', lambda exponent: exponent < 0)),
Segment('args3')),
_increment_negative_power_in_einsum_l)
_einsum_composition_pat = \
(Einsum, Str('formula'),
Segment('args1'),
(Einsum, Str('parent_formula'), Segment('parent_args')),
Segment('args2'))
def decompose_formula(formula):
"""Given a string of indices for an argument to an einsum, returns a list
of the letters used, with '...' treated as an atomic letter.
"""
formula = formula.replace('...', '.')
decomposed = []
for idx in formula:
if idx == '.':
decomposed.append('...')
else:
decomposed.append(idx)
return decomposed
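# Illustrative example (added, not from the original source):
#   decompose_formula('...ij')  ->  ['...', 'i', 'j']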
def _compose_einsums(formula, args1, args2, parent_formula, parent_args):
parent_formula = debroadcast_formula(parent_formula,
*[np.ndim(arg) for arg in parent_args])
parent_in_formulas, parent_out_formula = split_einsum_formula(parent_formula)
parent_ndim = len(parent_out_formula)
arg_ndims = ([np.ndim(arg) for arg in args1] +
[parent_ndim] +
[np.ndim(arg) for arg in args2])
formula = debroadcast_formula(formula, *arg_ndims)
in_formulas, out_formula = split_einsum_formula(formula)
i = len(args1)
if len(parent_out_formula) != len(in_formulas[i]):
    raise ValueError('Input formula {} and parent formula {} have'
                     ' inconsistent numbers of indexes, broadcasting'
                     ' problem?'.format(in_formulas[i], parent_out_formula))
subs_map = collections.defaultdict(iter(_einsum_range).next)
# splice out the old input formula
old_in_formula = in_formulas[i]
in_formulas = in_formulas[:i] + in_formulas[i+1:]
# canonicalize input and output formulas (optional, for cleanliness)
in_formulas = [''.join(subs_map[idx] for idx in subs) for subs in in_formulas]
out_formula = ''.join(subs_map[idx] for idx in out_formula)
# identify parent output indices with corresponding input indices
subs_map.update((pidx + '_parent', subs_map[idx])
for pidx, idx in zip(parent_out_formula, old_in_formula))
# update the parent input formulas
parent_in_formulas = [''.join(subs_map[idx + '_parent'] for idx in subs)
for subs in parent_in_formulas]
# splice the formula lists and arguments
new_in_formulas = in_formulas[:i] + parent_in_formulas + in_formulas[i:]
new_args = args1 + parent_args + args2
new_formula = _reconstitute_einsum_formula(new_in_formulas, out_formula)
return np.einsum(new_formula, *new_args)
combine_einsum_compositions = Rule(_einsum_composition_pat, _compose_einsums)
def _einsum_repeated_one_hot(formula, x, depth, args1, args2, args3):
in_formulas, out_formula = split_einsum_formula(formula)
new_letter = in_formulas[len(args1)][-1]
old_letter = in_formulas[len(args1) + 1 + len(args2)][-1]
if old_letter in out_formula:
old_letter, new_letter = new_letter, old_letter
in_formulas = in_formulas[:len(args1)] + in_formulas[len(args1) + 1:]
else:
in_formulas = (in_formulas[:len(args1) + 1 + len(args2)] +
in_formulas[len(args1) + 1 + len(args2) + 1:])
for i in range(len(in_formulas)):
in_formulas[i] = in_formulas[i].replace(old_letter, new_letter)
one_hot_x = tracers.one_hot(x, depth)
return np.einsum(_reconstitute_einsum_formula(in_formulas, out_formula),
*(args1 + (one_hot_x,) + args2 + args3))
def _einsum_repeated_one_hot_pred(formula, x, depth, args1, args2, args3):
in_formulas, out_formula = split_einsum_formula(formula)
x_letter_1 = in_formulas[len(args1)][-1]
x_letter_2 = in_formulas[len(args1) + 1 + len(args2)][-1]
return (x_letter_1 != x_letter_2 and
not (x_letter_1 in out_formula and x_letter_2 in out_formula))
einsum_repeated_one_hot = Rule((Einsum, Str('formula'), Segment('args1'),
(OneHot, Val('x'), Scalar('depth')),
Segment('args2'),
(OneHot, Val('x'), Scalar('depth')),
Segment('args3')),
_einsum_repeated_one_hot,
(_einsum_repeated_one_hot_pred,))
def _reconstitute_einsum_formula(input_formulas, output_formula):
return '{}->{}'.format(','.join(input_formulas), output_formula)
## Miscellaneous expansions
def _log_einsum_expand(formula, args):
assert _check_log_einsum(formula)
result = np.log(args[0])
for arg in args[1:]:
result += np.log(arg)
return result
def _check_log_einsum(formula):
input_formulas, output_formula = split_einsum_formula(formula)
unique_input_indexes = set(list(''.join(input_formulas)))
return unique_input_indexes == set(list(output_formula))
replace_log_einsum = Rule((Log, (Einsum, Str('formula', _check_log_einsum),
Segment('args'))),
_log_einsum_expand)
## replacing autograd internal ops
replace_vspace_add = Rule((VSpaceAdd, Any('vs'), Val('x_prev'), Val('x_new')),
lambda vs, x_prev, x_new: x_prev + x_new)
## Miscellaneous simplifications
def constant_folding_einsum(formula, *args):
in_formulas, out_formula = split_einsum_formula(formula)
const_indices = []
node_indices = []
const_letters = set()
node_letters = set()
for i, (in_formula, arg) in enumerate(zip(in_formulas, args)):
if is_constant(arg):
const_indices.append(i)
const_letters.update(in_formula)
else:
node_indices.append(i)
node_letters.update(in_formula)
const_args = []
const_in_formulas = []
indices_to_remove = []
for i in const_indices:
if not node_letters.intersection(in_formulas[i]):
const_args.append(args[i])
const_in_formulas.append(in_formulas[i])
indices_to_remove.append(i)
elif node_letters.issuperset(in_formulas[i]) and np.all(args[i] == 1):
indices_to_remove.append(i)
if not indices_to_remove:
return np.einsum(formula, *args)
folded_constant = 1
if const_args:
const_letters = frozenset(''.join(const_in_formulas))
const_out_formula = ''.join([i for i in out_formula if i in const_letters])
folded_constant = np.einsum('{}->{}'.format(','.join(const_in_formulas),
const_out_formula), *const_args)
if len(indices_to_remove) == len(in_formulas):
return folded_constant
retained_in_formulas = ','.join([in_formulas[i]
for i in range(len(in_formulas))
if i not in indices_to_remove])
retained_args = [arg for i, arg in enumerate(args)
if i not in indices_to_remove]
if np.isscalar(folded_constant) and folded_constant == 0:
return 0.
elif np.isscalar(folded_constant) and folded_constant == 1:
return np.einsum('{}->{}'.format(retained_in_formulas, out_formula),
*retained_args)
else:
return np.einsum('{},{}->{}'.format(const_out_formula,
retained_in_formulas, out_formula),
*([folded_constant] + retained_args))
# TODO(mhoffman): This isn't 100% kosher for negative inputs.
# e.g., (-1 ** 2) ** 1.5 == 1, -1 ** 3 == -1.
fold_power = Rule(
(Power, (Power, Val('base'), Scalar('power1')), Scalar('power2')),
lambda base, power1, power2: maybe_power(base, power1 * power2))
### rewriter functions
def make_rewriter(rule):
"""Given a rewrite Rule, produces an attempt_rewrite function."""
pattern, rewriter, preds = rule
match = matchers.matcher(pattern)
def attempt_rewrite(node):
"""Given a node, attempt to pattern-match it and apply an in-place rewrite.
Args:
node: an ExprNode against which to match the Rule's pattern and, given a
match, apply an in-place rewrite.
Returns:
If the rewrite could not be applied, returns a falsey value. If the
rewrite was successful, return the node (which gets in-place modified).
Side-effects:
If a rewrite was successful then the returned node is modified in-place,
and in particular its parents are changed.
"""
bindings = match(node)
if bindings is not False:
rewriter_env = dict(node.kwargs, **bindings)
if all(pred(**rewriter_env) for pred in preds):
new_expr = run_rewriter(rewriter, rewriter_env)
tracers.replace_node_with_expr(node, new_expr) # modifies node in-place
return node
return False
return attempt_rewrite
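# Illustrative sketch (added, not from the original source) of how the returned
# closures might be driven; `expr_nodes` and the chosen rules are hypothetical.
# rewriters = [make_rewriter(rule) for rule in (replace_add, distribute_einsum)]
# for node in expr_nodes:            # some iterable of tracers.ExprNode objects
#     for attempt_rewrite in rewriters:
#         attempt_rewrite(node)      # rewrites the node in place when a pattern matches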
def run_rewriter(rewriter, symbolic_env):
"""Runs rewriter on a symbolic environment and returns resulting expression.
Args:
rewriter: a rewriter function to be traced into a new expression.
symbolic_env: a dict of bindings that contains the rewriters' arguments as
keys and can have literals or ExprNodes as values.
Returns:
A new expression built on top of the ExprNodes in env.
"""
# include default argument values in the environment
sig = funcsigs.signature(rewriter)
defaults = {name: param.default for name, param in sig.parameters.items()
if param.default is not param.empty}
symbolic_env = dict(defaults, **symbolic_env)
# trace the rewriter function on dummy values to produce a new subexpression
args = [symbolic_env[name] for name in sig.parameters.keys()]
flat_args, unflatten = _flatten(args)
symbolic_args = ((i, arg) for i, arg in enumerate(flat_args)
if isinstance(arg, tracers.ExprNode))
argnums, argnodes = zip(*symbolic_args)
def _rewriter(*node_vals):
return rewriter(*unflatten(subvals(flat_args, zip(argnums, node_vals))))
node_vals = [tracers.make_dummy(argnode) for argnode in argnodes]
subexpr = tracers.make_expr(_rewriter, *node_vals)
# return the new subexpression evaluated in the symbolic environment
return tracers.inline_expr(subexpr, dict(zip(subexpr.free_vars, argnodes)))
def _flatten(obj):
"""Flatten a potentially-nested list/tuple data structure into a flat list."""
if not isinstance(obj, (list, tuple)):
return [obj], lambda lst: lst[0]
constructor = type(obj)
if not obj: return [], lambda lst: constructor()
sublists, unflattens = zip(*map(_flatten, obj))
lengths = list(map(len, sublists))
starts = np.subtract(np.cumsum(lengths), lengths)
flat_list = [elt for sublist in sublists for elt in sublist]
def unflatten(lst):
sublists = (lst[start:start+l] for start, l in zip(starts, lengths))
return constructor(unflatten(sublist)
for sublist, unflatten in zip(sublists, unflattens))
return flat_list, unflatten
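# Illustrative check (added, not from the original source); the nesting below is arbitrary:
#   flat, unflatten = _flatten([1, (2, 3)])   # flat == [1, 2, 3]
#   unflatten(flat)                            # == [1, (2, 3)]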
| [
"autograd.numpy.isscalar",
"autograd.numpy.ones",
"autograd.numpy.log",
"collections.namedtuple",
"autograd.tracer.isbox",
"autograd.numpy.all",
"autograd.numpy.result_type",
"autograd.numpy.unique",
"autograd.numpy.array",
"autograd.numpy.shape",
"autograd.numpy.broadcast",
"functools.partial... | [((1775, 1819), 'functools.partial', 'functools.partial', (['_is_constant_val'], {'val': '(0.0)'}), '(_is_constant_val, val=0.0)\n', (1792, 1819), False, 'import functools\n'), ((1838, 1882), 'functools.partial', 'functools.partial', (['_is_constant_val'], {'val': '(1.0)'}), '(_is_constant_val, val=1.0)\n', (1855, 1882), False, 'import functools\n'), ((8522, 8591), 'collections.namedtuple', 'collections.namedtuple', (['"""BasicRule"""', "['pattern', 'rewriter', 'preds']"], {}), "('BasicRule', ['pattern', 'rewriter', 'preds'])\n", (8544, 8591), False, 'import collections\n'), ((2569, 2597), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', 'x', 'y'], {}), '(new_formula, x, y)\n', (2578, 2597), True, 'import autograd.numpy as np\n'), ((13388, 13413), 'autograd.numpy.result_type', 'np.result_type', (['*add_args'], {}), '(*add_args)\n', (13402, 13413), True, 'import autograd.numpy as np\n'), ((14213, 14246), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', '*new_args'], {}), '(new_formula, *new_args)\n', (14222, 14246), True, 'import autograd.numpy as np\n'), ((14718, 14758), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', '*(args1 + args2)'], {}), '(new_formula, *(args1 + args2))\n', (14727, 14758), True, 'import autograd.numpy as np\n'), ((16709, 16795), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', '*(args1 + (x ** (exponent1 + exponent2),) + args2 + args3)'], {}), '(new_formula, *(args1 + (x ** (exponent1 + exponent2),) + args2 +\n args3))\n', (16718, 16795), True, 'import autograd.numpy as np\n'), ((18242, 18315), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', '*(args1 + (x ** (exponent + 1),) + args2 + args3)'], {}), '(new_formula, *(args1 + (x ** (exponent + 1),) + args2 + args3))\n', (18251, 18315), True, 'import autograd.numpy as np\n'), ((19058, 19131), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', '*(args1 + args2 + (x ** (exponent + 1),) + args3)'], {}), '(new_formula, *(args1 + args2 + (x ** (exponent + 1),) + args3))\n', (19067, 19131), True, 'import autograd.numpy as np\n'), ((21888, 21921), 'autograd.numpy.einsum', 'np.einsum', (['new_formula', '*new_args'], {}), '(new_formula, *new_args)\n', (21897, 21921), True, 'import autograd.numpy as np\n'), ((23851, 23866), 'autograd.numpy.log', 'np.log', (['args[0]'], {}), '(args[0])\n', (23857, 23866), True, 'import autograd.numpy as np\n'), ((28442, 28470), 'funcsigs.signature', 'funcsigs.signature', (['rewriter'], {}), '(rewriter)\n', (28460, 28470), False, 'import funcsigs\n'), ((1657, 1675), 'autograd.tracer.isbox', 'ag_tracer.isbox', (['x'], {}), '(x)\n', (1672, 1675), True, 'import autograd.tracer as ag_tracer\n'), ((1736, 1752), 'autograd.numpy.all', 'np.all', (['(x == val)'], {}), '(x == val)\n', (1742, 1752), True, 'import autograd.numpy as np\n'), ((1932, 1943), 'autograd.numpy.array', 'np.array', (['x'], {}), '(x)\n', (1940, 1943), True, 'import autograd.numpy as np\n'), ((1945, 1956), 'autograd.numpy.array', 'np.array', (['y'], {}), '(y)\n', (1953, 1956), True, 'import autograd.numpy as np\n'), ((1971, 1997), 'autograd.numpy.broadcast', 'np.broadcast', (['x_arr', 'y_arr'], {}), '(x_arr, y_arr)\n', (1983, 1997), True, 'import autograd.numpy as np\n'), ((3497, 3519), 'autograd.numpy.einsum', 'np.einsum', (['""",->"""', 'x', 'y'], {}), "(',->', x, y)\n", (3506, 3519), True, 'import autograd.numpy as np\n'), ((3555, 3579), 'autograd.numpy.einsum', 'np.einsum', (['"""i,i->"""', 'x', 'y'], {}), "('i,i->', x, y)\n", (3564, 3579), True, 'import autograd.numpy as 
np\n'), ((3621, 3647), 'autograd.numpy.einsum', 'np.einsum', (['"""ij,j->i"""', 'x', 'y'], {}), "('ij,j->i', x, y)\n", (3630, 3647), True, 'import autograd.numpy as np\n'), ((3689, 3715), 'autograd.numpy.einsum', 'np.einsum', (['"""i,ij->j"""', 'x', 'y'], {}), "('i,ij->j', x, y)\n", (3698, 3715), True, 'import autograd.numpy as np\n'), ((11450, 11459), 'autograd.numpy.log', 'np.log', (['y'], {}), '(y)\n', (11456, 11459), True, 'import autograd.numpy as np\n'), ((13348, 13371), 'autograd.numpy.broadcast', 'np.broadcast', (['*add_args'], {}), '(*add_args)\n', (13360, 13371), True, 'import autograd.numpy as np\n'), ((15312, 15321), 'autograd.numpy.log', 'np.log', (['x'], {}), '(x)\n', (15318, 15321), True, 'import autograd.numpy as np\n'), ((15581, 15602), 'autograd.numpy.power', 'np.power', (['x', 'exponent'], {}), '(x, exponent)\n', (15589, 15602), True, 'import autograd.numpy as np\n'), ((15874, 15889), 'autograd.numpy.power', 'np.power', (['x', '(-1)'], {}), '(x, -1)\n', (15882, 15889), True, 'import autograd.numpy as np\n'), ((23904, 23915), 'autograd.numpy.log', 'np.log', (['arg'], {}), '(arg)\n', (23910, 23915), True, 'import autograd.numpy as np\n'), ((25387, 25412), 'autograd.numpy.einsum', 'np.einsum', (['formula', '*args'], {}), '(formula, *args)\n', (25396, 25412), True, 'import autograd.numpy as np\n'), ((26123, 26151), 'autograd.numpy.isscalar', 'np.isscalar', (['folded_constant'], {}), '(folded_constant)\n', (26134, 26151), True, 'import autograd.numpy as np\n'), ((29740, 29758), 'autograd.numpy.cumsum', 'np.cumsum', (['lengths'], {}), '(lengths)\n', (29749, 29758), True, 'import autograd.numpy as np\n'), ((2780, 2791), 'autograd.numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (2788, 2791), True, 'import autograd.numpy as np\n'), ((2863, 2874), 'autograd.numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (2871, 2874), True, 'import autograd.numpy as np\n'), ((3005, 3016), 'autograd.numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (3013, 3016), True, 'import autograd.numpy as np\n'), ((3089, 3100), 'autograd.numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (3097, 3100), True, 'import autograd.numpy as np\n'), ((3222, 3233), 'autograd.numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (3230, 3233), True, 'import autograd.numpy as np\n'), ((3946, 3957), 'autograd.numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (3954, 3957), True, 'import autograd.numpy as np\n'), ((7500, 7521), 'autograd.numpy.result_type', 'np.result_type', (['*args'], {}), '(*args)\n', (7514, 7521), True, 'import autograd.numpy as np\n'), ((16224, 16233), 'autograd.numpy.log', 'np.log', (['x'], {}), '(x)\n', (16230, 16233), True, 'import autograd.numpy as np\n'), ((20446, 20458), 'autograd.numpy.ndim', 'np.ndim', (['arg'], {}), '(arg)\n', (20453, 20458), True, 'import autograd.numpy as np\n'), ((26199, 26227), 'autograd.numpy.isscalar', 'np.isscalar', (['folded_constant'], {}), '(folded_constant)\n', (26210, 26227), True, 'import autograd.numpy as np\n'), ((2697, 2715), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (2709, 2715), True, 'import autograd.numpy as np\n'), ((2729, 2749), 'autograd.numpy.result_type', 'np.result_type', (['x', 'y'], {}), '(x, y)\n', (2743, 2749), True, 'import autograd.numpy as np\n'), ((2795, 2813), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (2807, 2813), True, 'import autograd.numpy as np\n'), ((2878, 2896), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (2890, 2896), True, 'import autograd.numpy as np\n'), 
((3020, 3038), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (3032, 3038), True, 'import autograd.numpy as np\n'), ((3104, 3122), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (3116, 3122), True, 'import autograd.numpy as np\n'), ((3237, 3255), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (3249, 3255), True, 'import autograd.numpy as np\n'), ((3961, 3979), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (3973, 3979), True, 'import autograd.numpy as np\n'), ((4031, 4042), 'autograd.numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (4039, 4042), True, 'import autograd.numpy as np\n'), ((4661, 4686), 'autograd.numpy.einsum', 'np.einsum', (['formula', '*args'], {}), '(formula, *args)\n', (4670, 4686), True, 'import autograd.numpy as np\n'), ((4967, 5036), 'autograd.numpy.unique', 'np.unique', (['[index for index in formula if index in _einsum_index_set]'], {}), '([index for index in formula if index in _einsum_index_set])\n', (4976, 5036), True, 'import autograd.numpy as np\n'), ((7602, 7614), 'autograd.numpy.ndim', 'np.ndim', (['arg'], {}), '(arg)\n', (7609, 7614), True, 'import autograd.numpy as np\n'), ((13434, 13471), 'autograd.numpy.ones', 'np.ones', (['broadcast_shape'], {'dtype': 'dtype'}), '(broadcast_shape, dtype=dtype)\n', (13441, 13471), True, 'import autograd.numpy as np\n'), ((13619, 13664), 'autograd.numpy.einsum', 'np.einsum', (['formula', '*(args1 + (arg,) + args2)'], {}), '(formula, *(args1 + (arg,) + args2))\n', (13628, 13664), True, 'import autograd.numpy as np\n'), ((20193, 20205), 'autograd.numpy.ndim', 'np.ndim', (['arg'], {}), '(arg)\n', (20200, 20205), True, 'import autograd.numpy as np\n'), ((20368, 20380), 'autograd.numpy.ndim', 'np.ndim', (['arg'], {}), '(arg)\n', (20375, 20380), True, 'import autograd.numpy as np\n'), ((25292, 25312), 'autograd.numpy.all', 'np.all', (['(args[i] == 1)'], {}), '(args[i] == 1)\n', (25298, 25312), True, 'import autograd.numpy as np\n'), ((4046, 4064), 'autograd.numpy.broadcast', 'np.broadcast', (['x', 'y'], {}), '(x, y)\n', (4058, 4064), True, 'import autograd.numpy as np\n')] |
#coding=utf-8
from core.interface.action import server_action
from core.helper.creator import create_action
from core.helper.globalvar import global_const
import os
class smng:
def __init__(self):
global_const().set_value('BASEDIR', os.path.dirname(__file__))
def run(self):
        try:
            action = create_action()
        except Exception:
            # fall back to the generic help action if no valid action could be created
            action = create_action('help')
action.parse_parameters()
action.run()
| [
"core.helper.globalvar.global_const",
"os.path.dirname",
"core.helper.creator.create_action"
] | [((246, 271), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (261, 271), False, 'import os\n'), ((327, 342), 'core.helper.creator.create_action', 'create_action', ([], {}), '()\n', (340, 342), False, 'from core.helper.creator import create_action\n'), ((210, 224), 'core.helper.globalvar.global_const', 'global_const', ([], {}), '()\n', (222, 224), False, 'from core.helper.globalvar import global_const\n'), ((380, 401), 'core.helper.creator.create_action', 'create_action', (['"""help"""'], {}), "('help')\n", (393, 401), False, 'from core.helper.creator import create_action\n')] |
#!/usr/bin/env python3
"""
Copyright 2018 <NAME> (<EMAIL>)
https://github.com/rrwick/Bacsort
This script uses FastANI output to generate a PHYLIP distance matrix suitable for quicktree.
This file is part of Bacsort. Bacsort is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version. Bacsort is distributed in
the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along with Bacsort. If
not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import sys
def get_arguments():
parser = argparse.ArgumentParser(description='Distance matrix from pairwise identities')
parser.add_argument('identities', type=str,
help='FastANI output file (or similarly formatted file with three '
'whitespace-delimited columns of assembly 1, assembly 2, percent '
                             'identity)')
parser.add_argument('--max_dist', type=float, required=False, default=1.0,
help='Maximum allowed genomic distance')
args = parser.parse_args()
return args
def main():
args = get_arguments()
clusters = set()
distances = {}
print('', file=sys.stderr)
print('Convert FastANI distances to PHYLIP matrix', file=sys.stderr)
print('------------------------------------------------', file=sys.stderr)
fastani_output_filename = args.identities
with open(fastani_output_filename, 'rt') as fastani_output:
for line in fastani_output:
parts = line.strip().split()
cluster_1 = parts[0]
cluster_2 = parts[1]
ani = float(parts[2])
if cluster_1 == cluster_2:
distance = 0.0
else:
distance = 1.0 - (ani / 100.0)
clusters.add(cluster_1)
clusters.add(cluster_2)
add_distance(distances, cluster_1, cluster_2, distance)
add_distance(distances, cluster_2, cluster_1, distance)
print('Found {} clusters and {} distances'.format(len(clusters), len(distances)),
file=sys.stderr)
print(len(clusters))
clusters = sorted(clusters)
for i in clusters:
print(i, end='')
for j in clusters:
print('\t', end='')
try:
distance = distances[(i, j)]
except KeyError:
distance = args.max_dist
if distance > args.max_dist:
distance = args.max_dist
print('%.6f' % distance, end='')
print()
print('', file=sys.stderr)
def add_distance(distances, cluster_1, cluster_2, distance):
# If this is the first time we've seen this pair, then we just add it to the dictionary.
if (cluster_1, cluster_2) not in distances:
distances[(cluster_1, cluster_2)] = distance
# If we've seen this pair before (the other way around), then we make sure the distances are
# close (sanity check) and then save the mean distance.
else:
assert abs(distance - distances[(cluster_1, cluster_2)]) < 0.1
distances[(cluster_1, cluster_2)] = (distances[(cluster_1, cluster_2)] + distance) / 2.0
if __name__ == '__main__':
main()
| [
"argparse.ArgumentParser"
] | [((880, 959), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Distance matrix from pairwise identities"""'}), "(description='Distance matrix from pairwise identities')\n", (903, 959), False, 'import argparse\n')] |
from django.http import HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
import json
from django.core.serializers.json import DjangoJSONEncoder
from rbac.models import Menu, Role
from system.models import SystemSetup
from users.models import Structure
from system.forms import *
from django.contrib.auth import logout,login,authenticate
from django.contrib.auth.decorators import login_required
def roleView(request):
ret = Menu.getMenuByRequestUrl(url=request.path_info)
ret.update(SystemSetup.getSystemSetupLastData())
return render(request, 'system/rbac/role-list.html', ret)
def roleListView(request):
fields = ['id', 'title']
ret = dict(data=list(Role.objects.values(*fields).exclude(id=1)))
return HttpResponse(json.dumps(ret), content_type='application/json')
def roleDetailView(request):
if request.method == 'GET':
ret = dict()
if 'id' in request.GET and request.GET['id']:
ret = dict(role=get_object_or_404(Role, pk=request.GET.get('id')))
return render(request, 'system/rbac/role_detail.html', ret)
else:
res = dict(result=False)
if 'id' in request.POST and request.POST['id']:
role = get_object_or_404(Role, pk=request.POST.get('id'))
else:
role = Role()
if request.POST.get('title'):
role.title = request.POST.get('title')
role.save()
res['result'] = True
return HttpResponse(json.dumps(res), content_type='application/json')
def roleDeleteView(request):
ret = dict(result=False)
if 'id' in request.POST and request.POST['id']:
id_list = map(int, request.POST.get('id').split(','))
Role.objects.filter(id__in=id_list).delete()
ret['result'] = True
return HttpResponse(json.dumps(ret), content_type='application/json')
def role2MenuView(request):
if request.method == 'GET':
if 'id' in request.GET and request.GET['id']:
role = get_object_or_404(Role, pk=request.GET.get('id'))
ret = dict(role=role)
return render(request, 'system/rbac/role_menu.html', ret)
else:
res = dict(result=False)
role = get_object_or_404(Role, pk=request.POST.get('id'))
tree = json.loads(request.POST['tree'])
role.permissions.clear()
for menu in tree:
if menu['checked'] is True:
menu_checked = get_object_or_404(Menu, pk=menu['id'])
role.permissions.add(menu_checked)
res['result'] = True
return HttpResponse(json.dumps(res), content_type='application/json')
def role2MenuListView(request):
fields = ['id', 'title', 'parent']
if 'id' in request.GET and request.GET['id']:
role = Role.objects.get(id=request.GET.get('id'))
role_menus = role.permissions.values(*fields)
ret = dict(data=list(role_menus))
else:
menus = Menu.objects.all()
ret = dict(data=list(menus.values(*fields)))
return HttpResponse(json.dumps(ret, cls=DjangoJSONEncoder), content_type='application/json')
def role2UserView(request):
if request.method == 'GET':
if 'id' in request.GET and request.GET['id']:
role = get_object_or_404(Role, pk=int(request.GET.get('id')))
added_users = role.userprofile_set.all()
all_users = User.objects.exclude(username='admin')
un_add_users = set(all_users).difference(added_users)
ret = dict(role=role, added_users=added_users, un_add_users=list(un_add_users))
return render(request, 'system/rbac/role_user.html', ret)
else:
res = dict(result=False)
id_list = None
role = get_object_or_404(Role, pk=int(request.POST.get('id')))
if 'to' in request.POST and request.POST['to']:
id_list = map(int, request.POST.getlist('to', []))
role.userprofile_set.clear()
if id_list:
for user in User.objects.filter(id__in=id_list):
role.userprofile_set.add(user)
res['result'] = True
return HttpResponse(json.dumps(res), content_type='application/json') | [
"django.shortcuts.render",
"rbac.models.Role.objects.values",
"json.loads",
"rbac.models.Role.objects.filter",
"json.dumps",
"rbac.models.Menu.objects.all",
"django.shortcuts.get_object_or_404",
"system.models.SystemSetup.getSystemSetupLastData",
"rbac.models.Menu.getMenuByRequestUrl",
"rbac.model... | [((461, 508), 'rbac.models.Menu.getMenuByRequestUrl', 'Menu.getMenuByRequestUrl', ([], {'url': 'request.path_info'}), '(url=request.path_info)\n', (485, 508), False, 'from rbac.models import Menu, Role\n'), ((573, 623), 'django.shortcuts.render', 'render', (['request', '"""system/rbac/role-list.html"""', 'ret'], {}), "(request, 'system/rbac/role-list.html', ret)\n", (579, 623), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((524, 560), 'system.models.SystemSetup.getSystemSetupLastData', 'SystemSetup.getSystemSetupLastData', ([], {}), '()\n', (558, 560), False, 'from system.models import SystemSetup\n'), ((778, 793), 'json.dumps', 'json.dumps', (['ret'], {}), '(ret)\n', (788, 793), False, 'import json\n'), ((1061, 1113), 'django.shortcuts.render', 'render', (['request', '"""system/rbac/role_detail.html"""', 'ret'], {}), "(request, 'system/rbac/role_detail.html', ret)\n", (1067, 1113), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1828, 1843), 'json.dumps', 'json.dumps', (['ret'], {}), '(ret)\n', (1838, 1843), False, 'import json\n'), ((2292, 2324), 'json.loads', 'json.loads', (["request.POST['tree']"], {}), "(request.POST['tree'])\n", (2302, 2324), False, 'import json\n'), ((2955, 2973), 'rbac.models.Menu.objects.all', 'Menu.objects.all', ([], {}), '()\n', (2971, 2973), False, 'from rbac.models import Menu, Role\n'), ((3051, 3089), 'json.dumps', 'json.dumps', (['ret'], {'cls': 'DjangoJSONEncoder'}), '(ret, cls=DjangoJSONEncoder)\n', (3061, 3089), False, 'import json\n'), ((3604, 3654), 'django.shortcuts.render', 'render', (['request', '"""system/rbac/role_user.html"""', 'ret'], {}), "(request, 'system/rbac/role_user.html', ret)\n", (3610, 3654), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1317, 1323), 'rbac.models.Role', 'Role', ([], {}), '()\n', (1321, 1323), False, 'from rbac.models import Menu, Role\n'), ((1498, 1513), 'json.dumps', 'json.dumps', (['res'], {}), '(res)\n', (1508, 1513), False, 'import json\n'), ((2116, 2166), 'django.shortcuts.render', 'render', (['request', '"""system/rbac/role_menu.html"""', 'ret'], {}), "(request, 'system/rbac/role_menu.html', ret)\n", (2122, 2166), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2602, 2617), 'json.dumps', 'json.dumps', (['res'], {}), '(res)\n', (2612, 2617), False, 'import json\n'), ((4134, 4149), 'json.dumps', 'json.dumps', (['res'], {}), '(res)\n', (4144, 4149), False, 'import json\n'), ((1730, 1765), 'rbac.models.Role.objects.filter', 'Role.objects.filter', ([], {'id__in': 'id_list'}), '(id__in=id_list)\n', (1749, 1765), False, 'from rbac.models import Menu, Role\n'), ((2455, 2493), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Menu'], {'pk': "menu['id']"}), "(Menu, pk=menu['id'])\n", (2472, 2493), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((709, 737), 'rbac.models.Role.objects.values', 'Role.objects.values', (['*fields'], {}), '(*fields)\n', (728, 737), False, 'from rbac.models import Menu, Role\n')] |
import re
import importlib.util
import sys
from .options import Option
def clean_spaces(text: str) -> str:
return re.sub(r'\s+', ' ', text).strip()
def patch_options(options, kwargs):
return {
k: options[v.key] if isinstance(v, Option) else v
for k, v in kwargs.items()
}
def wrap_global(func):
class keep_args_instance:
def __init__(self, namespace, **kwargs):
self.kwargs = kwargs
def __call__(self, *args, **kwargs):
kwargs = {**self.kwargs, **kwargs}
kwargs = patch_options(kwargs.pop('options'), kwargs)
return func(*args, **kwargs)
return keep_args_instance
# https://docs.python.org/3/library/importlib.html#implementing-lazy-imports
def lazy_import(name):
spec = importlib.util.find_spec(name)
loader = importlib.util.LazyLoader(spec.loader)
spec.loader = loader
module = importlib.util.module_from_spec(spec)
sys.modules[name] = module
loader.exec_module(module)
return module
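# Illustrative usage (added, not part of the original module): the named module is
# only imported for real when one of its attributes is first touched.
#   np = lazy_import('numpy')   # cheap: no import work happens yet
#   np.zeros(3)                  # the deferred import is triggered here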
| [
"re.sub"
] | [((120, 145), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'text'], {}), "('\\\\s+', ' ', text)\n", (126, 145), False, 'import re\n')] |
# write your code here
import sys
import os
import hashlib
args = sys.argv
print(sys.argv)
if len(args) < 2:
print("Directory is not specified")
sys.exit()
else:
file_ext = ''
file_ext = input('Enter file format:')
sort = input('\nSize sorting options:\n1. Descending\n2. Ascending\n\nEnter a sorting option:')
while sort not in ['1', '2']:
print('\nWrong option\n')
sort = input('Enter a sorting option:')
files_dict = dict()
for root, dirs, files in os.walk(sys.argv[1]):
for name in files:
#print(os.path.join(root, name))
#print(os.path.splitext(os.path.join(root, name))[1])
#print(os.path.getsize(os.path.join(root, name)))
if file_ext in os.path.splitext(os.path.join(root, name))[1]:
if str(os.path.getsize(os.path.join(root, name))) in files_dict.keys():
l = files_dict.get(f"{os.path.getsize(os.path.join(root, name))}")
l.append(os.path.join(root, name))
files_dict[f'{os.path.getsize(os.path.join(root, name))}'] = l
else:
l = list()
l.append(os.path.join(root, name))
files_dict[f'{os.path.getsize(os.path.join(root, name))}'] = l
f_dict = dict(filter(lambda elem: len(elem[1]) > 1, files_dict.items()))
reverse = True if sort == '1' else False
for i in sorted(f_dict, reverse=reverse):
print(f'\n{i} bytes')
for j in f_dict[i]:
print(j)
db_check = input("\nCheck for duplicates?")
while db_check not in ['yes', 'no']:
print('Wrong option')
db_check = input("\nCheck for duplicates?\n")
if db_check != 'yes':
sys.exit()
else:
n = 1
hash_dict = dict()
for i in sorted(f_dict, reverse=reverse):
h_dict = dict()
hash_dict[i] = list()
for j in f_dict[i]:
hash = hashlib.md5(open(j, 'rb').read()).hexdigest()
if hash in h_dict.keys():
l = h_dict.get(hash)
l.append(j)
h_dict[hash] = l
else:
l = list()
l.append(j)
h_dict[hash] = l
t_hdict = dict(filter(lambda elem: len(elem[1]) > 1, h_dict.items()))
hash_dict[i].append(t_hdict)
n = 1
for i in sorted(hash_dict, reverse=reverse):
print(f'\n{i} bytes')
for j in hash_dict[i]:
for x in j:
print(f'Hash: {x}')
for y in j[x]:
print(f'{n}. {y}')
n += 1
n_list = [x for x in range(1, n+1)]
db_check = input("\nDelete files?")
while db_check not in ['yes', 'no']:
print('Wrong option')
db_check = input("\nDelete files?\n")
if db_check != 'yes':
sys.exit()
else:
file_numbers = input("\nEnter file numbers to delete:")
while len(file_numbers) == 0 or not file_numbers.replace(' ', '').isnumeric():
print('\nWrong format')
file_numbers = input("\nEnter file numbers to delete:")
flag = 0
file_ints = list(map(int, list(file_numbers.split())))
if set(file_ints).issubset(set(n_list)):
flag = 1
while flag == 0:
print('\nWrong format')
file_numbers = input("\nEnter file numbers to delete:")
file_ints = list(map(int, list(file_numbers.split())))
if set(file_ints).issubset(set(n_list)):
flag = 1
saved_space = 0
for z in sorted(file_ints):
n = 1
#print(f'z: {z}')
for i in sorted(hash_dict, reverse=reverse):
#print(f'\n{i} bytes')
for j in hash_dict[i]:
for x in j:
#print(f'Hash: {x}')
for y in j[x]:
if z == n:
#print(f"Removing: z: {z}, n: {n}, y: {y}")
os.remove(y)
saved_space += int(i)
n += 1
print(f'Total freed up space: {saved_space} bytes')
| [
"os.remove",
"os.path.join",
"os.walk",
"sys.exit"
] | [((155, 165), 'sys.exit', 'sys.exit', ([], {}), '()\n', (163, 165), False, 'import sys\n'), ((502, 522), 'os.walk', 'os.walk', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (509, 522), False, 'import os\n'), ((1734, 1744), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1742, 1744), False, 'import sys\n'), ((2782, 2792), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2790, 2792), False, 'import sys\n'), ((768, 792), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (780, 792), False, 'import os\n'), ((1002, 1026), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (1014, 1026), False, 'import os\n'), ((1193, 1217), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (1205, 1217), False, 'import os\n'), ((837, 861), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (849, 861), False, 'import os\n'), ((3877, 3889), 'os.remove', 'os.remove', (['y'], {}), '(y)\n', (3886, 3889), False, 'import os\n'), ((944, 968), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (956, 968), False, 'import os\n'), ((1078, 1102), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (1090, 1102), False, 'import os\n'), ((1269, 1293), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (1281, 1293), False, 'import os\n')] |
# ------------------------------------------------------------------------------
# Copyright 2020 Forschungszentrum Jülich GmbH
# "Licensed to the Apache Software Foundation (ASF) under one or more contributor
# license agreements; and to You under the Apache License, Version 2.0. "
#
# Forschungszentrum Jülich
# Institute: Institute for Advanced Simulation (IAS)
# Section: Jülich Supercomputing Centre (JSC)
# Division: High Performance Computing in Neuroscience
# Laboratory: Simulation Laboratory Neuroscience
# Team: Multi-scale Simulation and Design
#
# ------------------------------------------------------------------------------
from mpi4py import MPI
import time
import numpy as np
from EBRAINS_InterscaleHUB.refactored_modular.Communicator import Communicator
from EBRAINS_InterscaleHUB.refactored_modular import interscalehub_utils
from EBRAINS_InterscaleHUB.refactored_modular import interscalehub_mediator as mediator
#from EBRAINS_InterscaleHUB.Interscale_hub.transformer import spiketorate
from EBRAINS_ConfigManager.global_configurations_manager.xml_parsers.default_directories_enum import DefaultDirectories
from EBRAINS_RichEndpoint.Application_Companion.common_enums import Response
# NestTvbPivot and TvbNestPivot classes:
# TODO: proper abstraction -> extract the usecase details from the general implementation
# -> Init, start, stop are pretty much the same every time
# -> incoming (receive) and outgoing (send) loops (M:N mapping)
# -> the analyse (method) should be
# a) pivot, as raw data to cosim data
# b) transform (might be trivial) and
# c) analysis (might be trivial)
# TODO: rework on the receive and send loops (both, general coding style and usecase specifics)
class CommunicatorNestTvb(Communicator):
'''
Implements the PivotBaseClass for abstracting the pivot operations and
the underlying communication protocol. This class provides wrappers
    for receiving the data from NEST simulator and sending it to TVB simulator
after processing/transforming to the required format.
'''
def __init__(self, configurations_manager, log_settings, name, databuffer,
intracomm, param, comm_receiver, comm_sender):
        '''
        Initialise the communicator with the transformation/analysis parameters,
        the shared-memory data buffer, and the MPI communicators used to receive
        data from NEST (rank 0) and to send data to TVB (rank 1).
        '''
super().__init__(configurations_manager,
log_settings,
name,
databuffer
)
# Parameter for transformation and analysis
self.__param = param
# INTERcommunicator
# TODO: Revisit the protocol to TVB and NEST
# TODO: rank 0 and rank 1 hardcoded
if intracomm.Get_rank() == 0:
self.__comm_receiver = comm_receiver
self.__num_sending = self.__comm_receiver.Get_remote_size()
elif intracomm.Get_rank() == 1:
self.__comm_sender = comm_sender
self.__num_receiving = self.__comm_sender.Get_remote_size()
self.__logger.info("Initialised")
def start(self, intracomm):
'''
Starts the pivot operation.
M:N mapping of MPI ranks, receive data, further process data.
        Receive on rank 0, do the rest on the remaining ranks.
'''
if intracomm.Get_rank() == 0: # Receiver from input sim, rank 0
self._receive()
elif intracomm.Get_rank() == 1: # Science/analyse and sender to TVB, rank 1-x
self._send()
def stop(self):
'''
TODO: proper execution of stop command
'''
self.__stop = True
def _receive(self):
'''
Receive data on rank 0. Put it into the shared mem buffer.
Replaces the former 'receive' function.
NOTE: First refactored version -> not pretty, not final.
'''
# The last two buffer entries are used for shared information
# --> they replace the status_data variable from previous version
# --> find more elegant solution?
self.__logger.info("setting up buffers")
self.__databuffer[-1] = 1 # set buffer to 'ready to receive from nest'
self.__databuffer[-2] = 0 # marks the 'head' of the buffer
# It seems the 'check' variable is used to receive tags from NEST, i.e. ready for send...
# change this in the future, also mentioned in the FatEndPoint solution from Wouter.
check = np.empty(1,dtype='b')
shape = np.empty(1, dtype='i')
count = 0
status_ = MPI.Status()
self.__logger.info("reading from buffer")
###########################################################
#TODO Refactor to move this functionality to appropriate location
#NOTE As per protocol, it should be the response message of 'init'
# command, and should return the PID and the port information
import os
from EBRAINS_RichEndpoint.Application_Companion.common_enums import INTEGRATED_SIMULATOR_APPLICATION as SIMULATOR
pid_and_local_minimum_step_size = \
{SIMULATOR.PID.name: os.getpid(),
SIMULATOR.LOCAL_MINIMUM_STEP_SIZE.name: 0.0}
print(f'{pid_and_local_minimum_step_size}')
###########################################################
# self.__logger.info("NESTtoTVB -- consumer/receiver -- Rank:"+str(self.__comm_receiver.Get_rank()))
while True:
head_ = 0 # head of the buffer, reset after each iteration
# TODO: This is still not correct. We only check for the Tag of the last rank.
# IF all ranks send always the same tag in one iteration (simulation step)
# then this works. But it should be handled differently!!!!
self.__comm_receiver.Recv([check, 1, MPI.CXX_BOOL], source=0, tag=MPI.ANY_TAG, status=status_)
status_rank_0 = status_.Get_tag()
for i in range(1, self.__num_sending):
# new: We do not care which source sends first, give MPI the freedom to send in whichever order.
# self.__comm_receiver.Recv([check, 1, MPI.CXX_BOOL], source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status_)
# self.__logger.info("checking status")
self.__comm_receiver.Recv([check, 1, MPI.CXX_BOOL], source=i, tag=MPI.ANY_TAG, status=status_)
if status_rank_0 != status_.Get_tag():
# Case: the state of the NEST is different between the ranks
# Log the exception with traceback
interscalehub_utils.log_exception(
log_message="Abnormal state : the state of Nest is different between rank. Tag received: ",
mpi_tag_received=status_.Get_tag())
# Terminate with Error
return Response.ERROR
if status_.Get_tag() == 0:
# wait until ready to receive new data (i.e. the sender has cleared the buffer)
while self.__databuffer[-1] != 1: # TODO: use MPI, remove the sleep
time.sleep(0.001)
pass
for source in range(self.__num_sending):
# send 'ready' to the nest rank
# self.__logger.info("send ready")
self.__comm_receiver.Send([np.array(True,dtype='b'),MPI.BOOL],dest=source,tag=0)
# receive package size info
# self.__logger.info("DEBUG 121 ====> receiving size in NEST_TVB_PIVOT")
self.__comm_receiver.Recv([shape, 1, MPI.INT], source=source, tag=0, status=status_)
# self.__comm_receiver.Recv([shape, 1, MPI.INT], source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status_)
# NEW: receive directly into the buffer
self.__comm_receiver.Recv([self.__databuffer[head_:], MPI.DOUBLE], source=source, tag=0, status=status_)
head_ += shape[0] # move head
# Mark as 'ready to do analysis'
self.__databuffer[-1] = 0
# important: head_ is first buffer index WITHOUT data.
self.__databuffer[-2] = head_
# continue receiving the data
continue
elif status_.Get_tag() == 1:
# increment the count and continue receiving the data
count += 1
continue
elif status_.Get_tag() == 2:
# NOTE: simulation ended
# everything goes fine, terminate the loop and respond with OK
return Response.OK
else:
# A 'bad' MPI tag is received,
# log the exception with traceback
interscalehub_utils.log_exception(
log_message="bad mpi tag :",
mpi_tag_received=status_.Get_tag())
# terminate with Error
return Response.ERROR
def _send(self):
'''
Send data to TVB (multiple MPI ranks possible).
Replaces the former 'send' function.
NOTE: First refactored version -> not pretty, not final.
'''
count=0 # simulation/iteration step
status_ = MPI.Status()
# self.__logger.info("NESTtoTVB -- producer/sender -- Rank:"+str(self.__comm_sender.Get_rank()))
while True:
# TODO: this communication has the 'rank 0' problem described in the beginning
accept = False
#logger.info("Nest to TVB : wait to send " )
while not accept:
req = self.__comm_sender.irecv(source=MPI.ANY_SOURCE,tag=MPI.ANY_TAG)
accept = req.wait(status_)
#logger.info(" Nest to TVB : send data status : " +str(status_.Get_tag()))
if status_.Get_tag() == 0:
# wait until the receiver has cleared the buffer, i.e. filled with new data
while self.__databuffer[-1] != 0: # TODO: use MPI, remove the sleep
time.sleep(0.001)
pass
# NOTE: calling the mediator which calls the corresponding transformer functions
times,data = mediator.spike_to_rate(self.__databuffer, count)
# Mark as 'ready to receive next simulation step'
self.__databuffer[-1] = 1
### OLD Code
#logger.info("Nest to TVB : send data :"+str(np.sum(data)) )
# time of sim step
self.__comm_sender.Send([times, MPI.DOUBLE], dest=status_.Get_source(), tag=0)
# send the size of the rate
size = np.array(int(data.shape[0]),dtype='i')
self.__comm_sender.Send([size,MPI.INT], dest=status_.Get_source(), tag=0)
# send the rates
self.__comm_sender.Send([data,MPI.DOUBLE], dest=status_.Get_source(), tag=0)
# increment the count
count+=1
# continue sending the data
continue
### OLD Code end
elif status_.Get_tag() == 1:
# NOTE: simulation ended
# everything goes fine, terminate the loop and respond with OK
return Response.OK
else:
# A 'bad' MPI tag is received,
# log the exception with traceback
interscalehub_utils.log_exception(
log_message="bad mpi tag :",
mpi_tag_received=status_.Get_tag())
# terminate with Error
return Response.ERROR
'''
def _transform(self, count):
#store: Python object, create the histogram
#analyse: Python object, calculate rates
spikerate = spiketorate(self.__param)
times, data = spikerate.spike_to_rate(count, self.__databuffer[-2], self.__databuffer)
return times, data
'''
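# Minimal sketch (not part of the hub code) of the shared-buffer handshake that
# _receive and _send rely on: databuffer[-1] is the ready flag (1 = the receiver
# may fill the buffer, 0 = data is ready for the sender to drain) and
# databuffer[-2] holds the 'head', i.e. the first index without valid data.
def _fill_buffer_sketch(databuffer, payload):
    while databuffer[-1] != 1:      # wait until the sender has drained the buffer
        pass
    databuffer[:len(payload)] = payload
    databuffer[-2] = len(payload)   # head: first index without data
    databuffer[-1] = 0              # hand the buffer over to the sender side
def _drain_buffer_sketch(databuffer):
    while databuffer[-1] != 0:      # wait until the receiver has filled the buffer
        pass
    head = int(databuffer[-2])
    data = list(databuffer[:head])
    databuffer[-1] = 1              # mark 'ready to receive the next step'
    return data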
| [
"EBRAINS_InterscaleHUB.refactored_modular.interscalehub_mediator.spike_to_rate",
"time.sleep",
"numpy.array",
"mpi4py.MPI.Status",
"numpy.empty",
"os.getpid"
] | [((4387, 4409), 'numpy.empty', 'np.empty', (['(1)'], {'dtype': '"""b"""'}), "(1, dtype='b')\n", (4395, 4409), True, 'import numpy as np\n'), ((4425, 4447), 'numpy.empty', 'np.empty', (['(1)'], {'dtype': '"""i"""'}), "(1, dtype='i')\n", (4433, 4447), True, 'import numpy as np\n'), ((4488, 4500), 'mpi4py.MPI.Status', 'MPI.Status', ([], {}), '()\n', (4498, 4500), False, 'from mpi4py import MPI\n'), ((9273, 9285), 'mpi4py.MPI.Status', 'MPI.Status', ([], {}), '()\n', (9283, 9285), False, 'from mpi4py import MPI\n'), ((5070, 5081), 'os.getpid', 'os.getpid', ([], {}), '()\n', (5079, 5081), False, 'import os\n'), ((10253, 10301), 'EBRAINS_InterscaleHUB.refactored_modular.interscalehub_mediator.spike_to_rate', 'mediator.spike_to_rate', (['self.__databuffer', 'count'], {}), '(self.__databuffer, count)\n', (10275, 10301), True, 'from EBRAINS_InterscaleHUB.refactored_modular import interscalehub_mediator as mediator\n'), ((7088, 7105), 'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (7098, 7105), False, 'import time\n'), ((10067, 10084), 'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (10077, 10084), False, 'import time\n'), ((7342, 7367), 'numpy.array', 'np.array', (['(True)'], {'dtype': '"""b"""'}), "(True, dtype='b')\n", (7350, 7367), True, 'import numpy as np\n')] |
#Import
import warnings; warnings.simplefilter('ignore') #for PCoA warnings
import pandas as pd
import numpy as np
#import data
from biom import load_table
from skbio.stats import subsample_counts
#MOCK data generation
from gneiss.util import match
from gneiss.sort import niche_sort
from simulations import block_diagonal_gaus
from simulations import build_block_model
from simulations import minimize_model
#compositional transform
from skbio.stats.composition import clr
# import observation data
in_biom='cluster_models/keyboard.biom' #import biom file
table = load_table(in_biom)
read_filter_s = lambda val, id_, md: sum(val) > 0  # keep samples with non-zero read totals
read_filter_f = lambda val, id_, md: sum(val) > 0  # keep observations (features) with non-zero read totals
table=table.filter(read_filter_s, axis='sample')
table=table.filter(read_filter_f, axis='observation')
otutabledf=table.to_dataframe()
otutabledf=otutabledf.T
otutabledf.drop_duplicates(inplace=True)
# Get OTU to taxa match
taxonomy=table.metadata_to_dataframe('observation')
taxonomy.columns=['kingdom', 'phylum', 'class', 'order',
'family', 'genus', 'species']
taxonomy['taxonomy'] = taxonomy[taxonomy.columns].apply(lambda x: ';'.join(x), axis=1)
#mapping import
map_file='cluster_models/keyboard.txt' #import metadata
mappingdf= pd.read_table('%s'%map_file, index_col=0,low_memory=False)
mappingdf=mappingdf.replace(np.nan,'Unknown', regex=True)
mappingdf.index=list(map(str,mappingdf.index))
mappingdf=mappingdf.astype(str)
mappingdf=mappingdf[~mappingdf.index.duplicated(keep='first')]
#match the tables
otutabledf,mappingdf=match(otutabledf,mappingdf[mappingdf['host_subject_id'].isin(['M2','M3','M9'])])
otutabledf=otutabledf.T[otutabledf.sum()>0].T
otutabledf=otutabledf[otutabledf.T.sum()>0]
otutabledf.columns=[str(x) for x in otutabledf.columns]
sorting_map={'M9':2,'M2':3,'M3':1}
mappingdf['host_num']=[int(sorting_map[x]) for x in mappingdf['host_subject_id']]
mappingdf=mappingdf.apply(pd.to_numeric, errors='ignore')
#sort by niche
observed_table = niche_sort(otutabledf, mappingdf['host_num'])
mappingdf=mappingdf.T[observed_table.index].T
otutabledf=observed_table.copy()
otutabledf.to_dense().to_csv("cluster_models/base_model_keyboard_table.csv",sep=',', encoding='utf-8')
mappingdf.to_dense().to_csv("cluster_models/base_model_keyboard_meta.csv",sep=',', encoding='utf-8')
######### build the model #########
x0 = [3, 20, 20, 1e2, 1e2,1e1]
bnds = ((3,3),(0,1e2),(0,2e3),(0,1e10),(0,5e1),(1,10))
model_fit=minimize_model(x0,bnds,np.array(otutabledf.T[:104].T.as_matrix()))
base_truth,X_noise_sub=build_block_model(3, model_fit.x[1],
model_fit.x[2], model_fit.x[3]
, model_fit.x[4]
,otutabledf.shape[1]
,otutabledf.shape[0]
,overlap=model_fit.x[5]
,mapping_on=False)
save_base=[]
save_sub=[]
for rank_,overlap_ in zip([2],[20]):
#subsample_points=np.logspace(2,4,4)
seq_depth={500:3.05e2,
1000:6.1e2,
2000:1.25e3,
4000:2.5e3,
10000:6.05e3}
for sub_,model_peram in seq_depth.items():
#run model with fit variables and new variants
base_truth,X_noise_sub=build_block_model(rank_, model_peram/15, model_peram/15,
model_peram, model_peram
,200,1000,overlap=overlap_
,mapping_on=False)
base_truth=pd.DataFrame(base_truth
,index=[(rank_,overlap_,sub_,'OTU_'+str(x)) for x in range(base_truth.shape[0])]
,columns=['sample_'+str(x) for x in range(base_truth.shape[1])])
X_noise_sub=pd.DataFrame(X_noise_sub
,index=[(rank_,overlap_,sub_,'OTU_'+str(x)) for x in range(X_noise_sub.shape[0])]
,columns=['sample_'+str(x) for x in range(X_noise_sub.shape[1])])
#for X_noise_subsampled in Subsamples_noisy:
save_base.append(base_truth)
save_sub.append(X_noise_sub)
for df_,loc_ in zip([save_base,save_sub]
,['simulation_base_truth','simulation_subsampled_noisy']):
df_=pd.concat(df_,axis=0)
df_.index=pd.MultiIndex.from_tuples(df_.index)
df_.index.names = ['rank', 'overlap','sequence_depth','OTUs']
df_.to_csv('cluster_models/'+loc_+'.csv') #save both and finish | [
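# Illustrative round-trip check (not part of the original pipeline): the CSVs just
# written carry the 4-level MultiIndex (rank, overlap, sequence_depth, OTUs), so
# they can be reloaded for downstream analysis like this.
reloaded = pd.read_csv('cluster_models/simulation_base_truth.csv', index_col=[0, 1, 2, 3])
print(reloaded.shape)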
"biom.load_table",
"simulations.build_block_model",
"pandas.read_table",
"gneiss.sort.niche_sort",
"warnings.simplefilter",
"pandas.MultiIndex.from_tuples",
"pandas.concat"
] | [((26, 57), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (47, 57), False, 'import warnings\n'), ((567, 586), 'biom.load_table', 'load_table', (['in_biom'], {}), '(in_biom)\n', (577, 586), False, 'from biom import load_table\n'), ((1253, 1314), 'pandas.read_table', 'pd.read_table', (["('%s' % map_file)"], {'index_col': '(0)', 'low_memory': '(False)'}), "('%s' % map_file, index_col=0, low_memory=False)\n", (1266, 1314), True, 'import pandas as pd\n'), ((1991, 2036), 'gneiss.sort.niche_sort', 'niche_sort', (['otutabledf', "mappingdf['host_num']"], {}), "(otutabledf, mappingdf['host_num'])\n", (2001, 2036), False, 'from gneiss.sort import niche_sort\n'), ((2545, 2722), 'simulations.build_block_model', 'build_block_model', (['(3)', 'model_fit.x[1]', 'model_fit.x[2]', 'model_fit.x[3]', 'model_fit.x[4]', 'otutabledf.shape[1]', 'otutabledf.shape[0]'], {'overlap': 'model_fit.x[5]', 'mapping_on': '(False)'}), '(3, model_fit.x[1], model_fit.x[2], model_fit.x[3],\n model_fit.x[4], otutabledf.shape[1], otutabledf.shape[0], overlap=\n model_fit.x[5], mapping_on=False)\n', (2562, 2722), False, 'from simulations import build_block_model\n'), ((4392, 4414), 'pandas.concat', 'pd.concat', (['df_'], {'axis': '(0)'}), '(df_, axis=0)\n', (4401, 4414), True, 'import pandas as pd\n'), ((4428, 4464), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['df_.index'], {}), '(df_.index)\n', (4453, 4464), True, 'import pandas as pd\n'), ((3360, 3497), 'simulations.build_block_model', 'build_block_model', (['rank_', '(model_peram / 15)', '(model_peram / 15)', 'model_peram', 'model_peram', '(200)', '(1000)'], {'overlap': 'overlap_', 'mapping_on': '(False)'}), '(rank_, model_peram / 15, model_peram / 15, model_peram,\n model_peram, 200, 1000, overlap=overlap_, mapping_on=False)\n', (3377, 3497), False, 'from simulations import build_block_model\n')] |
class ProjectListing(object):
@staticmethod
def list_projects(redis_connection):
"""Returns a list of projects store in redis with their
creation timestamps
Arguments:
redis_connection {RedisConnection} -- Redis connection to use as a provider for data
Returns:
list -- The list of project names and creation dates
"""
from foundations_contrib.utils import string_from_bytes
projects = redis_connection.zrange('projects', 0, -1, withscores=True)
return [{'name': string_from_bytes(name), 'created_at': created_at} for name, created_at in projects]
@staticmethod
def find_project(redis_connection, project_name):
"""Returns a single of projects store in redis with it's
creation timestamp
Arguments:
redis_connection {RedisConnection} -- Redis connection to use as a provider for data
project_name {str} -- Name of the project to find
Returns:
            dict -- A dictionary with the two attributes described above, or None if the project does not exist
"""
created_at = redis_connection.zscore('projects', project_name)
if created_at is None:
return None
return {'name': project_name, 'created_at': created_at}
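# Illustrative usage sketch (not part of foundations): projects are expected to be
# stored in the sorted set 'projects' with their creation time as the score.
def _example_project_listing(redis_connection):
    import time
    # redis-py >= 3 takes a mapping; older clients use zadd(name, score, member)
    redis_connection.zadd('projects', {'my-project': time.time()})
    print(ProjectListing.list_projects(redis_connection))
    print(ProjectListing.find_project(redis_connection, 'my-project'))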
| [
"foundations_contrib.utils.string_from_bytes"
] | [((566, 589), 'foundations_contrib.utils.string_from_bytes', 'string_from_bytes', (['name'], {}), '(name)\n', (583, 589), False, 'from foundations_contrib.utils import string_from_bytes\n')] |
import tensorflow as tf
import numpy as np
import resnet_block
def LeakyRelu(x, leak=0.2, name="LeakyRelu"):
    # Note: the `leak` argument is shadowed by a trainable variable initialised to 0.1,
    # so the slope is learned (PReLU-style). Algebraically f1*x + f2*|x| equals
    # x for x > 0 and leak*x for x < 0.
    with tf.variable_scope(name):
        leak_c = tf.constant(0.1)
        leak = tf.Variable(leak_c)
        f1 = 0.5 * (1 + leak)
        f2 = 0.5 * (1 - leak)
        return f1 * x + f2 * tf.abs(x)
def OurRelu(x, name="OurRelu"):
    # Variant activation with a trainable slope: f1*|x| - f2*x equals
    # leak*x for x > 0 and |x| for x < 0.
    with tf.variable_scope(name):
        leak_c = tf.constant(0.1)
        leak = tf.Variable(leak_c)
        f1 = 0.5 * (1 + leak)
        f2 = 0.5 * (1 - leak)
        return f1 * tf.abs(x) - f2 * x
def Friend_relu(x):
    # ReLU clipped to the 8-bit image range [0, 255]
    x = tf.nn.relu(x)
    Max = tf.constant([255.0])
    return tf.minimum(x, Max)
#normalization
def Batch_normalization(X):
_mean, _var = tf.nn.moments(X, [0, 1, 2])
X = tf.nn.batch_normalization(X, _mean, _var, 0, 1, 0.0001)
return X
#group normalization
def GroupNorm(x,G=32,eps=1e-5):
N,H,W,C=x.shape
x=tf.reshape(x,[tf.cast(N,tf.int32),tf.cast(H,tf.int32),tf.cast(W,tf.int32),tf.cast(G,tf.int32),tf.cast(C//G,tf.int32)])
# x=tf.reshape(x,[N,H,W,G,C//G])
mean,var=tf.nn.moments(x,[1,2,4],keep_dims=True)
x=(x-mean)/tf.sqrt(var+eps)
x=tf.reshape(x,[tf.cast(N,tf.int32),tf.cast(H,tf.int32),tf.cast(W,tf.int32),tf.cast(C,tf.int32)])
gamma = tf.Variable(tf.ones(shape=[1,1,1,tf.cast(C,tf.int32)]), name="gamma")
beta = tf.Variable(tf.zeros(shape=[1,1,1,tf.cast(C,tf.int32)]), name="beta")
return x*gamma+beta
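# Reference sketch of the same group-normalisation computation in plain NumPy
# (for intuition only; gamma/beta are omitted and it relies on the `np` import above).
def group_norm_numpy(x, G=32, eps=1e-5):
    N, H, W, C = x.shape
    xg = x.reshape(N, H, W, G, C // G)
    mean = xg.mean(axis=(1, 2, 4), keepdims=True)
    var = xg.var(axis=(1, 2, 4), keepdims=True)
    xg = (xg - mean) / np.sqrt(var + eps)
    return xg.reshape(N, H, W, C)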
class Net:
def __init__(self):
pass
#kernel initial
def weight_variable(self, shape):
initial = tf.truncated_normal(shape, mean=0.0,stddev=np.sqrt(2.0/shape[2]))
return tf.Variable( initial)
#bias initial
def bias_variable(self,shape):
return tf.Variable(tf.random_normal(shape, stddev=0.1))
def model(self, input_X, training):
#Multi-scale Convolution
w_conv1_3 = self.weight_variable([3, 3, 3, 64])
x_conv1_3 = tf.nn.conv2d(input_X, w_conv1_3, strides=[1, 2, 2, 1], padding='SAME')#64 x 64 x64
w_conv1_5 = self.weight_variable([5, 5, 3, 32])
x_conv1_5 = tf.nn.conv2d(input_X, w_conv1_5, strides=[1, 2, 2, 1], padding='SAME')
w_conv1_7 = self.weight_variable([7, 7, 3, 32])
x_conv1_7 = tf.nn.conv2d(input_X, w_conv1_7, strides=[1, 2, 2, 1], padding='SAME')
x_conv1 = tf.concat([x_conv1_3, x_conv1_5, x_conv1_7],3)
x_conv1 = GroupNorm(x_conv1)
x_conv1 = LeakyRelu(x_conv1)
w_conv2 = self.weight_variable([3, 3, 128, 256])
        x_conv2 = tf.nn.conv2d(x_conv1, w_conv2, strides=[1, 2, 2, 1], padding='SAME')#32x32x256
x_conv2 = GroupNorm(x_conv2)
x_conv2 = LeakyRelu(x_conv2)
w_conv4 = self.weight_variable([3, 3, 256, 512])
        x_conv4 = tf.nn.conv2d(x_conv2, w_conv4, strides=[1, 2, 2, 1], padding='SAME')#16x16x512
x_conv4 = GroupNorm(x_conv4)
x_conv4 = LeakyRelu(x_conv4)
x_conv6 = resnet_block.identity_block(x_conv4, 3, 512, [256, 256, 512], stage=2, block='b', training=training )
x_conv7 = resnet_block.identity_block(x_conv6, 3, 512, [256, 256, 512], stage=2, block='c', training=training )
x_conv8 = resnet_block.identity_block(x_conv7, 3, 512, [256, 256, 512], stage=2, block='d', training=training )
x_conv8 = resnet_block.identity_block(x_conv8, 3, 512, [256, 256, 512], stage=2, block='e', training=training )
x_conv8 = resnet_block.identity_block(x_conv8, 3, 512, [256, 256, 512], stage=2, block='f', training=training )
x_conv8 = resnet_block.identity_block(x_conv8, 3, 512, [256, 256, 512], stage=2, block='g', training=training )
x_conv8 = resnet_block.identity_block(x_conv8, 3, 512, [256, 256, 512], stage=2, block='h', training=training )
w_deconv1 = self.weight_variable([1, 1, 512, 512])
        x_conv9 = tf.nn.conv2d_transpose(x_conv8, w_deconv1,output_shape=tf.shape(x_conv4), strides=[1, 1, 1, 1], padding='VALID')#16x16x512
x_conv9 = GroupNorm(x_conv9)
x_conv9 = OurRelu(x_conv9)
x_conv9 = tf.concat([x_conv9, x_conv4],3)
w_conv9_1 = self.weight_variable([1, 1, 1024, 512])
x_conv9 = tf.nn.conv2d(x_conv9, w_conv9_1, strides=[1, 1, 1, 1], padding='VALID')
x_conv9 = GroupNorm(x_conv9)
x_conv9 = LeakyRelu(x_conv9)
w_deconv2 = self.weight_variable([3, 3, 256, 512])
x_conv10 = tf.nn.conv2d_transpose(x_conv9, w_deconv2,output_shape=tf.shape(x_conv2), strides=[1, 2, 2, 1], padding='SAME')
x_conv10 = GroupNorm(x_conv10)
x_conv10 = OurRelu(x_conv10)
x_conv10 = tf.concat([x_conv10, x_conv2],3)
w_conv10_1 = self.weight_variable([1, 1, 512, 256])
x_conv10 = tf.nn.conv2d(x_conv10, w_conv10_1, strides=[1, 1, 1, 1], padding='SAME')
x_conv10 = GroupNorm(x_conv10)
x_conv10 = LeakyRelu(x_conv10)
w_deconv3 = self.weight_variable([3, 3, 128, 256])
x_conv11 = tf.nn.conv2d_transpose(x_conv10, w_deconv3,output_shape=tf.shape(x_conv1), strides=[1, 2, 2, 1], padding='SAME')
x_conv11 = GroupNorm(x_conv11)
x_conv11 = OurRelu(x_conv11)
x_conv11 = tf.concat([x_conv11, x_conv1],3)
w_conv11_1 = self.weight_variable([1, 1, 256, 128])
x_conv11 = tf.nn.conv2d(x_conv11, w_conv11_1, strides=[1, 1, 1, 1], padding='VALID')
x_conv11 = GroupNorm(x_conv11)
x_conv11 = LeakyRelu(x_conv11)
w_deconv4 = self.weight_variable([3, 3, 3, 128])
x_conv12 = tf.nn.conv2d_transpose(x_conv11, w_deconv4,output_shape=tf.shape(input_X), strides=[1, 2, 2, 1], padding='SAME')
model = tf.add(x_conv12,input_X)
model = Friend_relu(model)
return input_X,x_conv12,model
if __name__ == "__main__":
net = Net()
    # a fixed batch size keeps every dimension static for GroupNorm's reshape
    input_X = tf.placeholder(tf.float32, [1, 128, 128, 3])
    model = net.model(input_X, training=True)
    init = tf.global_variables_initializer()
    sess = tf.Session()
    sess.run(init)
    # model() returns (input, residual, output); feed a dummy batch to run the graph
    _, _, pre = sess.run(model, feed_dict={input_X: np.zeros((1, 128, 128, 3), dtype=np.float32)})
    print(pre.shape)
"numpy.sqrt",
"tensorflow.shape",
"tensorflow.nn.moments",
"resnet_block.identity_block",
"tensorflow.cast",
"tensorflow.random_normal",
"tensorflow.placeholder",
"tensorflow.Session",
"tensorflow.concat",
"tensorflow.nn.conv2d",
"tensorflow.variable_scope",
"tensorflow.Variable",
"tensorflo... | [((584, 597), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (594, 597), True, 'import tensorflow as tf\n'), ((608, 628), 'tensorflow.constant', 'tf.constant', (['[255.0]'], {}), '([255.0])\n', (619, 628), True, 'import tensorflow as tf\n'), ((640, 658), 'tensorflow.minimum', 'tf.minimum', (['x', 'Max'], {}), '(x, Max)\n', (650, 658), True, 'import tensorflow as tf\n'), ((725, 752), 'tensorflow.nn.moments', 'tf.nn.moments', (['X', '[0, 1, 2]'], {}), '(X, [0, 1, 2])\n', (738, 752), True, 'import tensorflow as tf\n'), ((761, 816), 'tensorflow.nn.batch_normalization', 'tf.nn.batch_normalization', (['X', '_mean', '_var', '(0)', '(1)', '(0.0001)'], {}), '(X, _mean, _var, 0, 1, 0.0001)\n', (786, 816), True, 'import tensorflow as tf\n'), ((1084, 1127), 'tensorflow.nn.moments', 'tf.nn.moments', (['x', '[1, 2, 4]'], {'keep_dims': '(True)'}), '(x, [1, 2, 4], keep_dims=True)\n', (1097, 1127), True, 'import tensorflow as tf\n'), ((5787, 5834), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 128, 128, 3]'], {}), '(tf.float32, [None, 128, 128, 3])\n', (5801, 5834), True, 'import tensorflow as tf\n'), ((5889, 5922), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5920, 5922), True, 'import tensorflow as tf\n'), ((5934, 5946), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5944, 5946), True, 'import tensorflow as tf\n'), ((119, 142), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (136, 142), True, 'import tensorflow as tf\n'), ((161, 177), 'tensorflow.constant', 'tf.constant', (['(0.1)'], {}), '(0.1)\n', (172, 177), True, 'import tensorflow as tf\n'), ((193, 212), 'tensorflow.Variable', 'tf.Variable', (['leak_c'], {}), '(leak_c)\n', (204, 212), True, 'import tensorflow as tf\n'), ((358, 381), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (375, 381), True, 'import tensorflow as tf\n'), ((400, 416), 'tensorflow.constant', 'tf.constant', (['(0.1)'], {}), '(0.1)\n', (411, 416), True, 'import tensorflow as tf\n'), ((432, 451), 'tensorflow.Variable', 'tf.Variable', (['leak_c'], {}), '(leak_c)\n', (443, 451), True, 'import tensorflow as tf\n'), ((1139, 1157), 'tensorflow.sqrt', 'tf.sqrt', (['(var + eps)'], {}), '(var + eps)\n', (1146, 1157), True, 'import tensorflow as tf\n'), ((1657, 1677), 'tensorflow.Variable', 'tf.Variable', (['initial'], {}), '(initial)\n', (1668, 1677), True, 'import tensorflow as tf\n'), ((1964, 2034), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['input_X', 'w_conv1_3'], {'strides': '[1, 2, 2, 1]', 'padding': '"""SAME"""'}), "(input_X, w_conv1_3, strides=[1, 2, 2, 1], padding='SAME')\n", (1976, 2034), True, 'import tensorflow as tf\n'), ((2123, 2193), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['input_X', 'w_conv1_5'], {'strides': '[1, 2, 2, 1]', 'padding': '"""SAME"""'}), "(input_X, w_conv1_5, strides=[1, 2, 2, 1], padding='SAME')\n", (2135, 2193), True, 'import tensorflow as tf\n'), ((2270, 2340), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['input_X', 'w_conv1_7'], {'strides': '[1, 2, 2, 1]', 'padding': '"""SAME"""'}), "(input_X, w_conv1_7, strides=[1, 2, 2, 1], padding='SAME')\n", (2282, 2340), True, 'import tensorflow as tf\n'), ((2359, 2406), 'tensorflow.concat', 'tf.concat', (['[x_conv1_3, x_conv1_5, x_conv1_7]', '(3)'], {}), '([x_conv1_3, x_conv1_5, x_conv1_7], 3)\n', (2368, 2406), True, 'import tensorflow as tf\n'), ((2555, 2623), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x_conv1', 'w_conv2'], {'strides': '[1, 2, 2, 1]', 
'padding': '"""SAME"""'}), "(x_conv1, w_conv2, strides=[1, 2, 2, 1], padding='SAME')\n", (2567, 2623), True, 'import tensorflow as tf\n'), ((2785, 2853), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x_conv2', 'w_conv4'], {'strides': '[1, 2, 2, 1]', 'padding': '"""SAME"""'}), "(x_conv2, w_conv4, strides=[1, 2, 2, 1], padding='SAME')\n", (2797, 2853), True, 'import tensorflow as tf\n'), ((2956, 3060), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv4', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""b"""', 'training': 'training'}), "(x_conv4, 3, 512, [256, 256, 512], stage=2,\n block='b', training=training)\n", (2983, 3060), False, 'import resnet_block\n'), ((3076, 3180), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv6', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""c"""', 'training': 'training'}), "(x_conv6, 3, 512, [256, 256, 512], stage=2,\n block='c', training=training)\n", (3103, 3180), False, 'import resnet_block\n'), ((3196, 3300), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv7', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""d"""', 'training': 'training'}), "(x_conv7, 3, 512, [256, 256, 512], stage=2,\n block='d', training=training)\n", (3223, 3300), False, 'import resnet_block\n'), ((3316, 3420), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv8', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""e"""', 'training': 'training'}), "(x_conv8, 3, 512, [256, 256, 512], stage=2,\n block='e', training=training)\n", (3343, 3420), False, 'import resnet_block\n'), ((3436, 3540), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv8', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""f"""', 'training': 'training'}), "(x_conv8, 3, 512, [256, 256, 512], stage=2,\n block='f', training=training)\n", (3463, 3540), False, 'import resnet_block\n'), ((3556, 3660), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv8', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""g"""', 'training': 'training'}), "(x_conv8, 3, 512, [256, 256, 512], stage=2,\n block='g', training=training)\n", (3583, 3660), False, 'import resnet_block\n'), ((3676, 3780), 'resnet_block.identity_block', 'resnet_block.identity_block', (['x_conv8', '(3)', '(512)', '[256, 256, 512]'], {'stage': '(2)', 'block': '"""h"""', 'training': 'training'}), "(x_conv8, 3, 512, [256, 256, 512], stage=2,\n block='h', training=training)\n", (3703, 3780), False, 'import resnet_block\n'), ((4068, 4100), 'tensorflow.concat', 'tf.concat', (['[x_conv9, x_conv4]', '(3)'], {}), '([x_conv9, x_conv4], 3)\n', (4077, 4100), True, 'import tensorflow as tf\n'), ((4178, 4249), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x_conv9', 'w_conv9_1'], {'strides': '[1, 1, 1, 1]', 'padding': '"""VALID"""'}), "(x_conv9, w_conv9_1, strides=[1, 1, 1, 1], padding='VALID')\n", (4190, 4249), True, 'import tensorflow as tf\n'), ((4609, 4642), 'tensorflow.concat', 'tf.concat', (['[x_conv10, x_conv2]', '(3)'], {}), '([x_conv10, x_conv2], 3)\n', (4618, 4642), True, 'import tensorflow as tf\n'), ((4721, 4793), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x_conv10', 'w_conv10_1'], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(x_conv10, w_conv10_1, strides=[1, 1, 1, 1], padding='SAME')\n", (4733, 4793), True, 'import tensorflow as tf\n'), ((5158, 5191), 'tensorflow.concat', 'tf.concat', (['[x_conv11, x_conv1]', '(3)'], {}), '([x_conv11, x_conv1], 
3)\n', (5167, 5191), True, 'import tensorflow as tf\n'), ((5270, 5343), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x_conv11', 'w_conv11_1'], {'strides': '[1, 1, 1, 1]', 'padding': '"""VALID"""'}), "(x_conv11, w_conv11_1, strides=[1, 1, 1, 1], padding='VALID')\n", (5282, 5343), True, 'import tensorflow as tf\n'), ((5627, 5652), 'tensorflow.add', 'tf.add', (['x_conv12', 'input_X'], {}), '(x_conv12, input_X)\n', (5633, 5652), True, 'import tensorflow as tf\n'), ((929, 949), 'tensorflow.cast', 'tf.cast', (['N', 'tf.int32'], {}), '(N, tf.int32)\n', (936, 949), True, 'import tensorflow as tf\n'), ((949, 969), 'tensorflow.cast', 'tf.cast', (['H', 'tf.int32'], {}), '(H, tf.int32)\n', (956, 969), True, 'import tensorflow as tf\n'), ((969, 989), 'tensorflow.cast', 'tf.cast', (['W', 'tf.int32'], {}), '(W, tf.int32)\n', (976, 989), True, 'import tensorflow as tf\n'), ((989, 1009), 'tensorflow.cast', 'tf.cast', (['G', 'tf.int32'], {}), '(G, tf.int32)\n', (996, 1009), True, 'import tensorflow as tf\n'), ((1009, 1034), 'tensorflow.cast', 'tf.cast', (['(C // G)', 'tf.int32'], {}), '(C // G, tf.int32)\n', (1016, 1034), True, 'import tensorflow as tf\n'), ((1176, 1196), 'tensorflow.cast', 'tf.cast', (['N', 'tf.int32'], {}), '(N, tf.int32)\n', (1183, 1196), True, 'import tensorflow as tf\n'), ((1196, 1216), 'tensorflow.cast', 'tf.cast', (['H', 'tf.int32'], {}), '(H, tf.int32)\n', (1203, 1216), True, 'import tensorflow as tf\n'), ((1216, 1236), 'tensorflow.cast', 'tf.cast', (['W', 'tf.int32'], {}), '(W, tf.int32)\n', (1223, 1236), True, 'import tensorflow as tf\n'), ((1236, 1256), 'tensorflow.cast', 'tf.cast', (['C', 'tf.int32'], {}), '(C, tf.int32)\n', (1243, 1256), True, 'import tensorflow as tf\n'), ((1764, 1799), 'tensorflow.random_normal', 'tf.random_normal', (['shape'], {'stddev': '(0.1)'}), '(shape, stddev=0.1)\n', (1780, 1799), True, 'import tensorflow as tf\n'), ((302, 311), 'tensorflow.abs', 'tf.abs', (['x'], {}), '(x)\n', (308, 311), True, 'import tensorflow as tf\n'), ((532, 541), 'tensorflow.abs', 'tf.abs', (['x'], {}), '(x)\n', (538, 541), True, 'import tensorflow as tf\n'), ((1619, 1642), 'numpy.sqrt', 'np.sqrt', (['(2.0 / shape[2])'], {}), '(2.0 / shape[2])\n', (1626, 1642), True, 'import numpy as np\n'), ((3910, 3927), 'tensorflow.shape', 'tf.shape', (['x_conv4'], {}), '(x_conv4)\n', (3918, 3927), True, 'import tensorflow as tf\n'), ((4457, 4474), 'tensorflow.shape', 'tf.shape', (['x_conv2'], {}), '(x_conv2)\n', (4465, 4474), True, 'import tensorflow as tf\n'), ((5006, 5023), 'tensorflow.shape', 'tf.shape', (['x_conv1'], {}), '(x_conv1)\n', (5014, 5023), True, 'import tensorflow as tf\n'), ((5554, 5571), 'tensorflow.shape', 'tf.shape', (['input_X'], {}), '(input_X)\n', (5562, 5571), True, 'import tensorflow as tf\n'), ((1303, 1323), 'tensorflow.cast', 'tf.cast', (['C', 'tf.int32'], {}), '(C, tf.int32)\n', (1310, 1323), True, 'import tensorflow as tf\n'), ((1385, 1405), 'tensorflow.cast', 'tf.cast', (['C', 'tf.int32'], {}), '(C, tf.int32)\n', (1392, 1405), True, 'import tensorflow as tf\n')] |
#!/usr/bin/env python
"""
package
Implementation for the package command that handles helping set up and
manipulate packages for use with cirrus.
Commands:
package init - Initialise a new repo with a basic cirrus.conf file
add the appropriate setup, manifest and requirements files
package sublime-project - Assistant to set up a sublime project for a cirrus
managed package, including build rules for the local venv
"""
import contextlib
import inspect
import requests
import os
import sys
import pystache
from cirrus._2to3 import ConfigParser, to_str
import pluggage.registry
import cirrus.templates
from argparse import ArgumentParser, ArgumentTypeError, Namespace
from cirrus.logger import get_logger
from cirrus.utils import working_dir
from cirrus.environment import repo_directory
from cirrus.package_container import init_container
from cirrus.utils import update_version
from cirrus.invoke_helpers import local
from cirrus.twine_helpers import register_package
from cirrus.pypirc import PypircFile
from cirrus.git_tools import (
RepoInitializer,
branch,
push,
get_tags,
tag_release,
commit_files_optional_push,
get_active_branch
)
DEFAULT_HISTORY_SENTINEL = "\nCIRRUS_HISTORY_SENTINEL\n"
LOGGER = get_logger()
TOXFILE = \
"""
[tox]
envlist = {python}
[testenv]
{install_command}
deps=
-r{requirements}
-r{test_requirements}
commands=nosetests -w {testdir}/unit
"""
def validate_package_name(value):
"""
    ensure package names don't cause problems
with bad characters
"""
if "-" in value:
raise ArgumentTypeError(
"Package name: {} contains a - character".format(value)
)
if " " in value:
raise ArgumentTypeError(
"Package name: {} contains a space ".format(value)
)
return value
def validate_pypi_package_name(value):
"""
    ensure package names don't cause problems
with bad characters
"""
if " " in value:
raise ArgumentTypeError(
"Package name: {} contains a space ".format(value)
)
return value
def get_plugin(plugin_name):
"""
_get_plugin_
Get the editor plugin
"""
factory = pluggage.registry.get_factory(
'editors',
load_modules=['cirrus.plugins.editors']
)
return factory(plugin_name)
def list_plugins():
factory = pluggage.registry.get_factory(
'editors',
load_modules=['cirrus.plugins.editors']
)
return [
k for k in factory.registry.keys()
if k != "EditorPlugin"
]
def build_parser(argslist):
"""
build CLI parser and process args
"""
parser = ArgumentParser(
description=(
'git cirrus package command:'
' initialises cirrus for an existing git repo'
)
)
parser.add_argument('command', nargs='?')
subparsers = parser.add_subparsers(dest='command')
init_command = subparsers.add_parser('init')
init_command.add_argument(
'--repo', '-r',
dest='repo',
default=os.getcwd()
)
init_command.add_argument(
'--source-dir', '-s',
help="source code directory within package, assumes top level dir if not set",
dest='source',
default=None
)
init_command.add_argument(
'--package', '-p',
help="name of package being bootstrapped",
dest='package',
type=validate_package_name,
required=True
)
init_command.add_argument(
'--tests',
help='test dir name',
default='tests'
)
init_command.add_argument(
'--version', '-v',
help="initial package version",
default='0.0.0',
dest='version'
)
init_command.add_argument(
'--organization', '-o',
dest='org',
default='ORGANIZATION HERE',
)
init_command.add_argument(
'--description', '-d',
dest='desc',
default='PACKAGE DESCRIPTION HERE'
)
init_command.add_argument(
'--pypi-package-name',
help=(
'Name for package on upload to pypi, '
'use if different from package option'
),
default=None,
type=validate_pypi_package_name
)
init_command.add_argument(
'--use-pypirc',
help='Use pypirc to add install options to pip commands',
default=False,
action='store_true'
)
init_command.add_argument(
'--register-with-pypi',
help=(
"Set this to the name of a pypi repo in your pypirc "
"to register the new package with that server"
),
default=None
)
init_command.add_argument(
'--add-gitignore',
help="Add a git ignore file to the repo",
default=True,
action='store_true'
)
init_command.add_argument(
'--gitignore-url',
help='URL of gitignore file to add',
default='https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore'
)
init_command.add_argument(
'--templates',
help='template rules to include in MANIFEST',
nargs='+',
default=list()
)
init_command.add_argument(
'--version-file',
help='Version file, defaults to package __init__.py',
default=None
)
init_command.add_argument(
'--history-file',
help='changelog history filename',
default='HISTORY.md'
)
init_command.add_argument(
'--requirements',
help='requirements file for pip',
default='requirements.txt'
)
init_command.add_argument(
'--test-requirements',
help='test requirements file for pip',
default='test-requirements.txt',
dest='test_requirements'
)
init_command.add_argument(
'--python',
help='optionally specify the name of python binary to use in this package, eg python2, python3',
default=None
)
init_command.add_argument(
'--test-mode',
help='test execution mode',
choices=['nosetests', 'tox'],
default='tox',
)
init_command.add_argument(
'--master-branch',
help='GitFlow master branch',
default='master',
dest='master'
)
init_command.add_argument(
'--develop-branch',
help='GitFlow develop branch',
default='develop',
dest='develop'
)
init_command.add_argument(
'--origin-name',
default='origin',
help="Git repo remote name",
dest='origin'
)
init_command.add_argument(
'--no-remote',
help='disable pushing changes to remote, commit locally only',
default=False,
action='store_true'
)
init_command.add_argument(
'--create-version-file',
help="create the file containing __version__ if it doesn\'t exist",
default=False,
action='store_true'
)
init_command.add_argument(
'--bootstrap',
help="assumes repo is empty and will create a very minimal set of files to get things started",
default=False,
action='store_true'
)
cont_command = subparsers.add_parser('container-init')
cont_command.add_argument(
'--repo', '-r',
dest='repo',
default=os.getcwd()
)
cont_command.add_argument(
'--template-dir',
help="container template dir in repo",
default='container-template'
)
cont_command.add_argument(
'--image-dir',
help="container image build cache dir in repo",
default='image-dir'
)
cont_command.add_argument(
'--base-image', '-b',
help="Base image for your docker container",
dest='container',
required=True
)
cont_command.add_argument(
'--entrypoint', '-e',
help='container entrypoint',
default='/bin/bash'
)
cont_command.add_argument(
'--docker-registry',
default=None,
help='docker-registry address'
)
cont_command.add_argument(
'--container-virtualenv',
default=None,
dest='virtualenv',
help="If container image has a virtualenv, install package there, otherwise will install in whatever is system python"
)
cont_command.add_argument(
'--local-install',
default=False,
action='store_true',
help="deprecated, has no effect"
)
cont_command.add_argument(
'--pypi-install',
default=False,
action='store_true',
help="Deprecated, has no effect"
)
cont_command.add_argument(
'--no-remote',
help='disable pushing changes to remote, commit locally only',
default=False,
action='store_true'
)
proj_command = subparsers.add_parser('project')
proj_command.add_argument(
'--repo', '-r',
dest='repo',
default=os.getcwd()
)
proj_command.add_argument(
'--type', '-t',
help='type of project to create',
choices=list_plugins()
)
proj_command.add_argument(
'--pythonpath', '-p',
nargs='+',
help='subdirs to include on pythonpath',
default=list()
)
upd_command = subparsers.add_parser('update')
upd_command.add_argument(
'--setup-py',
default=False,
action='store_true',
help='Update the setup.py file to the latest provided by cirrus'
)
upd_command.add_argument(
'--repo', '-r',
dest='repo',
default=os.getcwd()
)
opts = parser.parse_args(argslist)
return opts
def setup_branches(opts):
"""
set up git branches, starting from master
"""
do_push = not opts.no_remote
LOGGER.info(
"setting up branches master={} develop={}".format(
opts.master, opts.develop
)
)
with working_dir(opts.repo):
initializer = RepoInitializer(opts.repo)
initializer.init_branch(opts.master, opts.origin, remote=do_push)
initializer.init_branch(opts.develop, opts.origin, remote=do_push)
branch(opts.repo, opts.develop, opts.master)
LOGGER.info("Working on {}".format(get_active_branch(opts.repo)))
def commit_and_tag(opts, *files):
"""
add files, commit changes and verify that initial tag
exists
"""
do_push = not opts.no_remote
commit_files_optional_push(
opts.repo,
"git cirrus package init",
do_push,
*files
)
tags = get_tags(opts.repo)
if opts.version not in tags:
msg = (
"tag {} not found, tagging {}..."
).format(opts.version, opts.master)
LOGGER.info(msg)
tag_release(
opts.repo,
opts.version,
master=opts.master,
push=do_push
)
branch(opts.repo, opts.develop, opts.develop)
def backup_file(filename):
"""
if filename exists, make a .BAK copy of it to avoid clobbering
any existing files.
"""
if not os.path.exists(filename):
return
newfile = "{}.BAK".format(filename)
LOGGER.info("Backing up {} to {}".format(filename, newfile))
with open(filename, 'r') as handle_in:
content = handle_in.read()
with open(newfile, 'w') as handle_out:
handle_out.write(content)
def write_manifest(opts):
"""
write the manifest file used for distribution
"""
manifest = os.path.join(opts.repo, 'MANIFEST.in')
backup_file(manifest)
LOGGER.info("setting up manifest: {}".format(manifest))
lines = [
"include {}".format(opts.requirements),
"include {}".format(opts.test_requirements),
"include cirrus.conf"
]
lines.extend(opts.templates)
with open(manifest, 'w') as handle:
for line in lines:
handle.write("{}\n".format(line))
return manifest
def write_setup_py(opts):
"""
write setup.py for the new package, using
the cirrus template.
Placeholder for rendering it with other
values.
"""
setup = os.path.join(opts.repo, 'setup.py')
backup_file(setup)
LOGGER.info("setting up setup.py: {}".format(setup))
template = os.path.join(
os.path.dirname(inspect.getsourcefile(cirrus.templates)),
'setup.py.mustache'
)
with open(template, 'r') as handle:
templ = handle.read()
rendered = pystache.render(templ, {})
with open(setup, 'w') as handle:
handle.write(rendered)
return setup
def write_history(opts):
"""
set up the history file containing the sentinel for
release notes
"""
history = os.path.join(opts.repo, opts.history_file)
LOGGER.info("setting up history file: {}".format(history))
if not os.path.exists(history):
with open(history, 'w') as handle:
handle.write(DEFAULT_HISTORY_SENTINEL)
else:
with open(history, 'a') as handle:
handle.write(DEFAULT_HISTORY_SENTINEL)
return history
def write_gitignore(opts):
"""get gitignore template from url and add to repo"""
url = opts.gitignore_url
resp = requests.get(url, verify=False)
resp.raise_for_status()
data = resp.content
gitignore = os.path.join(opts.repo, '.gitignore')
content = to_str(data)
with open(gitignore, 'w') as handle:
handle.write(content)
return gitignore
def write_cirrus_conf(opts, version_file):
"""
build the basic cirrus config file and write it out
"""
cirrus_conf = os.path.join(opts.repo, 'cirrus.conf')
LOGGER.info("setting up cirrus.conf: {}".format(cirrus_conf))
backup_file(cirrus_conf)
pname = opts.package
if opts.pypi_package_name:
pname = opts.pypi_package_name
config = ConfigParser.ConfigParser()
config.add_section('package')
config.set('package', 'name', pname)
config.set('package', 'version', str(opts.version))
config.set('package', 'description', str(opts.desc))
config.set('package', 'organization', str(opts.org))
config.set('package', 'version_file', version_file)
config.set('package', 'history_file', opts.history_file)
config.set('package', 'author', os.environ['USER'])
config.set('package', 'author_email', 'EMAIL_HERE')
config.set('package', 'url', 'PACKAGE_URL_HERE')
if opts.source:
config.set('package', 'find_packages', str(opts.source))
config.add_section('gitflow')
config.set('gitflow', 'origin_name', str(opts.origin))
config.set('gitflow', 'develop_branch', str(opts.develop))
config.set('gitflow', 'release_branch_prefix', 'release/')
config.set('gitflow', 'feature_branch_prefix', 'feature/')
config.add_section('build')
if os.path.exists(opts.test_requirements):
config.set(
'build',
'extra_requirements',
opts.test_requirements
)
if opts.python:
config.set(
'build',
'python',
opts.python
)
if opts.use_pypirc:
rcfile = PypircFile()
pip_opts = rcfile.pip_options()
LOGGER.info("Adding pip options to cirrus.conf: {}".format(pip_opts))
config.set(
'build',
'pip_options',
pip_opts
)
config.add_section('pypi')
config.set(
'pypi',
'pip_options',
pip_opts
)
config.add_section('test-default')
config.set('test-default', 'where', 'tests/unit')
config.set('test-default', 'mode', str(opts.test_mode))
config.add_section('qc')
config.set('qc', 'threshold', str(10))
config.set('qc', 'include_files', 'src/{}/*'.format(opts.package))
config.set('qc', 'exclude_dirs', 'tests dist venv .tox')
config.set('qc', 'linters', "Pep8 Pyflakes")
config.add_section("qc/Pep8")
config.set("qc/Pep8", "allowed_errors_per_file", str(5))
config.add_section("qc/Pyflakes")
config.set("qc/Pyflakes", "allowed_errors_per_file", str(5))
with open(cirrus_conf, 'w') as handle:
config.write(handle)
return cirrus_conf
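# For reference (illustrative; actual values depend on the CLI options), the
# generated cirrus.conf contains sections along these lines:
#
#   [package]
#   name = mypackage
#   version = 0.0.0
#   description = PACKAGE DESCRIPTION HERE
#   organization = ORGANIZATION HERE
#   version_file = src/mypackage/__init__.py
#   history_file = HISTORY.md
#
#   [gitflow]
#   origin_name = origin
#   develop_branch = develop
#   release_branch_prefix = release/
#   feature_branch_prefix = feature/
#
#   [build]
#   extra_requirements = test-requirements.txt
#
#   [test-default]
#   where = tests/unit
#   mode = tox
#
#   [qc]
#   threshold = 10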
def update_package_version(opts):
"""
set and/or update package __version__
attr
"""
version_file = opts.version_file
if version_file is None:
version_file = os.path.join(opts.repo, main_init_file(opts))
if not os.path.exists(version_file):
msg = (
"unable to find version file: {}"
).format(version_file)
LOGGER.info(msg)
if opts.create_version_file:
with open(version_file, 'w') as handle:
handle.write("# created by cirrus package init\n")
handle.write("__version__ = \"{}\"".format(opts.version))
LOGGER.info("creating version file: {}".format(version_file))
else:
msg = (
"Unable to update version file, please verify the path {}"
" is correct. Either provide the --version-file"
" option pointing"
" to an existing file or set the --create-version-file"
" flag to create a new file"
).format(version_file)
LOGGER.error(msg)
sys.exit(1)
update_version(version_file, opts.version)
if version_file.startswith(opts.repo):
version_file = version_file.replace(opts.repo, '')
if version_file.startswith('/'):
version_file = version_file[1:]
return version_file
def create_files(opts):
"""
create files and return a list of the
files that need to be committed
"""
files = []
files.append(write_manifest(opts))
files.append(write_setup_py(opts))
files.append(write_history(opts))
if opts.add_gitignore:
files.append(write_gitignore(opts))
vers_file = update_package_version(opts)
files.append(vers_file)
files.append(write_cirrus_conf(opts, vers_file))
return files
def make_package_dir(directory, pkgname):
# TODO: validate package name
if pkgname.count('.') > 0:
package_dirs = pkgname.split('.')
else:
package_dirs = [pkgname]
results = []
pathname = directory
while package_dirs:
d = package_dirs.pop(0)
pathname = os.path.join(pathname, d)
init_file = os.path.join(pathname, '__init__.py')
os.makedirs(pathname)
with open(init_file, 'w') as handle:
handle.write("#created by cirrus\n")
LOGGER.info("wrote: {}".format(init_file))
results.append(init_file)
return results
def main_init_file(opts):
package = opts.package
if package.count('.') > 0:
package_dirs = package.split('.')
else:
package_dirs = [package]
elems = []
if opts.source:
elems.append(opts.source)
elems.extend(package_dirs)
elems.append('__init__.py')
return os.path.join(*elems)
def bootstrap_repo(opts):
"""
bootstrap an empty repo with initial
file and dir structure.
This adds:
- src/<package>/__init__.py
    - tests/unit/<package>/sample_test.py
- requirements.txt
- test-requirements.txt
- tox.ini
"""
package = opts.package
if opts.source is None:
opts.source = 'src'
files = []
src_dir = opts.source
tests_dir = os.path.join(opts.tests)
unit_dir = os.path.join(tests_dir, 'unit')
init_files = [
os.path.join(tests_dir, '__init__.py'),
os.path.join(unit_dir, '__init__.py'),
]
for d in [src_dir, tests_dir, unit_dir]:
os.makedirs(d)
for i in init_files:
with open(i, 'w') as handle:
handle.write("#created by cirrus\n")
files.append(i)
src_inits = make_package_dir(src_dir, package)
test_inits = make_package_dir(unit_dir, package)
files.extend(src_inits)
files.extend(test_inits)
test_pkg_dir = os.path.dirname(test_inits[-1])
main_init = main_init_file(opts)
with open(main_init, 'w') as handle:
handle.write("#!/usr/bin/env python\n")
handle.write("# created by cirrus\n")
handle.write("__version__=\'{}\'\n".format(opts.version))
if not os.path.exists(opts.requirements):
with open(opts.requirements, 'w') as handle:
handle.write("requests\n")
files.append(opts.requirements)
if not os.path.exists(opts.test_requirements):
with open(opts.test_requirements, 'w') as handle:
handle.write("tox\n")
handle.write("nose\n")
handle.write("coverage\n")
handle.write("mock\n")
handle.write("pep8\n")
handle.write("pytest\n")
files.append(opts.test_requirements)
if not os.path.exists('tox.ini'):
if opts.python is not None:
py_vers = opts.python.replace('python', 'py')
py_vers = py_vers.replace('.', '')
else:
py_vers = "py{}{}".format(
sys.version_info.major,
sys.version_info.minor
)
with open('tox.ini', 'w') as handle:
install_comm = ""
if opts.use_pypirc:
rcfile = PypircFile()
pip_opts = rcfile.pip_options()
LOGGER.info(
"Adding pip options to tox.ini: {}".format(
pip_opts
)
)
install_comm = (
"install_command = pip install "
"{} {{opts}} {{packages}}"
).format(pip_opts)
handle.write(
TOXFILE.format(
requirements=opts.requirements,
test_requirements=opts.test_requirements,
install_command=install_comm,
testdir=opts.tests,
python=py_vers
)
)
files.append('tox.ini')
template = os.path.join(
os.path.dirname(inspect.getsourcefile(cirrus.templates)),
'sample_test.py.mustache'
)
with open(template, 'r') as handle:
templ = handle.read()
sample_test = os.path.join(test_pkg_dir, 'sample_test.py')
rendered = pystache.render(templ, {'package': opts.package})
with open(sample_test, 'w') as handle:
handle.write(rendered)
files.append(sample_test)
commit_files_optional_push(
opts.repo,
"git cirrus package bootstrap",
False,
*files
)
def setup_sdist(opts):
LOGGER.info("Running setup.py sdist...")
local(
'cd {} && python setup.py sdist'.format(
repo_directory()
)
)
dist_dir = os.path.join(repo_directory(), 'dist')
pkg = opts.package
if opts.pypi_package_name:
pkg = opts.pypi_package_name
package = "{}-{}.tar.gz".format(pkg, opts.version)
return os.path.join(dist_dir, package)
def init_package_api(**kwargs):
"""
shim method to allow init_package to be called as
an API call by pushing arguments into an argparse namespace
TODO: Refactor init_package to be argparse namespace agnostic
"""
namespace = Namespace()
namespace.repo = kwargs.get('repo', os.getcwd())
namespace.source = kwargs.get('source')
namespace.package = kwargs.get('package')
namespace.tests = kwargs.get('tests', 'tests')
namespace.version = kwargs.get('version', '0.0.0')
namespace.org = kwargs.get('organization', 'ORGANIZATION HERE')
namespace.desc = kwargs.get('description', 'PACKAGE DESCRIPTION HERE')
namespace.pypi_package_name = kwargs.get('pypi_package_name', None)
namespace.use_pypirc = kwargs.get('use_pypirc', False)
namespace.register_with_pypi = kwargs.get('register_with_pypi', None)
namespace.add_gitignore = kwargs.get('add_gitignore', True)
namespace.gitignore_url = kwargs.get(
'gitignore_url',
'https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore'
)
namespace.templates = kwargs.get('templates', [])
namespace.version_file = kwargs.get('version_file', None)
namespace.history_file = kwargs.get('history_file', 'HISTORY.md')
namespace.requirements = kwargs.get('requirements', 'requirements.txt')
namespace.test_requirements = kwargs.get('testrequirements', 'test-requirements.txt')
namespace.python = kwargs.get('python')
namespace.test_mode = kwargs.get('test_mode', 'tox')
namespace.master = 'master'
namespace.develop = 'develop'
namespace.origin = 'origin'
namespace.no_remote = kwargs.get('no_remote', False)
namespace.create_version_file = kwargs.get('create_version_file', False)
namespace.bootstrap = kwargs.get('bootstrap', False)
validate_package_name(namespace.package)
    if namespace.pypi_package_name:
        validate_pypi_package_name(namespace.pypi_package_name)
init_package(namespace)
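# Illustrative call of the API shim above (hypothetical paths and names, never
# executed by cirrus itself); it mirrors what `git cirrus package init` does.
def _example_init_package_api():
    init_package_api(
        repo='/tmp/mypackage',
        package='mypackage',
        version='0.1.0',
        description='example package',
        bootstrap=True,
        no_remote=True
    )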
def init_package(opts):
"""
initialise a repo with a basic cirrus setup
"""
if opts.bootstrap:
with working_dir(opts.repo):
bootstrap_repo(opts)
setup_branches(opts)
# write files
files = create_files(opts)
with working_dir(opts.repo):
commit_and_tag(opts, *files)
if opts.register_with_pypi:
# run setup.py sdist and then
# call register_package with dist file
package = setup_sdist(opts)
LOGGER.info(
"Registering package {} with pypi {}".format(
package, opts.register_with_pypi
)
)
register_package(package, opts.register_with_pypi)
msg = (
"\nA basic cirrus.conf file has been added to your package\n"
"please review it and add any additional fields and commit it\n"
"The files have been added to the {} branch"
).format(opts.develop)
LOGGER.info(msg)
def build_project(opts):
"""
create an editor/ide project for the repo
"""
pname = opts.type
plugin = get_plugin(pname)
plugin.run(opts)
def update_setup_py(opts):
LOGGER.info("Updating setup.py...")
repo_location = opts.repo
s_py = os.path.join(repo_location, 'setup.py')
if os.path.exists(s_py):
backup_file(s_py)
else:
LOGGER.error("no setup.py found in {}".format(repo_location))
raise RuntimeError('{} not found'.format(s_py))
# render new template...
write_setup_py(opts)
# commit new file
commit_files_optional_push(
opts.repo,
"git cirrus package update: setup.py",
False,
'setup.py'
)
def update_package(opts):
"""
update cirrus templates/files in the repo
"""
if opts.setup_py:
update_setup_py(opts)
def main():
"""
main cli response handler
"""
opts = build_parser(sys.argv)
if opts.command == 'init':
init_package(opts)
if opts.command == 'container-init':
init_container(opts)
if opts.command == 'project':
build_project(opts)
if opts.command == 'update':
update_package(opts)
if __name__ == '__main__':
main()
| [
"cirrus.package_container.init_container",
"cirrus.twine_helpers.register_package",
"cirrus.logger.get_logger",
"argparse.Namespace",
"sys.exit",
"cirrus.git_tools.commit_files_optional_push",
"os.path.exists",
"inspect.getsourcefile",
"argparse.ArgumentParser",
"pystache.render",
"cirrus._2to3.... | [((1249, 1261), 'cirrus.logger.get_logger', 'get_logger', ([], {}), '()\n', (1259, 1261), False, 'from cirrus.logger import get_logger\n'), ((2660, 2766), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""git cirrus package command: initialises cirrus for an existing git repo"""'}), "(description=\n 'git cirrus package command: initialises cirrus for an existing git repo')\n", (2674, 2766), False, 'from argparse import ArgumentParser, ArgumentTypeError, Namespace\n'), ((10441, 10527), 'cirrus.git_tools.commit_files_optional_push', 'commit_files_optional_push', (['opts.repo', '"""git cirrus package init"""', 'do_push', '*files'], {}), "(opts.repo, 'git cirrus package init', do_push, *\n files)\n", (10467, 10527), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((10573, 10592), 'cirrus.git_tools.get_tags', 'get_tags', (['opts.repo'], {}), '(opts.repo)\n', (10581, 10592), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((10898, 10943), 'cirrus.git_tools.branch', 'branch', (['opts.repo', 'opts.develop', 'opts.develop'], {}), '(opts.repo, opts.develop, opts.develop)\n', (10904, 10943), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((11504, 11542), 'os.path.join', 'os.path.join', (['opts.repo', '"""MANIFEST.in"""'], {}), "(opts.repo, 'MANIFEST.in')\n", (11516, 11542), False, 'import os\n'), ((12131, 12166), 'os.path.join', 'os.path.join', (['opts.repo', '"""setup.py"""'], {}), "(opts.repo, 'setup.py')\n", (12143, 12166), False, 'import os\n'), ((12462, 12488), 'pystache.render', 'pystache.render', (['templ', '{}'], {}), '(templ, {})\n', (12477, 12488), False, 'import pystache\n'), ((12707, 12749), 'os.path.join', 'os.path.join', (['opts.repo', 'opts.history_file'], {}), '(opts.repo, opts.history_file)\n', (12719, 12749), False, 'import os\n'), ((13193, 13224), 'requests.get', 'requests.get', (['url'], {'verify': '(False)'}), '(url, verify=False)\n', (13205, 13224), False, 'import requests\n'), ((13293, 13330), 'os.path.join', 'os.path.join', (['opts.repo', '""".gitignore"""'], {}), "(opts.repo, '.gitignore')\n", (13305, 13330), False, 'import os\n'), ((13345, 13357), 'cirrus._2to3.to_str', 'to_str', (['data'], {}), '(data)\n', (13351, 13357), False, 'from cirrus._2to3 import ConfigParser, to_str\n'), ((13586, 13624), 'os.path.join', 'os.path.join', (['opts.repo', '"""cirrus.conf"""'], {}), "(opts.repo, 'cirrus.conf')\n", (13598, 13624), False, 'import os\n'), ((13828, 13855), 'cirrus._2to3.ConfigParser.ConfigParser', 'ConfigParser.ConfigParser', ([], {}), '()\n', (13853, 13855), False, 'from cirrus._2to3 import ConfigParser, to_str\n'), ((14791, 14829), 'os.path.exists', 'os.path.exists', (['opts.test_requirements'], {}), '(opts.test_requirements)\n', (14805, 14829), False, 'import os\n'), ((17296, 17338), 'cirrus.utils.update_version', 'update_version', (['version_file', 'opts.version'], {}), '(version_file, opts.version)\n', (17310, 17338), False, 'from cirrus.utils import update_version\n'), ((18954, 18974), 'os.path.join', 'os.path.join', (['*elems'], {}), '(*elems)\n', (18966, 18974), False, 'import os\n'), ((19391, 19415), 'os.path.join', 'os.path.join', (['opts.tests'], {}), '(opts.tests)\n', (19403, 19415), False, 'import os\n'), ((19431, 19462), 'os.path.join', 
'os.path.join', (['tests_dir', '"""unit"""'], {}), "(tests_dir, 'unit')\n", (19443, 19462), False, 'import os\n'), ((19969, 20000), 'os.path.dirname', 'os.path.dirname', (['test_inits[-1]'], {}), '(test_inits[-1])\n', (19984, 20000), False, 'import os\n'), ((22231, 22275), 'os.path.join', 'os.path.join', (['test_pkg_dir', '"""sample_test.py"""'], {}), "(test_pkg_dir, 'sample_test.py')\n", (22243, 22275), False, 'import os\n'), ((22291, 22340), 'pystache.render', 'pystache.render', (['templ', "{'package': opts.package}"], {}), "(templ, {'package': opts.package})\n", (22306, 22340), False, 'import pystache\n'), ((22450, 22538), 'cirrus.git_tools.commit_files_optional_push', 'commit_files_optional_push', (['opts.repo', '"""git cirrus package bootstrap"""', '(False)', '*files'], {}), "(opts.repo, 'git cirrus package bootstrap', False,\n *files)\n", (22476, 22538), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((22959, 22990), 'os.path.join', 'os.path.join', (['dist_dir', 'package'], {}), '(dist_dir, package)\n', (22971, 22990), False, 'import os\n'), ((23242, 23253), 'argparse.Namespace', 'Namespace', ([], {}), '()\n', (23251, 23253), False, 'from argparse import ArgumentParser, ArgumentTypeError, Namespace\n'), ((26166, 26205), 'os.path.join', 'os.path.join', (['repo_location', '"""setup.py"""'], {}), "(repo_location, 'setup.py')\n", (26178, 26205), False, 'import os\n'), ((26213, 26233), 'os.path.exists', 'os.path.exists', (['s_py'], {}), '(s_py)\n', (26227, 26233), False, 'import os\n'), ((26478, 26577), 'cirrus.git_tools.commit_files_optional_push', 'commit_files_optional_push', (['opts.repo', '"""git cirrus package update: setup.py"""', '(False)', '"""setup.py"""'], {}), "(opts.repo, 'git cirrus package update: setup.py',\n False, 'setup.py')\n", (26504, 26577), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((9937, 9959), 'cirrus.utils.working_dir', 'working_dir', (['opts.repo'], {}), '(opts.repo)\n', (9948, 9959), False, 'from cirrus.utils import working_dir\n'), ((9983, 10009), 'cirrus.git_tools.RepoInitializer', 'RepoInitializer', (['opts.repo'], {}), '(opts.repo)\n', (9998, 10009), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((10167, 10211), 'cirrus.git_tools.branch', 'branch', (['opts.repo', 'opts.develop', 'opts.master'], {}), '(opts.repo, opts.develop, opts.master)\n', (10173, 10211), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((10765, 10835), 'cirrus.git_tools.tag_release', 'tag_release', (['opts.repo', 'opts.version'], {'master': 'opts.master', 'push': 'do_push'}), '(opts.repo, opts.version, master=opts.master, push=do_push)\n', (10776, 10835), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((11092, 11116), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (11106, 11116), False, 'import os\n'), ((12824, 12847), 'os.path.exists', 'os.path.exists', (['history'], {}), '(history)\n', (12838, 12847), False, 'import os\n'), ((15109, 15121), 'cirrus.pypirc.PypircFile', 'PypircFile', ([], {}), '()\n', (15119, 15121), False, 'from cirrus.pypirc import PypircFile\n'), ((16424, 16452), 
'os.path.exists', 'os.path.exists', (['version_file'], {}), '(version_file)\n', (16438, 16452), False, 'import os\n'), ((18327, 18352), 'os.path.join', 'os.path.join', (['pathname', 'd'], {}), '(pathname, d)\n', (18339, 18352), False, 'import os\n'), ((18373, 18410), 'os.path.join', 'os.path.join', (['pathname', '"""__init__.py"""'], {}), "(pathname, '__init__.py')\n", (18385, 18410), False, 'import os\n'), ((18419, 18440), 'os.makedirs', 'os.makedirs', (['pathname'], {}), '(pathname)\n', (18430, 18440), False, 'import os\n'), ((19490, 19528), 'os.path.join', 'os.path.join', (['tests_dir', '"""__init__.py"""'], {}), "(tests_dir, '__init__.py')\n", (19502, 19528), False, 'import os\n'), ((19538, 19575), 'os.path.join', 'os.path.join', (['unit_dir', '"""__init__.py"""'], {}), "(unit_dir, '__init__.py')\n", (19550, 19575), False, 'import os\n'), ((19636, 19650), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (19647, 19650), False, 'import os\n'), ((20252, 20285), 'os.path.exists', 'os.path.exists', (['opts.requirements'], {}), '(opts.requirements)\n', (20266, 20285), False, 'import os\n'), ((20431, 20469), 'os.path.exists', 'os.path.exists', (['opts.test_requirements'], {}), '(opts.test_requirements)\n', (20445, 20469), False, 'import os\n'), ((20801, 20826), 'os.path.exists', 'os.path.exists', (['"""tox.ini"""'], {}), "('tox.ini')\n", (20815, 20826), False, 'import os\n'), ((22776, 22792), 'cirrus.environment.repo_directory', 'repo_directory', ([], {}), '()\n', (22790, 22792), False, 'from cirrus.environment import repo_directory\n'), ((23294, 23305), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (23303, 23305), False, 'import os\n'), ((25210, 25232), 'cirrus.utils.working_dir', 'working_dir', (['opts.repo'], {}), '(opts.repo)\n', (25221, 25232), False, 'from cirrus.utils import working_dir\n'), ((25585, 25635), 'cirrus.twine_helpers.register_package', 'register_package', (['package', 'opts.register_with_pypi'], {}), '(package, opts.register_with_pypi)\n', (25601, 25635), False, 'from cirrus.twine_helpers import register_package\n'), ((26958, 26978), 'cirrus.package_container.init_container', 'init_container', (['opts'], {}), '(opts)\n', (26972, 26978), False, 'from cirrus.package_container import init_container\n'), ((3058, 3069), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3067, 3069), False, 'import os\n'), ((7344, 7355), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7353, 7355), False, 'import os\n'), ((8966, 8977), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8975, 8977), False, 'import os\n'), ((9601, 9612), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9610, 9612), False, 'import os\n'), ((10251, 10279), 'cirrus.git_tools.get_active_branch', 'get_active_branch', (['opts.repo'], {}), '(opts.repo)\n', (10268, 10279), False, 'from cirrus.git_tools import RepoInitializer, branch, push, get_tags, tag_release, commit_files_optional_push, get_active_branch\n'), ((12300, 12339), 'inspect.getsourcefile', 'inspect.getsourcefile', (['cirrus.templates'], {}), '(cirrus.templates)\n', (12321, 12339), False, 'import inspect\n'), ((17279, 17290), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (17287, 17290), False, 'import sys\n'), ((22060, 22099), 'inspect.getsourcefile', 'inspect.getsourcefile', (['cirrus.templates'], {}), '(cirrus.templates)\n', (22081, 22099), False, 'import inspect\n'), ((22715, 22731), 'cirrus.environment.repo_directory', 'repo_directory', ([], {}), '()\n', (22729, 22731), False, 'from cirrus.environment import repo_directory\n'), ((25069, 25091), 'cirrus.utils.working_dir', 
'working_dir', (['opts.repo'], {}), '(opts.repo)\n', (25080, 25091), False, 'from cirrus.utils import working_dir\n'), ((21248, 21260), 'cirrus.pypirc.PypircFile', 'PypircFile', ([], {}), '()\n', (21258, 21260), False, 'from cirrus.pypirc import PypircFile\n')] |
from setuptools import setup
setup(
name='optool',
version='1.9.4',
py_modules=['optool'],
install_requires=[
'numpy','matplotlib'
]
)
| [
"setuptools.setup"
] | [((30, 136), 'setuptools.setup', 'setup', ([], {'name': '"""optool"""', 'version': '"""1.9.4"""', 'py_modules': "['optool']", 'install_requires': "['numpy', 'matplotlib']"}), "(name='optool', version='1.9.4', py_modules=['optool'],\n install_requires=['numpy', 'matplotlib'])\n", (35, 136), False, 'from setuptools import setup\n')] |
# Copyright (c) 2016-2018, University of Idaho
# All rights reserved.
#
# <NAME> (<EMAIL>)
#
# The project described was supported by NSF award number IIA-1301792
# from the NSF Idaho EPSCoR Program and by the National Science Foundation.
import os
from os.path import exists as _exists
from os.path import join as _join
from os.path import split as _split
from glob import glob
import shutil
# non-standard
import jsonpickle
import numpy as np
# wepppy submodules
from wepppy.nodb.watershed import Watershed
from wepppy.nodb.base import NoDbBase
from wepppy.rhem.out import RhemOutput, RhemSummary
class RhemPostNoDbLockedException(Exception):
pass
class RhemPost(NoDbBase):
"""
Manager that keeps track of project details
and coordinates access of NoDb instances.
"""
__name__ = 'RhemPost'
def __init__(self, wd, cfg_fn):
super(RhemPost, self).__init__(wd, cfg_fn)
self.lock()
# noinspection PyBroadException
try:
self.hill_summaries = None
self.periods = None
self.watershed_annuals = None
self.dump_and_unlock()
except Exception:
self.unlock('-f')
raise
#
# Required for NoDbBase Subclass
#
# noinspection PyPep8Naming
@staticmethod
def getInstance(wd):
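        # Load the serialized RhemPost state from rhempost.nodb and fix up the stored working directory if the project has moved.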
with open(_join(wd, 'rhempost.nodb')) as fp:
db = jsonpickle.decode(fp.read())
assert isinstance(db, RhemPost), db
if _exists(_join(wd, 'READONLY')):
db.wd = os.path.abspath(wd)
return db
if os.path.abspath(wd) != os.path.abspath(db.wd):
db.wd = wd
db.lock()
db.dump_and_unlock()
return db
@property
def _nodb(self):
return _join(self.wd, 'rhempost.nodb')
@property
def _lock(self):
return _join(self.wd, 'rhempost.nodb.lock')
def run_post(self):
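        # Aggregate the per-hillslope RHEM summary files into watershed-level annual totals and return-period statistics.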
from wepppy.nodb import Rhem
wd = self.wd
self.lock()
# noinspection PyBroadException
try:
output_dir = self.output_dir
watershed = Watershed.getInstance(wd)
rhem = Rhem.getInstance(wd)
out_dir = rhem.output_dir
hill_summaries = {}
total_area = 0.0
runoff = 0.0
soil_yield = 0.0
soil_loss = 0.0
precip = 0.0
periods = None
ret_rain = None
ret_runoff = None
ret_yield = None
ret_loss = None
for topaz_id, summary in watershed.sub_iter():
area_ha = summary.area / 10000
total_area += area_ha
summary_fn = _join(out_dir, 'hill_{}.sum'.format(topaz_id))
hill_summaries[topaz_id] = RhemSummary(summary_fn, area_ha)
runoff += hill_summaries[topaz_id].annuals['Avg-Runoff (m^3/yr)']
soil_yield += hill_summaries[topaz_id].annuals['Avg-SY (tonne/yr)']
soil_loss += hill_summaries[topaz_id].annuals['Avg-Soil-Loss (tonne/yr)']
precip += hill_summaries[topaz_id].annuals['Avg. Precipitation (m^3/yr)']
if ret_rain is None:
ret_rain = np.array(hill_summaries[topaz_id].return_freqs['Rain (m^3)'])
else:
ret_rain += np.array(hill_summaries[topaz_id].return_freqs['Rain (m^3)'])
if ret_runoff is None:
ret_runoff = np.array(hill_summaries[topaz_id].return_freqs['Runoff (m^3)'])
else:
ret_runoff += np.array(hill_summaries[topaz_id].return_freqs['Runoff (m^3)'])
if ret_yield is None:
ret_yield = np.array(hill_summaries[topaz_id].return_freqs['Sediment-Yield (tonne)'])
else:
ret_yield += np.array(hill_summaries[topaz_id].return_freqs['Sediment-Yield (tonne)'])
if ret_loss is None:
ret_loss = np.array(hill_summaries[topaz_id].return_freqs['Soil-Loss (tonne)'])
else:
ret_loss += np.array(hill_summaries[topaz_id].return_freqs['Soil-Loss (tonne)'])
if periods is None:
periods = [v for v in hill_summaries[topaz_id].ret_freq_periods]
self.hill_summaries = hill_summaries
self.watershed_annuals = {'Avg-Runoff (m^3/yr)': runoff,
'Avg-Runoff (mm/yr)': runoff / (total_area * 10000) * 1000,
'Avg-SY (tonne/yr)': soil_yield,
                                      'Avg-SY (tonne/ha/yr)': soil_yield / total_area,
'Avg-Soil-Loss (tonne/yr)': soil_loss,
'Avg-Soil-Loss (tonne/ha/yr)': soil_loss / total_area,
'Avg. Precipitation (m^3/yr)': precip,
'Avg. Precipitation (mm/yr)': precip / (total_area * 10000) * 1000}
self.ret_freq_periods = periods
watershed_ret_freqs = {'Rain (m^3)': ret_rain,
'Rain (mm)': ret_rain / (total_area * 10000) * 1000,
'Runoff (m^3)': ret_runoff,
'Runoff (mm)': ret_runoff / (total_area * 10000) * 1000,
'Sediment-Yield (tonne)': ret_yield,
'Sediment-Yield (tonne/ha)': ret_yield / total_area,
'Soil-Loss (tonne)': ret_loss,
'Soil-Loss (tonne/ha)': ret_loss / total_area}
for k in watershed_ret_freqs:
watershed_ret_freqs[k] = [float(v) for v in watershed_ret_freqs[k]]
self.watershed_ret_freqs = watershed_ret_freqs
self.dump_and_unlock()
except Exception:
self.unlock('-f')
raise
def query_sub_val(self, measure):
_measure = measure.strip().lower()
key = None
if _measure == 'runoff':
key = 'Avg-Runoff (mm/yr)'
elif _measure == 'sed_yield':
key = 'Avg-SY (tonne/ha/yr)'
elif _measure == 'soil_loss':
key = 'Avg-Soil-Loss (tonne/ha/yr)'
assert key is not None
hill_summaries = self.hill_summaries
d = {}
for topaz_id in hill_summaries:
d[str(topaz_id)] = dict(
topaz_id=topaz_id,
value=hill_summaries[topaz_id].annuals[key])
return d
| [
"wepppy.nodb.Rhem.getInstance",
"os.path.join",
"numpy.array",
"wepppy.nodb.watershed.Watershed.getInstance",
"os.path.abspath",
"wepppy.rhem.out.RhemSummary"
] | [((1829, 1860), 'os.path.join', '_join', (['self.wd', '"""rhempost.nodb"""'], {}), "(self.wd, 'rhempost.nodb')\n", (1834, 1860), True, 'from os.path import join as _join\n'), ((1912, 1948), 'os.path.join', '_join', (['self.wd', '"""rhempost.nodb.lock"""'], {}), "(self.wd, 'rhempost.nodb.lock')\n", (1917, 1948), True, 'from os.path import join as _join\n'), ((2172, 2197), 'wepppy.nodb.watershed.Watershed.getInstance', 'Watershed.getInstance', (['wd'], {}), '(wd)\n', (2193, 2197), False, 'from wepppy.nodb.watershed import Watershed\n'), ((2217, 2237), 'wepppy.nodb.Rhem.getInstance', 'Rhem.getInstance', (['wd'], {}), '(wd)\n', (2233, 2237), False, 'from wepppy.nodb import Rhem\n'), ((1355, 1381), 'os.path.join', '_join', (['wd', '"""rhempost.nodb"""'], {}), "(wd, 'rhempost.nodb')\n", (1360, 1381), True, 'from os.path import join as _join\n'), ((1508, 1529), 'os.path.join', '_join', (['wd', '"""READONLY"""'], {}), "(wd, 'READONLY')\n", (1513, 1529), True, 'from os.path import join as _join\n'), ((1556, 1575), 'os.path.abspath', 'os.path.abspath', (['wd'], {}), '(wd)\n', (1571, 1575), False, 'import os\n'), ((1618, 1637), 'os.path.abspath', 'os.path.abspath', (['wd'], {}), '(wd)\n', (1633, 1637), False, 'import os\n'), ((1641, 1663), 'os.path.abspath', 'os.path.abspath', (['db.wd'], {}), '(db.wd)\n', (1656, 1663), False, 'import os\n'), ((2853, 2885), 'wepppy.rhem.out.RhemSummary', 'RhemSummary', (['summary_fn', 'area_ha'], {}), '(summary_fn, area_ha)\n', (2864, 2885), False, 'from wepppy.rhem.out import RhemOutput, RhemSummary\n'), ((3302, 3363), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Rain (m^3)']"], {}), "(hill_summaries[topaz_id].return_freqs['Rain (m^3)'])\n", (3310, 3363), True, 'import numpy as np\n'), ((3418, 3479), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Rain (m^3)']"], {}), "(hill_summaries[topaz_id].return_freqs['Rain (m^3)'])\n", (3426, 3479), True, 'import numpy as np\n'), ((3553, 3616), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Runoff (m^3)']"], {}), "(hill_summaries[topaz_id].return_freqs['Runoff (m^3)'])\n", (3561, 3616), True, 'import numpy as np\n'), ((3673, 3736), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Runoff (m^3)']"], {}), "(hill_summaries[topaz_id].return_freqs['Runoff (m^3)'])\n", (3681, 3736), True, 'import numpy as np\n'), ((3808, 3881), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Sediment-Yield (tonne)']"], {}), "(hill_summaries[topaz_id].return_freqs['Sediment-Yield (tonne)'])\n", (3816, 3881), True, 'import numpy as np\n'), ((3937, 4010), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Sediment-Yield (tonne)']"], {}), "(hill_summaries[topaz_id].return_freqs['Sediment-Yield (tonne)'])\n", (3945, 4010), True, 'import numpy as np\n'), ((4080, 4148), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Soil-Loss (tonne)']"], {}), "(hill_summaries[topaz_id].return_freqs['Soil-Loss (tonne)'])\n", (4088, 4148), True, 'import numpy as np\n'), ((4203, 4271), 'numpy.array', 'np.array', (["hill_summaries[topaz_id].return_freqs['Soil-Loss (tonne)']"], {}), "(hill_summaries[topaz_id].return_freqs['Soil-Loss (tonne)'])\n", (4211, 4271), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Documentation string"""
__authors__ = ["Person1", "Person2"]
__email__ = "<EMAIL>"
__copyright__ = "<NAME>"
__credits__ = ["Person1", "Person2", "Person3"]
__version__ = "0.1"
__license__ = "MIT"
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
import os
import sys
import json
import argparse
from collections import OrderedDict
from file_funcs import dump_json, load_json
def participation_summary(input_path, output_path):
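    # Write a LaTeX table of per-course survey participation, skipping courses with fewer than 100 invited respondents.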
semester = load_json(input_path)
#if not os.path.exists(output_path):
with open(output_path, "w") as f:
f.write("\n".join([
r"\begin{table}[H]",
r"\centering",
r"\begin{tabular}{|l|c|c|c|}"
])+"\n")
f.write("\n".join([r"\hline",r"Kurs & Respondenter & Inviterte & Prosent\\ \hline", ""]))
for course_code, content in semester.items():
answered = int(content["respondents"]["answered"])
invited = int(content["respondents"]["invited"])
if invited < 100:
continue
            participation = r"{0:.1f}\%".format(100*answered/invited)
f.write(" "+" & ".join([
course_code,
str(answered),
str(invited),
participation
]))
f.write(r" \\ \hline" + "\n")
f.write("\n".join([
r"\end{tabular}",
r"\end{table}"
]))
if __name__ == '__main__':
if len(sys.argv) <= 1:
print("Usage: participation_summary semester")
sys.exit(0)
semester_folder = "./data/"+sys.argv[1]+"/"
participation_summary(semester_folder+"/outputs/courses.json", semester_folder+"/outputs/participation.tex")
| [
"file_funcs.load_json",
"sys.exit"
] | [((590, 611), 'file_funcs.load_json', 'load_json', (['input_path'], {}), '(input_path)\n', (599, 611), False, 'from file_funcs import dump_json, load_json\n'), ((1671, 1682), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1679, 1682), False, 'import sys\n')] |
from django.shortcuts import render,get_object_or_404, redirect
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from monitor.models import Machine, Crash, Testcase, Profile, DupCrash
from track.models import Issue
from django.http import Http404
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth import authenticate, login, logout, views
from django.contrib.auth.models import User
from django.utils.timesince import timesince
from django.template import defaultfilters
import datetime
import glob
import os
import hashlib
import threading
from django.contrib.auth.decorators import login_required
def CheckPostVariable(POST, parameter):
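    # Return False if any of the keys listed in 'parameter' is missing from the POST data.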
for param in parameter:
if param not in POST:
return False
return True
@login_required
def index(request):
machine_count = Machine.objects.filter(owner=request.user).count()
crash_count = Crash.objects.filter(owner=request.user).count()
issue_count = Issue.objects.filter(owner=request.user).count()
cve_count = Issue.objects.filter(owner=request.user).exclude(cve__exact='').count()
server_count = Machine.objects.filter(owner=request.user).values('pub_ip').distinct().count()
profiles = Profile.objects.all()
myprofile = Profile.objects.get(owner=request.user)
profilenum = profiles.order_by('-id')[0].id
context = {'server_count':server_count, 'cve_count':cve_count,'issue_count':issue_count, 'crash_count': crash_count, 'machine_count': machine_count, 'userinfo':request.user, 'profilenum':profilenum, 'profile':profiles, 'myprofile':myprofile}
return render(request, 'monitor/index.html', context)
@login_required
def fuzzer_list(request):
machine_list = Machine.objects.filter(owner=request.user).order_by('-ping')#.all()#[::-1]#.filter(idx>0).order_by('-idx')
now = datetime.datetime.now() - datetime.timedelta(minutes=5)
myprofile = Profile.objects.get(owner=request.user)
context = {'machine_list': machine_list, 'userinfo':request.user, 'now':now, 'myprofile':myprofile}
return render(request, 'monitor/fuzzer/list.html', context)
@login_required
def fuzzer_details(request, idx):
fuzzer_info = None
try:
fuzzer_info = Machine.objects.get(id=idx, owner=request.user)
except ObjectDoesNotExist:
raise Http404
myprofile = Profile.objects.get(owner=request.user)
context = {'fuzzer': fuzzer_info, 'userinfo':request.user, 'myprofile':myprofile}
return render(request, 'monitor/fuzzer/detail.html', context)
@login_required
def crash_list(request):
crash_info = Crash.objects.filter(owner=request.user)[::-1]
myprofile = Profile.objects.get(owner=request.user)
context = {'crashes': crash_info, 'userinfo':request.user, 'myprofile':myprofile}
return render(request, 'monitor/crash/list.html', context)
@login_required
def crash_details(request, idx):
crash_info = None
try:
crash_info = Crash.objects.get(id=idx, owner=request.user)
except ObjectDoesNotExist:
raise Http404
myprofile = Profile.objects.get(owner=request.user)
context = {'crash': crash_info, 'userinfo':request.user, 'myprofile':myprofile}
return render(request, 'monitor/crash/detail.html', context)
@login_required
def crash_details_dupcrash(request, idx, page=0):
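    # Return a JSON summary of the duplicate crashes grouped under this original crash: file size, hash, hit count and date for each.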
crash_info = None
result = {}
try:
crash_info = Crash.objects.get(id=idx, owner=request.user)
Dcrash = DupCrash.objects.filter(owner=request.user, fuzzer=crash_info.fuzzer, original_crash=crash_info)
result["total"] = len(Dcrash)
for i in range(0, len(Dcrash)):
tmp = {}
tmp["size"] = defaultfilters.filesizeformat(Dcrash[i].crash_file.size)
tmp["hash"] = Dcrash[i].crash_hash
tmp["count"] = Dcrash[i].dup_crash
tmp["reg_date"] = defaultfilters.date(Dcrash[i].reg_date)
result[i+1] = tmp
# crash_path = crash_info.crash_file.path.split("/")[:-1]
# crash_path = "/".join(crash_path)
# crashes = (glob.glob(crash_path+"/*"))
# for i in range(0, len(crashes)):
# tmp = {}
# tmp["size"] = os.path.getsize(crashes[i])
# tmp["name"] = os.path.basename(crashes[i])
# tmp["hash"] = hashlib.md5(open(crashes[i],'rb').read()).hexdigest()
# result[i] = tmp
except ObjectDoesNotExist:
raise Http404
return JsonResponse(result)
@login_required
def crash_details_modify(request, idx):
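    # Update the free-text comment on a crash; expects a 'comment' POST parameter.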
crash_info = None
parameterList = ['comment']
if not CheckPostVariable(request.POST, parameterList):
raise Http404
try:
comment = request.POST['comment']
crash_info = Crash.objects.get(id=idx, owner=request.user)
except ObjectDoesNotExist:
raise Http404
crash_info.comment = comment
crash_info.save()
myprofile = Profile.objects.get(owner=request.user)
context = {'crash': crash_info, 'userinfo':request.user, 'myprofile':myprofile}
return render(request, 'monitor/crash/detail.html', context)
@login_required
def settings_page(request):
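    # Collect per-user statistics and notification settings for the settings page.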
machine_count = Machine.objects.filter(owner=request.user).count()
crash_count = Crash.objects.filter(owner=request.user).count()
issue_count = Issue.objects.filter(owner=request.user).count()
testcase_count = Testcase.objects.filter(owner=request.user).count()
cve_count = Issue.objects.filter(owner=request.user).exclude(cve__exact='').count()
server_count = Machine.objects.filter(owner=request.user).values('pub_ip').distinct().count()
profile = Profile.objects.all()
myprofile = Profile.objects.get(owner=request.user)
notification_setting = {'USE_EMAIL_ALERT':settings.USE_EMAIL_ALERT,'USE_TELEGRAM_ALERT':settings.USE_TELEGRAM_ALERT}
    context = {'testcase_count':testcase_count, 'server_count':server_count, 'cve_count':cve_count,'issue_count':issue_count, 'crash_count': crash_count, 'machine_count': machine_count,'userinfo':request.user, 'profiles':profile, 'myprofile':myprofile, 'notification_setting':notification_setting}
return render(request, 'settings.html', context)
| [
"django.shortcuts.render",
"monitor.models.Machine.objects.get",
"django.template.defaultfilters.date",
"django.http.JsonResponse",
"monitor.models.DupCrash.objects.filter",
"django.template.defaultfilters.filesizeformat",
"monitor.models.Profile.objects.get",
"monitor.models.Machine.objects.filter",
... | [((1231, 1252), 'monitor.models.Profile.objects.all', 'Profile.objects.all', ([], {}), '()\n', (1250, 1252), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((1266, 1305), 'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (1285, 1305), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((1605, 1651), 'django.shortcuts.render', 'render', (['request', '"""monitor/index.html"""', 'context'], {}), "(request, 'monitor/index.html', context)\n", (1611, 1651), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1895, 1934), 'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (1914, 1934), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((2045, 2097), 'django.shortcuts.render', 'render', (['request', '"""monitor/fuzzer/list.html"""', 'context'], {}), "(request, 'monitor/fuzzer/list.html', context)\n", (2051, 2097), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2300, 2339), 'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (2319, 2339), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((2431, 2485), 'django.shortcuts.render', 'render', (['request', '"""monitor/fuzzer/detail.html"""', 'context'], {}), "(request, 'monitor/fuzzer/detail.html', context)\n", (2437, 2485), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2603, 2642), 'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (2622, 2642), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((2734, 2785), 'django.shortcuts.render', 'render', (['request', '"""monitor/crash/list.html"""', 'context'], {}), "(request, 'monitor/crash/list.html', context)\n", (2740, 2785), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2983, 3022), 'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (3002, 3022), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((3112, 3165), 'django.shortcuts.render', 'render', (['request', '"""monitor/crash/detail.html"""', 'context'], {}), "(request, 'monitor/crash/detail.html', context)\n", (3118, 3165), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4191, 4211), 'django.http.JsonResponse', 'JsonResponse', (['result'], {}), '(result)\n', (4203, 4211), False, 'from django.http import HttpResponseRedirect, HttpResponse, JsonResponse\n'), ((4605, 4644), 'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (4624, 4644), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((4735, 4788), 'django.shortcuts.render', 'render', (['request', '"""monitor/crash/detail.html"""', 'context'], {}), "(request, 'monitor/crash/detail.html', context)\n", (4741, 4788), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((5293, 5314), 'monitor.models.Profile.objects.all', 'Profile.objects.all', ([], {}), '()\n', (5312, 5314), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((5328, 5367), 
'monitor.models.Profile.objects.get', 'Profile.objects.get', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (5347, 5367), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((5812, 5853), 'django.shortcuts.render', 'render', (['request', '"""settings.html"""', 'context'], {}), "(request, 'settings.html', context)\n", (5818, 5853), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1826, 1849), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1847, 1849), False, 'import datetime\n'), ((1852, 1881), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (1870, 1881), False, 'import datetime\n'), ((2192, 2239), 'monitor.models.Machine.objects.get', 'Machine.objects.get', ([], {'id': 'idx', 'owner': 'request.user'}), '(id=idx, owner=request.user)\n', (2211, 2239), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((2543, 2583), 'monitor.models.Crash.objects.filter', 'Crash.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (2563, 2583), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((2877, 2922), 'monitor.models.Crash.objects.get', 'Crash.objects.get', ([], {'id': 'idx', 'owner': 'request.user'}), '(id=idx, owner=request.user)\n', (2894, 2922), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((3288, 3333), 'monitor.models.Crash.objects.get', 'Crash.objects.get', ([], {'id': 'idx', 'owner': 'request.user'}), '(id=idx, owner=request.user)\n', (3305, 3333), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((3345, 3445), 'monitor.models.DupCrash.objects.filter', 'DupCrash.objects.filter', ([], {'owner': 'request.user', 'fuzzer': 'crash_info.fuzzer', 'original_crash': 'crash_info'}), '(owner=request.user, fuzzer=crash_info.fuzzer,\n original_crash=crash_info)\n', (3368, 3445), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((4449, 4494), 'monitor.models.Crash.objects.get', 'Crash.objects.get', ([], {'id': 'idx', 'owner': 'request.user'}), '(id=idx, owner=request.user)\n', (4466, 4494), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((860, 902), 'monitor.models.Machine.objects.filter', 'Machine.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (882, 902), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((926, 966), 'monitor.models.Crash.objects.filter', 'Crash.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (946, 966), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((990, 1030), 'track.models.Issue.objects.filter', 'Issue.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (1010, 1030), False, 'from track.models import Issue\n'), ((1712, 1754), 'monitor.models.Machine.objects.filter', 'Machine.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (1734, 1754), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((3538, 3594), 'django.template.defaultfilters.filesizeformat', 'defaultfilters.filesizeformat', (['Dcrash[i].crash_file.size'], {}), '(Dcrash[i].crash_file.size)\n', (3567, 3594), False, 'from django.template import defaultfilters\n'), ((3692, 3731), 'django.template.defaultfilters.date', 'defaultfilters.date', 
(['Dcrash[i].reg_date'], {}), '(Dcrash[i].reg_date)\n', (3711, 3731), False, 'from django.template import defaultfilters\n'), ((4852, 4894), 'monitor.models.Machine.objects.filter', 'Machine.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (4874, 4894), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((4918, 4958), 'monitor.models.Crash.objects.filter', 'Crash.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (4938, 4958), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((4982, 5022), 'track.models.Issue.objects.filter', 'Issue.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (5002, 5022), False, 'from track.models import Issue\n'), ((5049, 5092), 'monitor.models.Testcase.objects.filter', 'Testcase.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (5072, 5092), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((1052, 1092), 'track.models.Issue.objects.filter', 'Issue.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (1072, 1092), False, 'from track.models import Issue\n'), ((5114, 5154), 'track.models.Issue.objects.filter', 'Issue.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (5134, 5154), False, 'from track.models import Issue\n'), ((1140, 1182), 'monitor.models.Machine.objects.filter', 'Machine.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (1162, 1182), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n'), ((5202, 5244), 'monitor.models.Machine.objects.filter', 'Machine.objects.filter', ([], {'owner': 'request.user'}), '(owner=request.user)\n', (5224, 5244), False, 'from monitor.models import Machine, Crash, Testcase, Profile, DupCrash\n')] |
from functools import partial
from keyword import iskeyword
from typing import Tuple, Final, Callable, Any, List, Generator, NoReturn, Dict
from chained.type_utils.meta import ChainedMeta
def _call_monkey_patcher(self, *args, **kwargs):
"""LambdaExpr.__call__ monkey patcher"""
return self.eval()(*args, **kwargs)
def _token_expander(value: Any) -> Generator[Any, None, None]:
"""Expands tokens from an instance of ``LambdaExpr``. Otherwise - yields single 'value'.
>>> x = LambdaExpr('x', '+', 'y')
>>> tuple(_token_expander(x))
('x', '+', 'y')
>>> tuple(_token_expander('value'))
('value',)
Args:
value: token or `LambdaExpr` to expand
Returns:
resulting generator
"""
if isinstance(value, LambdaExpr):
yield from value._tokens
else:
yield value
class LambdaExpr(metaclass=ChainedMeta):
"""Implements functionality for shortened creation of lambda functions."""
__slots__ = (
'_tokens',
'_lambda',
'_string_repr'
)
def __init__(self, *tokens: Any) -> None:
self._tokens: Final[Tuple[Any, ...]] = tokens
self._lambda: Callable = partial(_call_monkey_patcher, self)
def __call__(self, *args, **kwargs):
# When the object of type 'LambdaExpr' is called for the first time,
# the class attribute '_lambda' is replaced with the one that evaluated by the 'eval' method.
return self._lambda(*args, **kwargs)
def __getattr__(self, name: str) -> 'LambdaExpr':
"""
Emulates something like ``lambda x: x.attr``
using ``x.attr``, where ``x`` was defined as ``x = LambdaVar('x')``.
>>> x = LambdaVar('x')
>>> tuple(map(x.real, (3, 4, 5 + 2j)))
(3, 4, 5.0)
Args:
name: name of an attribute
Returns:
Corresponding lambda expression
"""
return LambdaExpr('(', *self._tokens, f').{name}')
def __repr__(self) -> str:
"""
>>> x = LambdaVar('x')
>>> y = LambdaVar('y')
>>> (x - y).__repr__()[:35]
'<LambdaExpr(lambda x,y:(x)-(y)) at '
Returns:
__repr__ of the `LambdaExpr`
"""
try:
string_repr = self.__getattribute__('_string_repr')
except AttributeError:
self.eval()
string_repr = self._string_repr
return f'<{self.__class__.__name__}({string_repr}) at {hex(id(self))}>'
def __str__(self) -> str:
"""
>>> x = LambdaVar('x')
>>> y = LambdaVar('y')
>>> str(x - y)
'(x)-(y)'
Returns:
string representation of the `LambdaExpr`
"""
def tok_filter():
for tok in map(str, self._tokens):
if tok[0] != '*' or len(tok) < 3: # Normal variable, or "*", or "**"
yield tok
elif tok[1] != '*':
yield tok[1:] # *args
else:
yield tok[2:] # **kwargs
return ''.join(tok_filter())
def _(self, *args, **kwargs) -> 'LambdaExpr':
"""
Emulates ``__call__`` inside ``LambdaExpr``.
>>> x = LambdaExpr('x')
>>> str(x._('4', 'a', k='23', www='32'))
'(x)((4),(a),k=(23),www=(32),)'
>>> x = LambdaExpr('x')
>>> str(x._('4', "'a'", k='23', www='32'))
"(x)((4),('a'),k=(23),www=(32),)"
>>> str(x._(k='23', www='32'))
'(x)(k=(23),www=(32),)'
>>> str(x._('4', 'a'))
'(x)((4),(a),)'
>>> str(x._('4'))
'(x)((4),)'
>>> str(x._(kwarg='kw'))
'(x)(kwarg=(kw),)'
>>> str(x._())
'(x)()'
Args:
*args: positional arguments to pass
**kwargs: keyword arguments to pass
Returns:
lambda expression
"""
def args_tokenizer() -> Generator[Any, None, None]:
for arg in args:
yield '('
yield from _token_expander(arg)
yield '),'
def kwargs_tokenizer() -> Generator[Any, None, None]:
for k, v in kwargs.items():
yield f'{k}=('
yield from _token_expander(v)
yield '),'
return LambdaExpr(
'(', *self._tokens, ')(',
*args_tokenizer(),
*kwargs_tokenizer(),
')'
)
def _collapse(self, inter_token: str, right: 'LambdaExpr') -> 'LambdaExpr':
"""Collapses 'self' with 'right' so that they are both evaluated before the effect of 'inter_token'
>>> x = LambdaExpr('x')
>>> y = LambdaExpr('y')
>>> z = LambdaExpr('z')
>>> str(x._collapse('*', y + z))
'(x)*((y)+(z))'
Args:
inter_token: middle token
right: instance of `LambdaExpr` to the right
Returns:
resulting `LambdaExpr`
"""
if isinstance(right, LambdaExpr):
return LambdaExpr(
'(', *self._tokens, ')',
inter_token,
'(', *right._tokens, ')'
)
return LambdaExpr(
'(', *self._tokens, ')',
inter_token,
'(', right, ')'
)
def _get_args(self) -> List:
"""Returns an argument list of a future lambda function built on the ``LambdaExpr``.
Returns:
argument list
"""
arg_set = set(self._tokens) & _registered_vars.keys()
starred_args = []
if (args := '*args') in arg_set:
arg_set.remove(args)
starred_args.append(args)
if (kwargs := '**kwargs') in arg_set:
arg_set.remove(kwargs)
starred_args.append(kwargs)
arg_list = sorted(arg_set)
arg_list += starred_args
return arg_list
def eval(self) -> Callable:
"""Evaluates tokens into a lambda function.
>>> x = LambdaVar('x')
>>> y = LambdaVar('y')
>>> func = (x * y - 3 + 1).eval()
>>> func(3, 4)
10
>>> func(2, 2)
2
"""
string_repr = f'lambda {",".join(self._get_args())}:{self}'
self._string_repr: str = string_repr
evaluated_lambda = eval(string_repr)
self._lambda = evaluated_lambda
return evaluated_lambda
# >>> Unary operators
def __pos__(self) -> 'LambdaExpr':
return LambdaExpr('+(', *self._tokens, ')')
def __neg__(self) -> 'LambdaExpr':
return LambdaExpr('-(', *self._tokens, ')')
def __invert__(self) -> 'LambdaExpr':
return LambdaExpr('~(', *self._tokens, ')')
def __abs__(self) -> 'LambdaExpr':
return LambdaExpr('abs(', *self._tokens, ')')
def __round__(self, n=None) -> 'LambdaExpr':
"""
>>> x = LambdaVar('x')
>>> tuple(map(round(x), (3.4, 44.334)))
(3, 44)
>>> tuple(map(round(x, 1), (3.4, 44.334)))
(3.4, 44.3)
Args:
n: precision
Returns:
rounded number
"""
n = n._tokens if isinstance(n, LambdaExpr) else (n,)
return LambdaExpr('round(', *self._tokens, ',', *n, ')')
# >>> Comparison methods
def __eq__(self, other) -> 'LambdaExpr': # type: ignore
"""
>>> str(LambdaExpr('x') == LambdaExpr('y'))
'(x)==(y)'
"""
return self._collapse('==', other)
def __ne__(self, other) -> 'LambdaExpr': # type: ignore
return self._collapse('!=', other)
def __lt__(self, other) -> 'LambdaExpr':
return self._collapse('<', other)
def __gt__(self, other) -> 'LambdaExpr':
return self._collapse('>', other)
def __le__(self, other) -> 'LambdaExpr':
return self._collapse('<=', other)
def __ge__(self, other) -> 'LambdaExpr':
return self._collapse('>=', other)
# >>> Normal arithmetic operators
def __add__(self, other) -> 'LambdaExpr':
return self._collapse('+', other)
def __sub__(self, other) -> 'LambdaExpr':
return self._collapse('-', other)
def __mul__(self, other) -> 'LambdaExpr':
return self._collapse('*', other)
def __floordiv__(self, other) -> 'LambdaExpr':
return self._collapse('//', other)
def __truediv__(self, other) -> 'LambdaExpr':
return self._collapse('/', other)
def __mod__(self, other) -> 'LambdaExpr':
return self._collapse('%', other)
def __divmod__(self, other) -> 'LambdaExpr':
return LambdaExpr('divmod(', *self._tokens, ')')
def __pow__(self, other) -> 'LambdaExpr':
return self._collapse('**', other)
def __matmul__(self, other) -> 'LambdaExpr':
return self._collapse('@', other)
def __lshift__(self, other) -> 'LambdaExpr':
return self._collapse('<<', other)
def __rshift__(self, other) -> 'LambdaExpr':
return self._collapse('>>', other)
def __and__(self, other) -> 'LambdaExpr':
return self._collapse('&', other)
def __or__(self, other) -> 'LambdaExpr':
return self._collapse('|', other)
def __xor__(self, other) -> 'LambdaExpr':
return self._collapse('^', other)
# >>> Type conversion magic methods
def __int__(self) -> 'LambdaExpr':
return LambdaExpr('int(', *self._tokens, ')')
def __float__(self) -> 'LambdaExpr':
return LambdaExpr('float(', *self._tokens, ')')
def __complex__(self) -> 'LambdaExpr':
return LambdaExpr('complex(', *self._tokens, ')')
def __oct__(self) -> 'LambdaExpr':
return LambdaExpr('oct(', *self._tokens, ')')
def __hex__(self) -> 'LambdaExpr':
return LambdaExpr('hex(', *self._tokens, ')')
# >>> Miscellaneous
def __hash__(self) -> 'LambdaExpr': # type: ignore
return LambdaExpr('hash(', *self._tokens, ')')
def __nonzero__(self) -> 'LambdaExpr':
return LambdaExpr('bool(', *self._tokens, ')')
# >>> Container methods
def __len__(self) -> 'LambdaExpr':
return LambdaExpr('len(', *self._tokens, ')')
def __getitem__(self, key) -> 'LambdaExpr':
return LambdaExpr('(', *self._tokens, ')[', key, ']')
def __setitem__(self, key, value) -> 'LambdaExpr':
return LambdaExpr('(', *self._tokens, ')[', key, ']=(', value, ')')
def __delitem__(self, key) -> 'LambdaExpr':
return LambdaExpr('del (', *self._tokens, ')[', key, ']')
def __iter__(self) -> 'LambdaExpr':
return LambdaExpr('iter(', *self._tokens, ')')
def __reversed__(self) -> 'LambdaExpr':
return LambdaExpr('reversed(', *self._tokens, ')')
def __contains__(self, item) -> 'LambdaExpr':
return LambdaExpr('(', item, ') in (', *self._tokens, ')')
# >>> Keyword substitutes
def _if(self, cond, /) -> 'LambdaExpr':
cond = cond._tokens if isinstance(cond, LambdaExpr) else (cond,)
return LambdaExpr('(', *self._tokens, ') if (', *cond, ')')
def _else(self, alt, /) -> 'LambdaExpr':
alt = alt._tokens if isinstance(alt, LambdaExpr) else (alt,)
return LambdaExpr(*self._tokens, ' else (', *alt, ')')
def _for(self, item, /):
item = item._tokens if isinstance(item, LambdaExpr) else (item,)
return LambdaExpr('(', *self._tokens, ') for (', *item, ')')
def _in(self, item, /):
item = item._tokens if isinstance(item, LambdaExpr) else (item,)
return LambdaExpr(*self._tokens, ' in (', *item, ')')
class _LambdaVarMeta(ChainedMeta):
__slots__ = ()
def __call__(cls, name: str): # type: ignore
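        # Reuse the LambdaVar already registered under this name if there is one; otherwise validate the name before construction.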
instance = _registered_vars.get(name, None)
if instance is not None:
return instance
if not name.isidentifier() or iskeyword(name):
raise NameError(f'LambdaVar with name `{name}` is not a valid identifier')
return super().__call__(name)
class LambdaVar(LambdaExpr, metaclass=_LambdaVarMeta):
"""
>>> a = LambdaVar('a')
>>> b = LambdaVar('b')
>>> tuple(map(a - b, (10, 20, 30), (10, 20, 20)))
(0, 0, 10)
"""
__slots__ = ()
def __new__(cls, name: str) -> 'LambdaVar':
return super().__new__(cls)
def __init__(self, name: str) -> None:
super().__init__(name)
self._string_repr = name
_registered_vars[name] = self
class _StarredLambdaVarMeta(_LambdaVarMeta):
__slots__ = ()
def __call__(cls):
return cls.__new__(cls)
class _StarredLambdaVar(LambdaVar, metaclass=_StarredLambdaVarMeta):
"""Special abstract ``LambdaVar`` handler for ``*args`` and ``**kwargs``."""
__slots__ = ()
def __new__(cls, name: str):
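        # Starred variables ('*args' / '**kwargs') are singletons: return the registered instance when it already exists.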
instance = _registered_vars.get(name, None)
if instance is not None:
return instance
instance = LambdaExpr.__new__(cls)
instance._string_repr = name
_registered_vars[name] = instance
return instance
def __call__(self, *args, **kwargs) -> NoReturn:
raise TypeError(
f'Cannot build a lambda function based only on the starred `LambdaVar` instance {repr(self)}'
)
def __iter__(self) -> Generator[str, None, None]: # type: ignore
pass
class LambdaArgs(_StarredLambdaVar):
__slots__ = ()
def __new__(cls) -> 'LambdaArgs':
return super().__new__(LambdaArgs, '*args')
def __iter__(self) -> Generator[str, None, None]: # type: ignore
yield 'args'
class LambdaKwargs(_StarredLambdaVar):
__slots__ = ()
def __new__(cls) -> 'LambdaKwargs':
return super().__new__(LambdaKwargs, '**kwargs')
def __iter__(self) -> Generator[str, None, None]: # type: ignore
yield 'kwargs'
_registered_vars: Final[Dict[str, LambdaVar]] = {}
x = LambdaVar('x')
y = LambdaVar('y')
z = LambdaVar('z')
| [
"keyword.iskeyword",
"functools.partial"
] | [((1182, 1217), 'functools.partial', 'partial', (['_call_monkey_patcher', 'self'], {}), '(_call_monkey_patcher, self)\n', (1189, 1217), False, 'from functools import partial\n'), ((11821, 11836), 'keyword.iskeyword', 'iskeyword', (['name'], {}), '(name)\n', (11830, 11836), False, 'from keyword import iskeyword\n')] |
import pandas as pd
from nilearn.signal import clean
from nilearn.interfaces.fmriprep import load_confounds_strategy, load_confounds
from fmriprep_denoise.data.atlas import create_atlas_masker, get_atlas_dimensions
def generate_timeseries_per_dimension(atlas_name, output, benchmark_strategies,
data_aroma, data):
dimensions = get_atlas_dimensions(atlas_name)
for dimension in dimensions:
print(f"-- {atlas_name}: dimension {dimension} --")
print("raw time series")
atlas_info = {"atlas_name":atlas_name,
"dimension":dimension}
subject_timeseries = _generate_raw_timeseries(output, data, atlas_info)
for strategy_name, parameters in benchmark_strategies.items():
print(f"Denoising: {strategy_name}")
print(parameters)
if "aroma" in strategy_name:
_clean_timeserise_aroma(atlas_name, dimension, strategy_name, parameters, output, data_aroma)
else:
_clean_timeserise_normal(subject_timeseries, atlas_name, dimension, strategy_name, parameters, output, data)
def get_confounds(strategy_name, parameters, img):
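    # The 'baseline' strategy loads the confounds directly; every other strategy goes through nilearn's predefined denoising strategies.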
if strategy_name == 'baseline':
reduced_confounds, sample_mask = load_confounds(img, **parameters)
else:
reduced_confounds, sample_mask = load_confounds_strategy(img, **parameters)
return reduced_confounds, sample_mask
def _clean_timeserise_normal(subject_timeseries, atlas_name, dimension, strategy_name, parameters, output, data):
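    # Denoise the pre-extracted raw time series with the requested confounds; an empty table is written when too many volumes are scrubbed.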
atlas_spec = f"atlas-{atlas_name}_nroi-{dimension}"
_, img, ts_path = _get_output_info(strategy_name,
output,
data,
atlas_spec)
reduced_confounds, sample_mask = get_confounds(strategy_name,
parameters,
img)
if _check_exclusion(reduced_confounds, sample_mask):
clean_timeseries = []
else:
clean_timeseries = clean(subject_timeseries,
detrend=True, standardize=True,
sample_mask=sample_mask,
confounds=reduced_confounds)
clean_timeseries = pd.DataFrame(clean_timeseries)
clean_timeseries.to_csv(ts_path, sep='\t', index=False)
def _clean_timeserise_aroma(atlas_name, dimension, strategy_name, parameters, output, data_aroma):
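    # AROMA strategies extract signals with a dedicated masker applied to the AROMA output image rather than reusing the raw time series.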
atlas_spec = f"atlas-{atlas_name}_nroi-{dimension}"
subject_mask, img, ts_path = _get_output_info(strategy_name,
output,
data_aroma,
atlas_spec)
reduced_confounds, sample_mask = get_confounds(strategy_name,
parameters,
img)
aroma_masker, _ = create_atlas_masker(atlas_name, dimension,
subject_mask,
nilearn_cache="")
clean_timeseries = aroma_masker.fit_transform(
img, confounds=reduced_confounds, sample_mask=sample_mask)
clean_timeseries = pd.DataFrame(clean_timeseries)
clean_timeseries.to_csv(ts_path, sep='\t', index=False)
def _generate_raw_timeseries(output, data, atlas_info):
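    # Extract the raw (un-denoised) parcel time series, or reload it from disk if it was already computed.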
subject_spec, subject_output, subject_mask = _get_subject_info(output, data)
rawts_path = subject_output / f"{subject_spec}_atlas-{atlas_info['atlas_name']}_nroi-{atlas_info['dimension']}_desc-raw_timeseries.tsv"
raw_masker, atlas_labels = create_atlas_masker(atlas_info['atlas_name'],
atlas_info['dimension'],
subject_mask,
detrend=False,
nilearn_cache="")
timeseries_labels = pd.DataFrame(columns=atlas_labels)
if not rawts_path.is_file():
subject_timeseries = raw_masker.fit_transform(data.func[0])
df = pd.DataFrame(subject_timeseries, columns=raw_masker.labels_)
# make sure missing label were put pack
df = pd.concat([timeseries_labels, df])
df.to_csv(rawts_path, sep='\t', index=False)
else:
df = pd.read_csv(rawts_path, header=0, sep='\t')
subject_timeseries = df.values
del raw_masker
return subject_timeseries
def _get_output_info(strategy_name, output, data, atlas_spec):
subject_spec, subject_output, subject_mask = _get_subject_info(output, data)
img = data.func[0]
ts_path = subject_output / f"{subject_spec}_{atlas_spec}_desc-{strategy_name}_timeseries.tsv"
return subject_mask,img,ts_path
def _check_exclusion(reduced_confounds, sample_mask):
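    # Flag the scan for exclusion when scrubbing removes more than 20% of the volumes.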
if sample_mask is not None:
kept_vol = len(sample_mask) / reduced_confounds.shape[0]
remove = 1 - kept_vol
else:
remove = 0
remove = remove > 0.2
return remove
def _get_subject_info(output, data):
img = data.func[0]
subject_spec = data.func[0].split('/')[-1].split('_desc-')[0]
subject_root = img.split(subject_spec)[0]
subject_id = subject_spec.split('_')[0]
subject_output = output / subject_id
subject_output.mkdir(exist_ok=True)
subject_mask = f"{subject_root}/{subject_spec}_desc-brain_mask.nii.gz"
return subject_spec, subject_output, subject_mask
| [
"nilearn.signal.clean",
"fmriprep_denoise.data.atlas.create_atlas_masker",
"nilearn.interfaces.fmriprep.load_confounds",
"pandas.read_csv",
"fmriprep_denoise.data.atlas.get_atlas_dimensions",
"pandas.DataFrame",
"nilearn.interfaces.fmriprep.load_confounds_strategy",
"pandas.concat"
] | [((373, 405), 'fmriprep_denoise.data.atlas.get_atlas_dimensions', 'get_atlas_dimensions', (['atlas_name'], {}), '(atlas_name)\n', (393, 405), False, 'from fmriprep_denoise.data.atlas import create_atlas_masker, get_atlas_dimensions\n'), ((2361, 2391), 'pandas.DataFrame', 'pd.DataFrame', (['clean_timeseries'], {}), '(clean_timeseries)\n', (2373, 2391), True, 'import pandas as pd\n'), ((3063, 3137), 'fmriprep_denoise.data.atlas.create_atlas_masker', 'create_atlas_masker', (['atlas_name', 'dimension', 'subject_mask'], {'nilearn_cache': '""""""'}), "(atlas_name, dimension, subject_mask, nilearn_cache='')\n", (3082, 3137), False, 'from fmriprep_denoise.data.atlas import create_atlas_masker, get_atlas_dimensions\n'), ((3368, 3398), 'pandas.DataFrame', 'pd.DataFrame', (['clean_timeseries'], {}), '(clean_timeseries)\n', (3380, 3398), True, 'import pandas as pd\n'), ((3769, 3890), 'fmriprep_denoise.data.atlas.create_atlas_masker', 'create_atlas_masker', (["atlas_info['atlas_name']", "atlas_info['dimension']", 'subject_mask'], {'detrend': '(False)', 'nilearn_cache': '""""""'}), "(atlas_info['atlas_name'], atlas_info['dimension'],\n subject_mask, detrend=False, nilearn_cache='')\n", (3788, 3890), False, 'from fmriprep_denoise.data.atlas import create_atlas_masker, get_atlas_dimensions\n'), ((4115, 4149), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'atlas_labels'}), '(columns=atlas_labels)\n', (4127, 4149), True, 'import pandas as pd\n'), ((1279, 1312), 'nilearn.interfaces.fmriprep.load_confounds', 'load_confounds', (['img'], {}), '(img, **parameters)\n', (1293, 1312), False, 'from nilearn.interfaces.fmriprep import load_confounds_strategy, load_confounds\n'), ((1364, 1406), 'nilearn.interfaces.fmriprep.load_confounds_strategy', 'load_confounds_strategy', (['img'], {}), '(img, **parameters)\n', (1387, 1406), False, 'from nilearn.interfaces.fmriprep import load_confounds_strategy, load_confounds\n'), ((2127, 2243), 'nilearn.signal.clean', 'clean', (['subject_timeseries'], {'detrend': '(True)', 'standardize': '(True)', 'sample_mask': 'sample_mask', 'confounds': 'reduced_confounds'}), '(subject_timeseries, detrend=True, standardize=True, sample_mask=\n sample_mask, confounds=reduced_confounds)\n', (2132, 2243), False, 'from nilearn.signal import clean\n'), ((4264, 4324), 'pandas.DataFrame', 'pd.DataFrame', (['subject_timeseries'], {'columns': 'raw_masker.labels_'}), '(subject_timeseries, columns=raw_masker.labels_)\n', (4276, 4324), True, 'import pandas as pd\n'), ((4386, 4420), 'pandas.concat', 'pd.concat', (['[timeseries_labels, df]'], {}), '([timeseries_labels, df])\n', (4395, 4420), True, 'import pandas as pd\n'), ((4497, 4540), 'pandas.read_csv', 'pd.read_csv', (['rawts_path'], {'header': '(0)', 'sep': '"""\t"""'}), "(rawts_path, header=0, sep='\\t')\n", (4508, 4540), True, 'import pandas as pd\n')] |
import json
import hashlib
from pydantic import BaseModel, validator
from typing import List, Optional
from speckle.base.resource import ResourceBaseSchema
from speckle.resources.objects import SpeckleObject
from speckle.schemas import Interval
NAME = 'line'
class Schema(SpeckleObject):
type: Optional[str] = "Line"
name: Optional[str] = "SpeckleLine"
Value: List[float] = []
domain: Optional[Interval] = Interval()
class Config:
case_sensitive = False
| [
"speckle.schemas.Interval"
] | [((436, 446), 'speckle.schemas.Interval', 'Interval', ([], {}), '()\n', (444, 446), False, 'from speckle.schemas import Interval\n')] |
import os
import xmlrpclib
from sfa.util.faults import *
from sfa.util.plxrn import PlXrn
from sfa.util.sfaticket import SfaTicket
from sfa.util.version import version_core
def GetVersion(api):
return version_core({'interface':'component',
'testbed':'myplc'})
def init_server():
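    # Refresh the trusted GIDs; if that fails, regenerate the node key and credential and try again.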
from sfa.server import sfa_component_setup
# get current trusted gids
try:
sfa_component_setup.get_trusted_certs()
except:
# our keypair may be old, try refreshing
sfa_component_setup.get_node_key()
sfa_component_setup.get_credential(force=True)
sfa_component_setup.get_trusted_certs()
def SliverStatus(api, slice_xrn, creds):
result = {}
result['geni_urn'] = slice_xrn
result['geni_status'] = 'unknown'
result['geni_resources'] = {}
return result
def start_slice(api, xrn, creds):
slicename = PlXrn(xrn, type='slice').pl_slicename()
    api.nodemanager.Start(slicename)
def stop_slice(api, xrn, creds):
slicename = PlXrn(xrn, type='slice').pl_slicename()
api.nodemanager.Stop(slicename)
def DeleteSliver(api, xrn, creds, call_id):
slicename = PlXrn(xrn, type='slice').pl_slicename()
api.nodemanager.Destroy(slicename)
def reset_slice(api, xrn):
slicename = PlXrn(xrn, type='slice').pl_slicename()
if not api.sliver_exists(slicename):
raise SliverDoesNotExist(slicename)
api.nodemanager.ReCreate(slicename)
# xxx outdated - this should accept a credential & call_id
def ListSlices(api):
# this returns a tuple, the data we want is at index 1
xids = api.nodemanager.GetXIDs()
# unfortunately the data we want is given to us as
# a string but we really want it as a dict
# lets eval it
slices = eval(xids[1])
return slices.keys()
def redeem_ticket(api, ticket_string):
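    # Decode the SFA ticket, make sure the sliver exists locally, then hand the ticket to the node manager.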
ticket = SfaTicket(string=ticket_string)
ticket.decode()
hrn = ticket.attributes['slivers'][0]['hrn']
    slicename = PlXrn(hrn).pl_slicename()
if not api.sliver_exists(slicename):
raise SliverDoesNotExist(slicename)
# convert ticket to format nm is used to
nm_ticket = xmlrpclib.dumps((ticket.attributes,), methodresponse=True)
api.nodemanager.AdminTicket(nm_ticket)
| [
"sfa.util.version.version_core",
"xmlrpclib.dumps",
"sfa.server.sfa_component_setup.get_trusted_certs",
"sfa.util.plxrn.PlXrn",
"sfa.server.sfa_component_setup.get_node_key",
"sfa.server.sfa_component_setup.get_credential",
"sfa.util.sfaticket.SfaTicket"
] | [((207, 267), 'sfa.util.version.version_core', 'version_core', (["{'interface': 'component', 'testbed': 'myplc'}"], {}), "({'interface': 'component', 'testbed': 'myplc'})\n", (219, 267), False, 'from sfa.util.version import version_core\n'), ((1855, 1886), 'sfa.util.sfaticket.SfaTicket', 'SfaTicket', ([], {'string': 'ticket_string'}), '(string=ticket_string)\n', (1864, 1886), False, 'from sfa.util.sfaticket import SfaTicket\n'), ((2146, 2204), 'xmlrpclib.dumps', 'xmlrpclib.dumps', (['(ticket.attributes,)'], {'methodresponse': '(True)'}), '((ticket.attributes,), methodresponse=True)\n', (2161, 2204), False, 'import xmlrpclib\n'), ((406, 445), 'sfa.server.sfa_component_setup.get_trusted_certs', 'sfa_component_setup.get_trusted_certs', ([], {}), '()\n', (443, 445), False, 'from sfa.server import sfa_component_setup\n'), ((515, 549), 'sfa.server.sfa_component_setup.get_node_key', 'sfa_component_setup.get_node_key', ([], {}), '()\n', (547, 549), False, 'from sfa.server import sfa_component_setup\n'), ((558, 604), 'sfa.server.sfa_component_setup.get_credential', 'sfa_component_setup.get_credential', ([], {'force': '(True)'}), '(force=True)\n', (592, 604), False, 'from sfa.server import sfa_component_setup\n'), ((613, 652), 'sfa.server.sfa_component_setup.get_trusted_certs', 'sfa_component_setup.get_trusted_certs', ([], {}), '()\n', (650, 652), False, 'from sfa.server import sfa_component_setup\n'), ((898, 922), 'sfa.util.plxrn.PlXrn', 'PlXrn', (['xrn'], {'type': '"""slice"""'}), "(xrn, type='slice')\n", (903, 922), False, 'from sfa.util.plxrn import PlXrn\n'), ((1024, 1048), 'sfa.util.plxrn.PlXrn', 'PlXrn', (['xrn'], {'type': '"""slice"""'}), "(xrn, type='slice')\n", (1029, 1048), False, 'from sfa.util.plxrn import PlXrn\n'), ((1161, 1185), 'sfa.util.plxrn.PlXrn', 'PlXrn', (['xrn'], {'type': '"""slice"""'}), "(xrn, type='slice')\n", (1166, 1185), False, 'from sfa.util.plxrn import PlXrn\n'), ((1284, 1308), 'sfa.util.plxrn.PlXrn', 'PlXrn', (['xrn'], {'type': '"""slice"""'}), "(xrn, type='slice')\n", (1289, 1308), False, 'from sfa.util.plxrn import PlXrn\n'), ((1972, 1982), 'sfa.util.plxrn.PlXrn', 'PlXrn', (['hrn'], {}), '(hrn)\n', (1977, 1982), False, 'from sfa.util.plxrn import PlXrn\n')] |
import pytest
import sqlite3
from database_helpers import create_sample_user_records # noqa
def test_pokemon_insert_valid_record_no_users(sqlite_conn):
"""Validate that we fail to insert a valid record into the 'pokemons' table when there is no corresponding
user in the 'users' table"""
cursor = sqlite_conn.cursor()
with pytest.raises(sqlite3.IntegrityError):
cursor.execute(
'''INSERT INTO pokemon(trainer_id, pokemon_number, pokemon_name, pokemon_level) VALUES (?, ?, ?, ?)''',
("USER1", 1, "bulbasaur", 1)
)
def test_pokemon_insert_valid_records(sqlite_conn):
"""Test validating we can insert valid records into our 'users' table"""
cursor = sqlite_conn.cursor()
# Create records in the 'users' table
create_sample_user_records(sqlite_conn)
input_records = [
("USER1", 1, "bulbasaur", 1),
("USER1", 2, "ivysaur", 1),
("USER2", 1, "bulbasaur", 1),
("USER3", 2, "ivysaur", 1)
]
cursor.executemany(
'''INSERT INTO pokemon(trainer_id, pokemon_number, pokemon_name, pokemon_level) VALUES (?, ?, ?, ?)''',
input_records
)
sqlite_conn.commit()
cursor.execute('''SELECT trainer_id, pokemon_number, pokemon_name, pokemon_level from pokemon''')
result = cursor.fetchall()
assert input_records == result
def test_pokemon_when_we_delete_users(sqlite_conn):
"""Validate that if we delete a user from the 'users' table that corresponding records
are removed from the 'pokemon' table"""
cursor = sqlite_conn.cursor()
# Create records in the 'users' table
create_sample_user_records(sqlite_conn)
input_records = [
("USER1", 1, "bulbasaur", 1),
("USER1", 2, "ivysaur", 1),
("USER2", 1, "bulbasaur", 1),
("USER3", 2, "ivysaur", 1)
]
cursor.executemany(
'''INSERT INTO pokemon(trainer_id, pokemon_number, pokemon_name, pokemon_level) VALUES (?, ?, ?, ?)''',
input_records
)
cursor.execute("DELETE FROM users WHERE user_id='USER1';")
sqlite_conn.commit()
cursor.execute('''SELECT trainer_id, pokemon_number, pokemon_name, pokemon_level from pokemon''')
result = cursor.fetchall()
assert input_records[2:] == result
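# NOTE (illustrative assumption, not part of the original tests): the cascading delete
# exercised above relies on the sqlite_conn fixture enabling foreign keys and declaring
# the child table with ON DELETE CASCADE, roughly:
#   conn.execute("PRAGMA foreign_keys = ON")
#   conn.execute("""CREATE TABLE pokemon(
#       trainer_id TEXT REFERENCES users(user_id) ON DELETE CASCADE,
#       pokemon_number INTEGER, pokemon_name TEXT, pokemon_level INTEGER)""")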
| [
"database_helpers.create_sample_user_records",
"pytest.raises"
] | [((786, 825), 'database_helpers.create_sample_user_records', 'create_sample_user_records', (['sqlite_conn'], {}), '(sqlite_conn)\n', (812, 825), False, 'from database_helpers import create_sample_user_records\n'), ((1630, 1669), 'database_helpers.create_sample_user_records', 'create_sample_user_records', (['sqlite_conn'], {}), '(sqlite_conn)\n', (1656, 1669), False, 'from database_helpers import create_sample_user_records\n'), ((344, 381), 'pytest.raises', 'pytest.raises', (['sqlite3.IntegrityError'], {}), '(sqlite3.IntegrityError)\n', (357, 381), False, 'import pytest\n')] |
from pettingzoo import AECEnv
from pettingzoo.utils import agent_selector
from pettingzoo.utils import wrappers
from pettingzoo.utils.conversions import parallel_wrapper_fn
from gym_stag_hunt.envs.hunt import HuntEnv
from gym.spaces import Box
import cv2
import numpy as np
def env(grid_size=(5, 5), screen_size=(600, 600), obs_type='image', enable_multiagent=False, opponent_policy='random',
load_renderer=False, episodes_per_game=1000, stag_follows=True, run_away_after_maul=False,
forage_quantity=2, stag_reward=5, forage_reward=1, mauling_punishment=-5, max_time_steps=100,
obs_shape=(42, 42)):
"""
The env function wraps the environment in 3 wrappers by default. These
wrappers contain logic that is common to many pettingzoo environments.
We recommend you use at least the OrderEnforcingWrapper on your own environment
to provide sane error messages. You can find full documentation for these methods
elsewhere in the developer documentation.
"""
env_init = ZooHuntEnvironment(grid_size, screen_size, obs_type, enable_multiagent, opponent_policy, load_renderer,
episodes_per_game, stag_follows, run_away_after_maul, forage_quantity, stag_reward,
forage_reward, mauling_punishment, max_time_steps, obs_shape)
env_init = wrappers.CaptureStdoutWrapper(env_init)
env_init = wrappers.AssertOutOfBoundsWrapper(env_init)
env_init = wrappers.OrderEnforcingWrapper(env_init)
return env_init
parallel_env = parallel_wrapper_fn(env)
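# Minimal usage sketch (assumed, not part of the original module): both factory functions
# above return ready-to-use PettingZoo environments, e.g.
#   aec_env = env(grid_size=(5, 5), obs_type='image')
#   par_env = parallel_env(grid_size=(5, 5), obs_type='image')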
class ZooHuntEnvironment(AECEnv):
metadata = {'render.modes': ['human'], 'name': "pettingzoo_hunt"}
def __init__(self, grid_size=(5, 5), screen_size=(600, 600), obs_type='image', enable_multiagent=False,
opponent_policy='random', load_renderer=False, episodes_per_game=1000, stag_follows=True,
run_away_after_maul=False, forage_quantity=2, stag_reward=5, forage_reward=1, mauling_punishment=-5,
max_time_steps=100, obs_shape=(42, 42)):
"""
        :param grid_size: A (W, H) tuple corresponding to the grid dimensions. Although W=H is expected, W!=H also works
:param screen_size: A (W, H) tuple corresponding to the pixel dimensions of the game window
:param obs_type: Can be 'image' for pixel-array based observations, or 'coords' for just the entity coordinates
:param episodes_per_game: How many timesteps take place before we reset the entity positions.
:param stag_follows: Should the stag seek out the nearest agent (true) or take a random move (false)
:param run_away_after_maul: Does the stag stay on the same cell after mauling an agent (true) or respawn (false)
:param forage_quantity: How many plants will be placed on the board.
:param stag_reward: How much reinforcement the agents get for catching the stag
:param forage_reward: How much reinforcement the agents get for harvesting a plant
:param mauling_punishment: How much reinforcement the agents get for trying to catch a stag alone (MUST be neg.)
"""
super().__init__()
self.hunt_env = HuntEnv(grid_size, screen_size, obs_type, enable_multiagent, opponent_policy, load_renderer,
episodes_per_game, stag_follows, run_away_after_maul, forage_quantity, stag_reward,
forage_reward, mauling_punishment)
self.possible_agents = ["player_" + str(r) for r in range(2)]
self.agents = self.possible_agents[:]
self.shape = obs_shape
observation_space = Box(low=0, high=255, shape=self.shape + self.hunt_env.observation_space.shape[2:],
dtype=np.uint8)
self.observation_spaces = {agent: observation_space for agent in self.possible_agents}
self.action_spaces = {agent: self.hunt_env.action_space for agent in self.possible_agents}
self.has_reset = True
self.agent_name_mapping = dict(zip(self.possible_agents, list(range(len(self.possible_agents)))))
self.agent_selection = None
self._agent_selector = agent_selector(self.agents)
self.done = False
self.rewards = dict(zip(self.agents, [0 for _ in self.agents]))
self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents]))
self.dones = dict(zip(self.agents, [False for _ in self.agents]))
self.infos = dict(zip(self.agents, [{} for _ in self.agents]))
self.accumulated_actions = []
self.current_observation = {agent: self.observation_spaces[agent].sample() for agent in self.agents}
self.t = 0
self.last_rewards = [0, 0]
self.max_time_steps = max_time_steps
def observation_space(self, agent):
return self.observation_spaces[agent]
def action_space(self, agent):
return self.action_spaces[agent]
def reset(self):
obs = self.hunt_env.reset()
self.agents = self.possible_agents[:]
self._agent_selector.reinit(self.agents)
self.agent_selection = self._agent_selector.next()
self.current_observation = {agent: obs for agent in self.agents}
# Get an image observation
# image_obs = self.game.get_image_obs()
self.agent_name_mapping = dict(zip(self.possible_agents, list(range(len(self.possible_agents)))))
self.rewards = dict(zip(self.agents, [0 for _ in self.agents]))
self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents]))
self.dones = dict(zip(self.agents, [False for _ in self.agents]))
self.infos = dict(zip(self.agents, [{} for _ in self.agents]))
self.accumulated_actions = []
self.t = 0
def step(self, action):
agent = self.agent_selection
self.accumulated_actions.append(action)
for idx, agent in enumerate(self.agents):
self.rewards[agent] = 0
if self._agent_selector.is_last():
self.accumulated_step(self.accumulated_actions)
self.accumulated_actions = []
self.agent_selection = self._agent_selector.next()
self._cumulative_rewards[agent] = 0
def accumulated_step(self, actions):
# Track internal environment info.
self.t += 1
obs, rewards, done, info = self.hunt_env.step(actions)
self.last_rewards = rewards
if self.t >= self.max_time_steps:
done = True
info = {"t": self.t}
for idx, agent in enumerate(self.agents):
self.dones[agent] = done
self.current_observation[agent] = obs[idx]
self.rewards[agent] = rewards[idx]
self.infos[agent] = info
def observe(self, agent):
returned_observation = self.current_observation[agent]
returned_observation = cv2.resize(returned_observation, self.shape[::-1], interpolation=cv2.INTER_AREA)
return returned_observation
def render(self, mode='human'):
self.hunt_env.render(mode)
def state(self):
pass
def close(self):
self.hunt_env.close()
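# Illustrative interaction loop (an assumption about standard PettingZoo AEC usage, not
# part of the original file); it relies on agent_iter()/last() inherited from AECEnv:
#   e = env()
#   e.reset()
#   for agent in e.agent_iter(max_iter=200):
#       obs, reward, done, info = e.last()
#       action = None if done else e.action_space(agent).sample()
#       e.step(action)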
| [
"pettingzoo.utils.agent_selector",
"pettingzoo.utils.wrappers.OrderEnforcingWrapper",
"pettingzoo.utils.wrappers.AssertOutOfBoundsWrapper",
"pettingzoo.utils.conversions.parallel_wrapper_fn",
"gym_stag_hunt.envs.hunt.HuntEnv",
"gym.spaces.Box",
"pettingzoo.utils.wrappers.CaptureStdoutWrapper",
"cv2.re... | [((1548, 1572), 'pettingzoo.utils.conversions.parallel_wrapper_fn', 'parallel_wrapper_fn', (['env'], {}), '(env)\n', (1567, 1572), False, 'from pettingzoo.utils.conversions import parallel_wrapper_fn\n'), ((1356, 1395), 'pettingzoo.utils.wrappers.CaptureStdoutWrapper', 'wrappers.CaptureStdoutWrapper', (['env_init'], {}), '(env_init)\n', (1385, 1395), False, 'from pettingzoo.utils import wrappers\n'), ((1411, 1454), 'pettingzoo.utils.wrappers.AssertOutOfBoundsWrapper', 'wrappers.AssertOutOfBoundsWrapper', (['env_init'], {}), '(env_init)\n', (1444, 1454), False, 'from pettingzoo.utils import wrappers\n'), ((1470, 1510), 'pettingzoo.utils.wrappers.OrderEnforcingWrapper', 'wrappers.OrderEnforcingWrapper', (['env_init'], {}), '(env_init)\n', (1500, 1510), False, 'from pettingzoo.utils import wrappers\n'), ((3198, 3421), 'gym_stag_hunt.envs.hunt.HuntEnv', 'HuntEnv', (['grid_size', 'screen_size', 'obs_type', 'enable_multiagent', 'opponent_policy', 'load_renderer', 'episodes_per_game', 'stag_follows', 'run_away_after_maul', 'forage_quantity', 'stag_reward', 'forage_reward', 'mauling_punishment'], {}), '(grid_size, screen_size, obs_type, enable_multiagent,\n opponent_policy, load_renderer, episodes_per_game, stag_follows,\n run_away_after_maul, forage_quantity, stag_reward, forage_reward,\n mauling_punishment)\n', (3205, 3421), False, 'from gym_stag_hunt.envs.hunt import HuntEnv\n'), ((3650, 3753), 'gym.spaces.Box', 'Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(self.shape + self.hunt_env.observation_space.shape[2:])', 'dtype': 'np.uint8'}), '(low=0, high=255, shape=self.shape + self.hunt_env.observation_space.\n shape[2:], dtype=np.uint8)\n', (3653, 3753), False, 'from gym.spaces import Box\n'), ((4179, 4206), 'pettingzoo.utils.agent_selector', 'agent_selector', (['self.agents'], {}), '(self.agents)\n', (4193, 4206), False, 'from pettingzoo.utils import agent_selector\n'), ((6878, 6963), 'cv2.resize', 'cv2.resize', (['returned_observation', 'self.shape[::-1]'], {'interpolation': 'cv2.INTER_AREA'}), '(returned_observation, self.shape[::-1], interpolation=cv2.INTER_AREA\n )\n', (6888, 6963), False, 'import cv2\n')] |
from pykivdroid import mActivity,WindowManagerNLayoutParams,Window,run_on_ui_thread,View
@run_on_ui_thread
def set_full_screen():
return mActivity.getWindow().getDecorView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_FULLSCREEN
|View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION)
| [
"pykivdroid.mActivity.getWindow"
] | [((142, 163), 'pykivdroid.mActivity.getWindow', 'mActivity.getWindow', ([], {}), '()\n', (161, 163), False, 'from pykivdroid import mActivity, WindowManagerNLayoutParams, Window, run_on_ui_thread, View\n')] |
import os
from oic.utils.jwt import JWT
from oic.utils.keyio import build_keyjar
from oic.utils.keyio import keybundle_from_local_file
__author__ = "roland"
BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "data/keys"))
keys = [
{"type": "RSA", "key": os.path.join(BASE_PATH, "cert.key"), "use": ["enc", "sig"]},
{"type": "EC", "crv": "P-256", "use": ["sig"]},
{"type": "EC", "crv": "P-256", "use": ["enc"]},
]
jwks, keyjar, kidd = build_keyjar(keys)
issuer = "https://fedop.example.org"
def _eq(l1, l2):
return set(l1) == set(l2)
def test_jwt_pack():
_jwt = JWT(keyjar, lifetime=3600, iss=issuer).pack()
assert _jwt
assert len(_jwt.split(".")) == 3
def test_jwt_pack_and_unpack():
srv = JWT(keyjar, iss=issuer)
_jwt = srv.pack(sub="sub")
info = srv.unpack(_jwt)
assert _eq(info.keys(), ["jti", "iat", "exp", "iss", "sub", "kid"])
class TestJWT(object):
"""Tests for JWT."""
def test_unpack_verify_key(self):
srv = JWT(keyjar, iss=issuer)
_jwt = srv.pack(sub="sub")
# Remove the signing key from keyjar
keyjar.remove_key("", "RSA", "")
# And add it back as verify
kb = keybundle_from_local_file(
os.path.join(BASE_PATH, "cert.key"), "RSA", ["ver"]
)
        # keybundle_from_local_file doesn't assign kid, so assign it manually
kb._keys[0].kid = kidd["sig"]["RSA"]
keyjar.add_kb("", kb)
info = srv.unpack(_jwt)
assert info["sub"] == "sub"
| [
"oic.utils.keyio.build_keyjar",
"os.path.dirname",
"oic.utils.jwt.JWT",
"os.path.join"
] | [((468, 486), 'oic.utils.keyio.build_keyjar', 'build_keyjar', (['keys'], {}), '(keys)\n', (480, 486), False, 'from oic.utils.keyio import build_keyjar\n'), ((751, 774), 'oic.utils.jwt.JWT', 'JWT', (['keyjar'], {'iss': 'issuer'}), '(keyjar, iss=issuer)\n', (754, 774), False, 'from oic.utils.jwt import JWT\n'), ((202, 227), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (217, 227), False, 'import os\n'), ((280, 315), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""cert.key"""'], {}), "(BASE_PATH, 'cert.key')\n", (292, 315), False, 'import os\n'), ((1011, 1034), 'oic.utils.jwt.JWT', 'JWT', (['keyjar'], {'iss': 'issuer'}), '(keyjar, iss=issuer)\n', (1014, 1034), False, 'from oic.utils.jwt import JWT\n'), ((607, 645), 'oic.utils.jwt.JWT', 'JWT', (['keyjar'], {'lifetime': '(3600)', 'iss': 'issuer'}), '(keyjar, lifetime=3600, iss=issuer)\n', (610, 645), False, 'from oic.utils.jwt import JWT\n'), ((1244, 1279), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""cert.key"""'], {}), "(BASE_PATH, 'cert.key')\n", (1256, 1279), False, 'import os\n')] |
import numpy as np
import pandas as pd
import decorators
from scipy import optimize
import settings
import utility_functions as utilfunc
import agent_mutation
import PySAM.Battwatts as battery
import PySAM.BatteryTools as batt_tools
import PySAM.Utilityrate5 as utility
import PySAM.Cashloan as cashloan
#==============================================================================
# Load logger
logger = utilfunc.get_logger()
#==============================================================================
#%%
def calc_system_performance(kw, pv, utilityrate, loan, batt, costs, agent, en_batt=True, batt_simple_dispatch=0):
"""
Executes Battwatts, Utilityrate5, and Cashloan PySAM modules with system sizes (kw) as input
Parameters
----------
kw: Capacity (in kW)
pv: Dictionary with generation_hourly and consumption_hourly
utilityrate: PySAM Utilityrate5 module
loan: PySAM Cashloan module
batt: PySAM Battwatts module
costs: Dictionary with system costs
agent: pd.Series with agent attributes
en_batt: Enable battery
batt_simple_dispatch: batt.Battery.batt_simple_dispatch
- batt_simple_dispatch = 0 (peak shaving look ahead)
- batt_simple_dispatch = 1 (peak shaving look behind)
Returns
-------
-loan.Outputs.npv: the negative net present value of system + storage to be optimized for system sizing
"""
inv_eff = 0.96 # default SAM inverter efficiency for PV
gen_hourly = pv['generation_hourly']
load_hourly = pv['consumption_hourly'] # same field as 'load_kwh_per_customer_in_bin_initial' when summed
dc = [(i * kw) * 1000 for i in gen_hourly] # W
ac = [i * inv_eff for i in dc] # W
gen = [i / 1000 for i in ac] # W to kW
# Set up battery, with system generation conditional on the battery generation being included
if en_batt:
batt.Battery.dc = dc
batt.Battery.ac = ac
batt.Battery.batt_simple_enable = 1
batt.Battery.batt_simple_chemistry = 1 # default value is 1: li ion for residential
batt.Battery.batt_simple_dispatch = batt_simple_dispatch
batt.Battery.batt_simple_meter_position = 0 # default value
batt.Battery.inverter_efficiency = 100 # recommended by Darice for dc-connected
batt.Battery.load = load_hourly
# PV to Battery ratio (kW) - From Ashreeta, 02/08/2020
pv_to_batt_ratio = 1.31372
batt_capacity_to_power_ratio = 2 # hours of operation
desired_size = kw / pv_to_batt_ratio # Default SAM value for residential systems is 10
desired_power = desired_size / batt_capacity_to_power_ratio
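        # Worked example (illustrative): for kw = 5.0 the defaults above give
        # desired_size  = 5.0 / 1.31372 ~ 3.81 kWh and desired_power = 3.81 / 2 ~ 1.90 kW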
batt_inputs = {
'batt_chem': batt.Battery.batt_simple_chemistry,
'batt_Qfull': 2.5, # default SAM value
'batt_Vnom_default': 3.6, # default SAM value
'batt_ac_or_dc': 0, # dc-connected
'desired_power': desired_power,
'desired_capacity': desired_size,
'desired_voltage': 500,
'size_by_ac_not_dc': 0, # dc-connected
'inverter_eff': batt.Battery.inverter_efficiency
# 'batt_dc_dc_efficiency': (optional)
}
# Default values for lead acid batteries
if batt.Battery.batt_simple_chemistry == 0:
batt_inputs['LeadAcid_q10'] = 93.2
batt_inputs['LeadAcid_q20'] = 100
batt_inputs['LeadAcid_qn'] = 58.12
# batt_inputs['LeadAcid_tn']: (optional)
# PySAM.BatteryTools.size_li_ion_battery is the same as dGen_battery_sizing_battwatts.py
batt_outputs = batt_tools.size_li_ion_battery(batt_inputs)
computed_size = batt_outputs['batt_computed_bank_capacity']
computed_power = batt_outputs['batt_power_discharge_max_kwdc']
batt.Battery.batt_simple_kwh = computed_size
batt.Battery.batt_simple_kw = computed_power
batt.execute()
# declare value for net billing sell rate
if agent.loc['compensation_style']=='none':
net_billing_sell_rate = 0.
else:
net_billing_sell_rate = agent.loc['wholesale_elec_price_dollars_per_kwh'] * agent.loc['elec_price_multiplier']
utilityrate = process_tariff(utilityrate, agent.loc['tariff_dict'], net_billing_sell_rate)
utilityrate.SystemOutput.gen = batt.Outputs.gen
loan.BatterySystem.en_batt = 1
loan.BatterySystem.batt_computed_bank_capacity = batt.Outputs.batt_bank_installed_capacity
loan.BatterySystem.batt_bank_replacement = batt.Outputs.batt_bank_replacement
# Battery capacity-based System Costs amount [$/kWhcap]
loan.BatterySystem.battery_per_kWh = costs['batt_capex_per_kwh']
# specify number of O&M types (1 = PV+batt)
loan.SystemCosts.add_om_num_types = 1
# specify O&M variables
loan.SystemCosts.om_capacity = [costs['system_om_per_kw'] + costs['system_variable_om_per_kw']]
loan.SystemCosts.om_capacity1 = [costs['batt_om_per_kw']]
loan.SystemCosts.om_production1 = [costs['batt_om_per_kwh'] * 1000]
loan.SystemCosts.om_replacement_cost1 = [0.]
# Battery capacity for System Costs values [kW]
loan.SystemCosts.om_capacity1_nameplate = batt.Battery.batt_simple_kw
# Battery production for System Costs values [kWh]
loan.SystemCosts.om_production1_values = [batt.Battery.batt_simple_kwh]
        # PV system costs (battery costs computed separately below); without this line
        # system_costs would be undefined in the battery branch when computing direct_costs
        system_costs = costs['system_capex_per_kw'] * kw
        batt_costs = ((costs['batt_capex_per_kw']*batt.Battery.batt_simple_kw) +
                      (costs['batt_capex_per_kwh'] * batt.Battery.batt_simple_kwh))
else:
batt.Battery.batt_simple_enable = 0
loan.BatterySystem.en_batt = 0
computed_power = computed_size = 0
# declare value for net billing sell rate
if agent.loc['compensation_style']=='none':
net_billing_sell_rate = 0.
else:
net_billing_sell_rate = agent.loc['wholesale_elec_price_dollars_per_kwh'] * agent.loc['elec_price_multiplier']
utilityrate = process_tariff(utilityrate, agent.loc['tariff_dict'], net_billing_sell_rate)
utilityrate.SystemOutput.gen = gen
# specify number of O&M types (0 = PV only)
loan.SystemCosts.add_om_num_types = 0
# specify O&M variables
loan.SystemCosts.om_capacity = [costs['system_om_per_kw'] + costs['system_variable_om_per_kw']]
loan.SystemCosts.om_replacement_cost1 = [0.]
system_costs = costs['system_capex_per_kw'] * kw
batt_costs = 0
# Execute utility rate module
utilityrate.Load.load = load_hourly
utilityrate.execute()
# Process payment incentives
loan = process_incentives(loan, kw, computed_power, computed_size, gen_hourly, agent)
# Specify final Cashloan parameters
loan.FinancialParameters.system_capacity = kw
loan.SystemOutput.annual_energy_value = utilityrate.Outputs.annual_energy_value
loan.SystemOutput.gen = utilityrate.SystemOutput.gen
loan.ThirdPartyOwnership.elec_cost_with_system = utilityrate.Outputs.elec_cost_with_system
loan.ThirdPartyOwnership.elec_cost_without_system = utilityrate.Outputs.elec_cost_without_system
# Calculate system costs
direct_costs = (system_costs + batt_costs) * costs['cap_cost_multiplier']
sales_tax = 0
loan.SystemCosts.total_installed_cost = direct_costs + sales_tax
# Execute financial module
loan.execute()
return -loan.Outputs.npv
def calc_system_size_and_performance_pv(agent, sectors, rate_switch_table=None):
"""
Calculate the optimal system and battery size and generation profile, and resulting bill savings and financial metrics.
Parameters
----------
agent : 'pd.df'
individual agent object.
Returns
-------
agent: 'pd.df'
Adds several features to the agent dataframe:
- **agent_id**
- **system_kw** - system capacity selected by agent
- **batt_kw** - battery capacity selected by agent
- **batt_kwh** - battery energy capacity
- **npv** - net present value of system + storage
- **cash_flow** - array of annual cash flows from system adoption
- **batt_dispatch_profile** - array of hourly battery dispatch
- **annual_energy_production_kwh** - annual energy production (kwh) of system
- **naep** - normalized annual energy production (kwh/kW) of system
- **capacity_factor** - annual capacity factor
- **first_year_elec_bill_with_system** - first year electricity bill with adopted system ($/yr)
- **first_year_elec_bill_savings** - first year electricity bill savings with adopted system ($/yr)
- **first_year_elec_bill_savings_frac** - fraction of savings on electricity bill in first year of system adoption
- **max_system_kw** - maximum system size allowed as constrained by roof size or not exceeding annual consumption
- **first_year_elec_bill_without_system** - first year electricity bill without adopted system ($/yr)
- **avg_elec_price_cents_per_kwh** - first year electricity price (c/kwh)
- **cbi** - ndarray of capacity-based incentives applicable to agent
- **ibi** - ndarray of investment-based incentives applicable to agent
- **pbi** - ndarray of performance-based incentives applicable to agent
- **cash_incentives** - ndarray of cash-based incentives applicable to agent
- **export_tariff_result** - summary of structure of retail tariff applied to agent
"""
# Initialize new DB connection
model_settings = settings.init_model_settings()
con, cur = utilfunc.make_con(model_settings.pg_conn_string, model_settings.role)
# PV
pv = dict()
# Extract load profile after scaling hourly load to annual total
load_profile_df = agent_mutation.elec.get_and_apply_agent_load_profiles(con, agent)
pv['consumption_hourly'] = pd.Series(load_profile_df['consumption_hourly']).iloc[0]
del load_profile_df
# Using the scale offset factor of 1E6 for capacity factors
norm_scaled_pv_cf_profiles_df = agent_mutation.elec.get_and_apply_normalized_hourly_resource_solar(con, agent)
pv['generation_hourly'] = pd.Series(norm_scaled_pv_cf_profiles_df['solar_cf_profile'].iloc[0]) / 1e6
del norm_scaled_pv_cf_profiles_df
# Calculate normalized annual energy production
agent.loc['naep'] = float(np.sum(pv['generation_hourly']))
# Battwatts
if agent.loc['sector_abbr'] == 'res':
batt = battery.default("PVWattsBatteryResidential")
else:
batt = battery.default("PVWattsBatteryCommercial")
# Utilityrate5
if agent.loc['sector_abbr'] == 'res':
utilityrate = utility.default("PVWattsBatteryResidential")
else:
utilityrate = utility.default("PVWattsBatteryCommercial")
######################################
###--------- UTILITYRATE5 ---------###
###--- SYSTEM LIFETIME SETTINGS ---###
######################################
# Inflation rate [%]
utilityrate.Lifetime.inflation_rate = agent.loc['inflation_rate'] * 100
# Number of years in analysis [years]
utilityrate.Lifetime.analysis_period = agent.loc['economic_lifetime_yrs']
# Lifetime hourly system outputs [0/1]; Options: 0=hourly first year,1=hourly lifetime
utilityrate.Lifetime.system_use_lifetime_output = 0
######################################
###--------- UTILITYRATE5 ---------###
###---- DEGRADATION/ESCALATION ----###
######################################
# Annual energy degradation [%]
utilityrate.SystemOutput.degradation = [agent.loc['pv_degradation_factor'] * 100] # convert decimal to %
# Annual electricity rate escalation [%/year]
utilityrate.ElectricityRates.rate_escalation = [agent.loc['elec_price_escalator'] * 100] # convert decimal to %
######################################
###--------- UTILITYRATE5 ---------###
###---- NET METERING SETTINGS -----###
######################################
# Dictionary to map dGen compensation styles to PySAM options
nem_options = {'net metering':0, 'net billing':2, 'buy all sell all':4, 'none':2}
# Metering options [0=net energy metering,1=net energy metering with $ credits,2=net billing,3=net billing with carryover to next month,4=buy all - sell all]
utilityrate.ElectricityRates.ur_metering_option = nem_options[agent.loc['compensation_style']]
# Year end sell rate [$/kWh]
utilityrate.ElectricityRates.ur_nm_yearend_sell_rate = agent.loc['wholesale_elec_price_dollars_per_kwh'] * agent.loc['elec_price_multiplier']
if agent.loc['compensation_style']=='none':
net_billing_sell_rate = 0.
else:
net_billing_sell_rate = agent.loc['wholesale_elec_price_dollars_per_kwh'] * agent.loc['elec_price_multiplier']
######################################
###--------- UTILITYRATE5 ---------###
###-------- BUY/SELL RATES --------###
######################################
# Enable time step sell rates [0/1]
utilityrate.ElectricityRates.ur_en_ts_sell_rate = 0
# Time step sell rates [0/1]
utilityrate.ElectricityRates.ur_ts_sell_rate = [0.]
# Set sell rate equal to buy rate [0/1]
utilityrate.ElectricityRates.ur_sell_eq_buy = 0
######################################
###--------- UTILITYRATE5 ---------###
###-------- MISC. SETTINGS --------###
######################################
# Use single monthly peak for TOU demand charge; options: 0=use TOU peak,1=use flat peak
utilityrate.ElectricityRates.TOU_demand_single_peak = 0 # ?
# Optionally enable/disable electricity_rate [years]
utilityrate.ElectricityRates.en_electricity_rates = 1
######################################
###--------- UTILITYRATE5 ---------###
###----- TARIFF RESTRUCTURING -----###
######################################
utilityrate = process_tariff(utilityrate, agent.loc['tariff_dict'], net_billing_sell_rate)
######################################
###----------- CASHLOAN -----------###
###----- FINANCIAL PARAMETERS -----###
######################################
# Initiate cashloan model and set market-specific variables
# Assume res agents do not evaluate depreciation at all
# Assume non-res agents only evaluate federal depreciation (not state)
if agent.loc['sector_abbr'] == 'res':
loan = cashloan.default("PVWattsBatteryResidential")
loan.FinancialParameters.market = 0
else:
loan = cashloan.default("PVWattsBatteryCommercial")
loan.FinancialParameters.market = 1
loan.FinancialParameters.analysis_period = agent.loc['economic_lifetime_yrs']
loan.FinancialParameters.debt_fraction = 100 - (agent.loc['down_payment_fraction'] * 100)
loan.FinancialParameters.federal_tax_rate = [(agent.loc['tax_rate'] * 100) * 0.7] # SAM default
loan.FinancialParameters.inflation_rate = agent.loc['inflation_rate'] * 100
loan.FinancialParameters.insurance_rate = 0
loan.FinancialParameters.loan_rate = agent.loc['loan_interest_rate'] * 100
loan.FinancialParameters.loan_term = agent.loc['loan_term_yrs']
loan.FinancialParameters.mortgage = 0 # default value - standard loan (no mortgage)
loan.FinancialParameters.prop_tax_assessed_decline = 5 # PySAM default
loan.FinancialParameters.prop_tax_cost_assessed_percent = 95 # PySAM default
loan.FinancialParameters.property_tax_rate = 0 # PySAM default
loan.FinancialParameters.real_discount_rate = agent.loc['real_discount_rate'] * 100
loan.FinancialParameters.salvage_percentage = 0
loan.FinancialParameters.state_tax_rate = [(agent.loc['tax_rate'] * 100) * 0.3] # SAM default
loan.FinancialParameters.system_heat_rate = 0
######################################
###----------- CASHLOAN -----------###
###--------- SYSTEM COSTS ---------###
######################################
# System costs that are input to loan.SystemCosts will depend on system configuration (PV, batt, PV+batt)
# and are therefore specified in calc_system_performance()
system_costs = dict()
system_costs['system_capex_per_kw'] = agent.loc['system_capex_per_kw']
system_costs['system_om_per_kw'] = agent.loc['system_om_per_kw']
system_costs['system_variable_om_per_kw'] = agent.loc['system_variable_om_per_kw']
system_costs['cap_cost_multiplier'] = agent.loc['cap_cost_multiplier']
system_costs['batt_capex_per_kw'] = agent.loc['batt_capex_per_kw']
system_costs['batt_capex_per_kwh'] = agent.loc['batt_capex_per_kwh']
system_costs['batt_om_per_kw'] = agent.loc['batt_om_per_kw']
system_costs['batt_om_per_kwh'] = agent.loc['batt_om_per_kwh']
######################################
###----------- CASHLOAN -----------###
###---- DEPRECIATION PARAMETERS ---###
######################################
if agent.loc['sector_abbr'] == 'res':
loan.Depreciation.depr_fed_type = 0
loan.Depreciation.depr_sta_type = 0
else:
loan.Depreciation.depr_fed_type = 1
loan.Depreciation.depr_sta_type = 0
######################################
###----------- CASHLOAN -----------###
###----- TAX CREDIT INCENTIVES ----###
######################################
loan.TaxCreditIncentives.itc_fed_percent = agent.loc['itc_fraction_of_capex'] * 100
######################################
###----------- CASHLOAN -----------###
###-------- BATTERY SYSTEM --------###
######################################
loan.BatterySystem.batt_replacement_option = 2 # user schedule
batt_replacement_schedule = [0 for i in range(0, agent.loc['batt_lifetime_yrs'] - 1)] + [1]
loan.BatterySystem.batt_replacement_schedule = batt_replacement_schedule
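    # e.g. with batt_lifetime_yrs = 10 the schedule is [0]*9 + [1]: a single battery
    # replacement in the final (10th) year of the analysis (illustrative)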
######################################
###----------- CASHLOAN -----------###
###-------- SYSTEM OUTPUT ---------###
######################################
loan.SystemOutput.degradation = [agent.loc['pv_degradation_factor'] * 100]
######################################
###----------- CASHLOAN -----------###
###----------- LIFETIME -----------###
######################################
loan.Lifetime.system_use_lifetime_output = 0
# From dGen - calc_system_size_and_financial_performance()
max_size_load = agent.loc['load_kwh_per_customer_in_bin'] / agent.loc['naep']
max_size_roof = agent.loc['developable_roof_sqft'] * agent.loc['pv_kw_per_sqft']
max_system_kw = min(max_size_load, max_size_roof)
# set tolerance for minimize_scalar based on max_system_kw value
tol = min(0.25 * max_system_kw, 0.5)
# Calculate the PV system size that maximizes the agent's NPV, to a tolerance of 0.5 kW.
# Note that the optimization is technically minimizing negative NPV
    # ! As written, because of the tolerance this call will not necessarily return a system size of exactly 0 kW or max_system_kw even when one of those is optimal
res_with_batt = optimize.minimize_scalar(calc_system_performance,
args = (pv, utilityrate, loan, batt, system_costs, True, 0),
bounds = (0, max_system_kw),
method = 'bounded',
tol = tol)
# PySAM Module outputs with battery
batt_loan_outputs = loan.Outputs.export()
batt_util_outputs = utilityrate.Outputs.export()
batt_annual_energy_kwh = np.sum(utilityrate.SystemOutput.gen)
batt_kw = batt.Battery.batt_simple_kw
batt_kwh = batt.Battery.batt_simple_kwh
batt_dispatch_profile = batt.Outputs.batt_power # ?
# Run without battery
res_no_batt = optimize.minimize_scalar(calc_system_performance,
args = (pv, utilityrate, loan, batt, system_costs, False, 0),
bounds = (0, max_system_kw),
method = 'bounded',
tol = tol)
# PySAM Module outputs without battery
no_batt_loan_outputs = loan.Outputs.export()
no_batt_util_outputs = utilityrate.Outputs.export()
no_batt_annual_energy_kwh = np.sum(utilityrate.SystemOutput.gen)
# Retrieve NPVs of system with batt and system without batt
npv_w_batt = batt_loan_outputs['npv']
npv_no_batt = no_batt_loan_outputs['npv']
# Choose the system with the higher NPV
if npv_w_batt >= npv_no_batt:
system_kw = res_with_batt.x
annual_energy_production_kwh = batt_annual_energy_kwh
first_year_elec_bill_with_system = batt_util_outputs['elec_cost_with_system_year1']
first_year_elec_bill_without_system = batt_util_outputs['elec_cost_without_system_year1']
npv = npv_w_batt
payback = batt_loan_outputs['payback']
cash_flow = list(batt_loan_outputs['cf_payback_with_expenses']) # ?
cbi_total = batt_loan_outputs['cbi_total']
cbi_total_fed = batt_loan_outputs['cbi_total_fed']
cbi_total_oth = batt_loan_outputs['cbi_total_oth']
cbi_total_sta = batt_loan_outputs['cbi_total_sta']
cbi_total_uti = batt_loan_outputs['cbi_total_uti']
ibi_total = batt_loan_outputs['ibi_total']
ibi_total_fed = batt_loan_outputs['ibi_total_fed']
ibi_total_oth = batt_loan_outputs['ibi_total_oth']
ibi_total_sta = batt_loan_outputs['ibi_total_sta']
ibi_total_uti = batt_loan_outputs['ibi_total_uti']
cf_pbi_total = batt_loan_outputs['cf_pbi_total']
pbi_total_fed = batt_loan_outputs['cf_pbi_total_fed']
pbi_total_oth = batt_loan_outputs['cf_pbi_total_oth']
pbi_total_sta = batt_loan_outputs['cf_pbi_total_sta']
pbi_total_uti = batt_loan_outputs['cf_pbi_total_uti']
else:
system_kw = res_no_batt.x
annual_energy_production_kwh = no_batt_annual_energy_kwh
first_year_elec_bill_with_system = no_batt_util_outputs['elec_cost_with_system_year1']
first_year_elec_bill_without_system = no_batt_util_outputs['elec_cost_without_system_year1']
npv = npv_no_batt
payback = no_batt_loan_outputs['payback']
cash_flow = list(no_batt_loan_outputs['cf_payback_with_expenses'])
batt_kw = 0
batt_kwh = 0
batt_dispatch_profile = np.nan
cbi_total = no_batt_loan_outputs['cbi_total']
cbi_total_fed = no_batt_loan_outputs['cbi_total_fed']
cbi_total_oth = no_batt_loan_outputs['cbi_total_oth']
cbi_total_sta = no_batt_loan_outputs['cbi_total_sta']
cbi_total_uti = no_batt_loan_outputs['cbi_total_uti']
ibi_total = no_batt_loan_outputs['ibi_total']
ibi_total_fed = no_batt_loan_outputs['ibi_total_fed']
ibi_total_oth = no_batt_loan_outputs['ibi_total_oth']
ibi_total_sta = no_batt_loan_outputs['ibi_total_sta']
ibi_total_uti = no_batt_loan_outputs['ibi_total_uti']
cf_pbi_total = no_batt_loan_outputs['cf_pbi_total']
pbi_total_fed = no_batt_loan_outputs['cf_pbi_total_fed']
pbi_total_oth = no_batt_loan_outputs['cf_pbi_total_oth']
pbi_total_sta = no_batt_loan_outputs['cf_pbi_total_sta']
pbi_total_uti = no_batt_loan_outputs['cf_pbi_total_uti']
# change 0 value to 1 to avoid divide by zero errors
if first_year_elec_bill_without_system == 0:
first_year_elec_bill_without_system = 1.0
# Add outputs to agent df
naep = annual_energy_production_kwh / system_kw
first_year_elec_bill_savings = first_year_elec_bill_without_system - first_year_elec_bill_with_system
first_year_elec_bill_savings_frac = first_year_elec_bill_savings / first_year_elec_bill_without_system
avg_elec_price_cents_per_kwh = first_year_elec_bill_without_system / agent.loc['load_kwh_per_customer_in_bin']
agent.loc['system_kw'] = system_kw
agent.loc['npv'] = npv
agent.loc['payback_period'] = np.round(np.where(np.isnan(payback), 30.1, payback), 1).astype(float)
agent.loc['cash_flow'] = cash_flow
agent.loc['annual_energy_production_kwh'] = annual_energy_production_kwh
agent.loc['naep'] = naep
agent.loc['capacity_factor'] = agent.loc['naep'] / 8760
agent.loc['first_year_elec_bill_with_system'] = first_year_elec_bill_with_system
agent.loc['first_year_elec_bill_savings'] = first_year_elec_bill_savings
agent.loc['first_year_elec_bill_savings_frac'] = first_year_elec_bill_savings_frac
agent.loc['max_system_kw'] = max_system_kw
agent.loc['first_year_elec_bill_without_system'] = first_year_elec_bill_without_system
agent.loc['avg_elec_price_cents_per_kwh'] = avg_elec_price_cents_per_kwh
agent.loc['batt_kw'] = batt_kw
agent.loc['batt_kwh'] = batt_kwh
agent.loc['batt_dispatch_profile'] = batt_dispatch_profile
# Financial outputs (find out which ones to include):
agent.loc['cbi'] = np.array({'cbi_total': cbi_total,
'cbi_total_fed': cbi_total_fed,
'cbi_total_oth': cbi_total_oth,
'cbi_total_sta': cbi_total_sta,
'cbi_total_uti': cbi_total_uti
})
agent.loc['ibi'] = np.array({'ibi_total': ibi_total,
'ibi_total_fed': ibi_total_fed,
'ibi_total_oth': ibi_total_oth,
'ibi_total_sta': ibi_total_sta,
'ibi_total_uti': ibi_total_uti
})
agent.loc['pbi'] = np.array({'pbi_total': cf_pbi_total,
'pbi_total_fed': pbi_total_fed,
'pbi_total_oth': pbi_total_oth,
'pbi_total_sta': pbi_total_sta,
'pbi_total_uti': pbi_total_uti
})
agent.loc['cash_incentives'] = ''
agent.loc['export_tariff_results'] = ''
out_cols = ['agent_id',
'system_kw',
'batt_kw',
'batt_kwh',
'npv',
'payback_period',
'cash_flow',
'batt_dispatch_profile',
'annual_energy_production_kwh',
'naep',
'capacity_factor',
'first_year_elec_bill_with_system',
'first_year_elec_bill_savings',
'first_year_elec_bill_savings_frac',
'max_system_kw',
'first_year_elec_bill_without_system',
'avg_elec_price_cents_per_kwh',
'cbi',
'ibi',
'pbi',
'cash_incentives',
'export_tariff_results'
]
return agent[out_cols]
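# Illustrative call pattern (an assumption, not part of the original module): this routine
# operates on a single agent row, so it is typically mapped over an agent DataFrame, e.g.
#   results = agent_df.apply(calc_system_size_and_performance_pv, axis=1, sectors=sectors)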
#%%
def calc_financial_performance_wind(agent, sectors, rate_switch_table=None):
"""
Calculate bill savings and financial metrics based on pre-selected wind system size.
Parameters
----------
agent : 'pd.df'
individual agent object.
Returns
-------
agent: 'pd.df'
Adds several features to the agent dataframe:
- **agent_id**
- **system_kw** - system capacity selected by agent
- **npv** - net present value of system + storage
- **cash_flow** - array of annual cash flows from system adoption
- **batt_dispatch_profile** - array of hourly battery dispatch
- **annual_energy_production_kwh** - annual energy production (kwh) of system
- **naep** - normalized annual energy production (kwh/kW) of system
- **capacity_factor** - annual capacity factor
- **first_year_elec_bill_with_system** - first year electricity bill with adopted system ($/yr)
- **first_year_elec_bill_savings** - first year electricity bill savings with adopted system ($/yr)
- **first_year_elec_bill_savings_frac** - fraction of savings on electricity bill in first year of system adoption
- **max_system_kw** - maximum system size allowed as constrained by roof size or not exceeding annual consumption
- **first_year_elec_bill_without_system** - first year electricity bill without adopted system ($/yr)
- **avg_elec_price_cents_per_kwh** - first year electricity price (c/kwh)
- **cbi** - ndarray of capacity-based incentives applicable to agent
- **ibi** - ndarray of investment-based incentives applicable to agent
- **pbi** - ndarray of performance-based incentives applicable to agent
- **cash_incentives** - ndarray of cash-based incentives applicable to agent
- **export_tariff_result** - summary of structure of retail tariff applied to agent
"""
# Initialize new DB connection
model_settings = settings.init_model_settings()
con, cur = utilfunc.make_con(model_settings.pg_conn_string, model_settings.role)
# Extract load profile after scaling hourly load to annual total
load_profile_df = agent_mutation.elec.get_and_apply_agent_load_profiles(con, agent)
consumption_hourly = pd.Series(load_profile_df['consumption_hourly']).iloc[0]
del load_profile_df
# Using the scale offset factor of 1E6 for capacity factors
norm_scaled_wind_profiles_df = agent_mutation.elec.get_and_apply_normalized_hourly_resource_wind(con, agent)
generation_hourly = pd.Series(norm_scaled_wind_profiles_df['generation_hourly']).iloc[0]
del norm_scaled_wind_profiles_df
# Instantiate utilityrate5 model based on agent sector
if agent.loc['sector_abbr'] == 'res':
utilityrate = utility.default('WindPowerResidential')
else:
utilityrate = utility.default('WindPowerCommercial')
######################################
###--------- UTILITYRATE5 ---------###
###------- ELECTRICITYRATES -------###
######################################
# Use single monthly peak for TOU demand charge; options: 0=use TOU peak,1=use flat peak
utilityrate.ElectricityRates.TOU_demand_single_peak = 0 # ?
# Optionally enable/disable electricity_rate [years]
utilityrate.ElectricityRates.en_electricity_rates = 1
# Annual electricity rate escalation [%/year]
utilityrate.ElectricityRates.rate_escalation = [agent.loc['elec_price_escalator'] * 100] # convert decimal to %
# Enable time step sell rates [0/1]
utilityrate.ElectricityRates.ur_en_ts_sell_rate = 0
# Time step sell rates [0/1]
utilityrate.ElectricityRates.ur_ts_sell_rate = [0.]
# Set sell rate equal to buy rate [0/1]
utilityrate.ElectricityRates.ur_sell_eq_buy = 0
# Dictionary to map dGen compensation styles to PySAM options
nem_options = {'net metering':0, 'net billing':2, 'buy all sell all':4, 'none':2}
# Metering options [0=net energy metering,1=net energy metering with $ credits,2=net billing,3=net billing with carryover to next month,4=buy all - sell all]
utilityrate.ElectricityRates.ur_metering_option = nem_options[agent.loc['compensation_style']]
# Year end sell rate [$/kWh]
utilityrate.ElectricityRates.ur_nm_yearend_sell_rate = agent.loc['wholesale_elec_price_dollars_per_kwh'] * agent.loc['elec_price_multiplier']
if agent.loc['compensation_style']=='none':
net_billing_sell_rate = 0.
else:
net_billing_sell_rate = agent.loc['wholesale_elec_price_dollars_per_kwh'] * agent.loc['elec_price_multiplier']
# Restructure tariff object for PySAM compatibility
utilityrate = process_tariff(utilityrate, agent.loc['tariff_dict'], net_billing_sell_rate)
######################################
###--------- UTILITYRATE5 ---------###
###----------- LIFETIME -----------###
######################################
# Number of years in analysis [years]
utilityrate.Lifetime.analysis_period = agent.loc['economic_lifetime_yrs']
# Inflation rate [%]
utilityrate.Lifetime.inflation_rate = agent.loc['inflation_rate'] * 100
# Lifetime hourly system outputs [0/1]; Options: 0=hourly first year,1=hourly lifetime
utilityrate.Lifetime.system_use_lifetime_output = 0
######################################
###--------- UTILITYRATE5 ---------###
###-------- SYSTEM OUTPUT ---------###
######################################
# Annual energy degradation [%] -- Wind degradation already applied via 'derate_factor'
utilityrate.SystemOutput.degradation = [0.]
# System power generated [kW]
utilityrate.SystemOutput.gen = generation_hourly
######################################
###--------- UTILITYRATE5 ---------###
###-------- SYSTEM OUTPUT ---------###
######################################
# Electricity load (year 1) [kW]
utilityrate.Load.load = consumption_hourly
######################################
###--------- UTILITYRATE5 ---------###
###------------ EXECUTE -----------###
######################################
utilityrate.execute()
######################################
###----------- CASHLOAN -----------###
###----- FINANCIAL PARAMETERS -----###
######################################
# Initiate cashloan model and set market-specific variables
if agent.loc['sector_abbr'] == 'res':
loan = cashloan.default('WindPowerResidential')
loan.FinancialParameters.market = 0
else:
loan = cashloan.default('WindPowerCommercial')
loan.FinancialParameters.market = 1
loan.FinancialParameters.analysis_period = agent.loc['economic_lifetime_yrs']
loan.FinancialParameters.debt_fraction = 100 - (agent.loc['down_payment_fraction'] * 100)
loan.FinancialParameters.federal_tax_rate = [(agent.loc['tax_rate'] * 100) * 0.7] # SAM default
loan.FinancialParameters.inflation_rate = agent.loc['inflation_rate'] * 100
loan.FinancialParameters.insurance_rate = 0
loan.FinancialParameters.loan_rate = agent.loc['loan_interest_rate'] * 100
loan.FinancialParameters.loan_term = agent.loc['loan_term_yrs']
loan.FinancialParameters.mortgage = 0 # default value - standard loan (no mortgage)
loan.FinancialParameters.prop_tax_assessed_decline = 5 # PySAM default
loan.FinancialParameters.prop_tax_cost_assessed_percent = 95 # PySAM default
loan.FinancialParameters.property_tax_rate = 0 # PySAM default
loan.FinancialParameters.real_discount_rate = agent.loc['real_discount_rate'] * 100
loan.FinancialParameters.salvage_percentage = 0
loan.FinancialParameters.state_tax_rate = [(agent.loc['tax_rate'] * 100) * 0.3] # SAM default
loan.FinancialParameters.system_heat_rate = 0
loan.FinancialParameters.system_capacity = agent.loc['system_size_kw']
######################################
###----------- CASHLOAN -----------###
###--------- SYSTEM COSTS ---------###
######################################
# specify number of O&M types (0 = system only)
loan.SystemCosts.add_om_num_types = 0
# specify O&M variables
loan.SystemCosts.om_capacity = [agent.loc['system_om_per_kw'] + agent.loc['system_variable_om_per_kw']]
# Calculate and specify system costs
system_costs = agent.loc['system_capex_per_kw'] * agent.loc['system_size_kw']
batt_costs = 0
sales_tax = 0
direct_costs = (system_costs + batt_costs) * agent.loc['cap_cost_multiplier']
loan.SystemCosts.total_installed_cost = direct_costs + sales_tax
######################################
###----------- CASHLOAN -----------###
###---- DEPRECIATION PARAMETERS ---###
######################################
# Federal and State depreciation type
# Options: 0=none, 1=MACRS half year, 2=straight-line, 3=custom
if agent.loc['sector_abbr'] == 'res':
loan.Depreciation.depr_fed_type = 0
loan.Depreciation.depr_sta_type = 0
else:
loan.Depreciation.depr_fed_type = 1
loan.Depreciation.depr_sta_type = 0
######################################
###----------- CASHLOAN -----------###
###----- TAX CREDIT INCENTIVES ----###
######################################
# Federal percentage-based ITC percent [%]
loan.TaxCreditIncentives.itc_fed_percent = agent.loc['itc_fraction_of_capex'] * 100
######################################
###----------- CASHLOAN -----------###
###------ PAYMENT INCENTIVES ------###
######################################
# Specify payment incentives within Cashloan object
loan = process_incentives(loan, agent.loc['system_size_kw'], 0, 0, generation_hourly, agent)
######################################
###----------- CASHLOAN -----------###
###-------- BATTERY SYSTEM --------###
######################################
# Enable battery storage model [0/1]
loan.BatterySystem.en_batt = 0
######################################
###----------- CASHLOAN -----------###
###-------- SYSTEM OUTPUT ---------###
######################################
# Energy value [$] -- i.e. "bill savings"
loan.SystemOutput.annual_energy_value = utilityrate.Outputs.annual_energy_value
# Annual energy degradation [%] -- Wind degradation already applied via 'derate_factor'
loan.SystemOutput.degradation = [0.]
# Power generated by renewable resource [kW]
loan.SystemOutput.gen = utilityrate.SystemOutput.gen
######################################
###----------- CASHLOAN -----------###
###----------- LIFETIME -----------###
######################################
loan.Lifetime.system_use_lifetime_output = 0
######################################
###----------- CASHLOAN -----------###
###----- THIRD PARTY OWNERSHIP ----###
######################################
# Energy value [$]
loan.ThirdPartyOwnership.elec_cost_with_system = utilityrate.Outputs.elec_cost_with_system
# Energy value [$]
loan.ThirdPartyOwnership.elec_cost_without_system = utilityrate.Outputs.elec_cost_without_system
######################################
###-------- POSTPROCESSING --------###
###------------ RESULTS -----------###
######################################
# Get outputs from Utilityrate5 model
util_outputs = utilityrate.Outputs.export()
# Assign variables from Utilityrate5 outputs, others
system_kw = agent.loc['system_size_kw']
first_year_elec_bill_with_system = util_outputs['elec_cost_with_system_year1']
first_year_elec_bill_without_system = util_outputs['elec_cost_without_system_year1']
# PySAM cannot evaluate system sizes of 0 kW -- check and manually assign values if system_size_kw = 0
if system_kw > 0:
# Execute Cashloan model
loan.execute()
loan_outputs = loan.Outputs.export()
npv = loan_outputs['npv']
payback = loan_outputs['payback']
cash_flow = list(loan_outputs['cf_payback_with_expenses'])
cbi_total = loan_outputs['cbi_total']
cbi_total_fed = loan_outputs['cbi_total_fed']
cbi_total_oth = loan_outputs['cbi_total_oth']
cbi_total_sta = loan_outputs['cbi_total_sta']
cbi_total_uti = loan_outputs['cbi_total_uti']
ibi_total = loan_outputs['ibi_total']
ibi_total_fed = loan_outputs['ibi_total_fed']
ibi_total_oth = loan_outputs['ibi_total_oth']
ibi_total_sta = loan_outputs['ibi_total_sta']
ibi_total_uti = loan_outputs['ibi_total_uti']
cf_pbi_total = loan_outputs['cf_pbi_total']
pbi_total_fed = loan_outputs['cf_pbi_total_fed']
pbi_total_oth = loan_outputs['cf_pbi_total_oth']
pbi_total_sta = loan_outputs['cf_pbi_total_sta']
pbi_total_uti = loan_outputs['cf_pbi_total_uti']
else:
npv = 0.
payback = 30.1
cash_flow = [0.] * (agent.loc['economic_lifetime_yrs'] + 1)
cbi_total = cbi_total_fed = cbi_total_oth = cbi_total_sta = cbi_total_uti = 0.
ibi_total = ibi_total_fed = ibi_total_oth = ibi_total_sta = ibi_total_uti = 0.
cf_pbi_total = pbi_total_fed = pbi_total_oth = pbi_total_sta = pbi_total_uti = 0.
# change 0 value to 1 to avoid divide by zero errors
if first_year_elec_bill_without_system == 0:
first_year_elec_bill_without_system = 1.0
# Add outputs to agent df
first_year_elec_bill_savings = first_year_elec_bill_without_system - first_year_elec_bill_with_system
first_year_elec_bill_savings_frac = first_year_elec_bill_savings / first_year_elec_bill_without_system
avg_elec_price_cents_per_kwh = first_year_elec_bill_without_system / agent.loc['load_kwh_per_customer_in_bin']
# Specify variables to write to agent df -- also write placeholder batt values
agent.loc['system_kw'] = system_kw
agent.loc['npv'] = npv
agent.loc['payback_period'] = np.round(np.where(np.isnan(payback), 30.1, payback), 1).astype(float)
agent.loc['cash_flow'] = cash_flow
agent.loc['first_year_elec_bill_with_system'] = first_year_elec_bill_with_system
agent.loc['first_year_elec_bill_savings'] = first_year_elec_bill_savings
agent.loc['first_year_elec_bill_savings_frac'] = first_year_elec_bill_savings_frac
agent.loc['first_year_elec_bill_without_system'] = first_year_elec_bill_without_system
agent.loc['avg_elec_price_cents_per_kwh'] = avg_elec_price_cents_per_kwh
agent.loc['batt_kw'] = 0.
agent.loc['batt_kwh'] = 0.
agent.loc['batt_dispatch_profile'] = np.nan
# Specify incentive outputs
agent.loc['cbi'] = np.array({'cbi_total': cbi_total,
'cbi_total_fed': cbi_total_fed,
'cbi_total_oth': cbi_total_oth,
'cbi_total_sta': cbi_total_sta,
'cbi_total_uti': cbi_total_uti
})
agent.loc['ibi'] = np.array({'ibi_total': ibi_total,
'ibi_total_fed': ibi_total_fed,
'ibi_total_oth': ibi_total_oth,
'ibi_total_sta': ibi_total_sta,
'ibi_total_uti': ibi_total_uti
})
agent.loc['pbi'] = np.array({'pbi_total': cf_pbi_total,
'pbi_total_fed': pbi_total_fed,
'pbi_total_oth': pbi_total_oth,
'pbi_total_sta': pbi_total_sta,
'pbi_total_uti': pbi_total_uti
})
agent.loc['cash_incentives'] = ''
agent.loc['export_tariff_results'] = ''
out_cols = ['agent_id',
'system_kw',
'npv',
'payback_period',
'cash_flow',
'first_year_elec_bill_with_system',
'first_year_elec_bill_savings',
'first_year_elec_bill_savings_frac',
'first_year_elec_bill_without_system',
'avg_elec_price_cents_per_kwh',
'cbi',
'ibi',
'pbi',
'cash_incentives',
'export_tariff_results',
'batt_kw',
'batt_kwh',
'batt_dispatch_profile'
]
return agent[out_cols]
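# Illustrative call pattern (an assumption, not part of the original module): unlike the PV
# routine above, the wind variant expects 'system_size_kw' to already be present on the
# agent row, e.g.
#   agent_df = agent_df.assign(system_size_kw=10.0)
#   results = agent_df.apply(calc_financial_performance_wind, axis=1, sectors=sectors)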
#%%
def process_tariff(utilityrate, tariff_dict, net_billing_sell_rate):
"""
    Process the agent's rate json object (tariff_dict) so it conforms with PySAM input formatting, and apply it to the supplied Utilityrate5 model.
    Parameters
    ----------
    utilityrate : 'PySAM.Utilityrate5'
        Utilityrate5 model to populate.
    tariff_dict : 'dict'
        Agent's tariff attributes (agent.loc['tariff_dict']).
    net_billing_sell_rate : 'float'
        Sell rate [$/kWh] used for the sell-rate column of the energy charge table.
Returns
-------
utilityrate: 'PySAM.Utilityrate5'
"""
######################################
###--------- UTILITYRATE5 ---------###
###--- FIXED AND ANNUAL CHARGES ---###
######################################
# Monthly fixed charge [$]
utilityrate.ElectricityRates.ur_monthly_fixed_charge = tariff_dict['fixed_charge']
# Annual minimum charge [$]
utilityrate.ElectricityRates.ur_annual_min_charge = 0. # not currently tracked in URDB rate attribute downloads
# Monthly minimum charge [$]
utilityrate.ElectricityRates.ur_monthly_min_charge = 0. # not currently tracked in URDB rate attribute downloads
######################################
###--------- UTILITYRATE5 ---------###
###-------- DEMAND CHARGES --------###
######################################
# Enable demand charge
utilityrate.ElectricityRates.ur_dc_enable = (tariff_dict['d_flat_exists']) | (tariff_dict['d_tou_exists'])
if utilityrate.ElectricityRates.ur_dc_enable:
if tariff_dict['d_flat_exists']:
# Reformat demand charge table from dGen format
n_periods = len(tariff_dict['d_flat_levels'][0])
n_tiers = len(tariff_dict['d_flat_levels'])
ur_dc_flat_mat = []
for period in range(n_periods):
for tier in range(n_tiers):
row = [period, tier+1, tariff_dict['d_flat_levels'][tier][period], tariff_dict['d_flat_prices'][tier][period]]
ur_dc_flat_mat.append(row)
# Demand rates (flat) table
utilityrate.ElectricityRates.ur_dc_flat_mat = ur_dc_flat_mat
if tariff_dict['d_tou_exists']:
# Reformat demand charge table from dGen format
n_periods = len(tariff_dict['d_tou_levels'][0])
n_tiers = len(tariff_dict['d_tou_levels'])
ur_dc_tou_mat = []
for period in range(n_periods):
for tier in range(n_tiers):
row = [period+1, tier+1, tariff_dict['d_tou_levels'][tier][period], tariff_dict['d_tou_prices'][tier][period]]
ur_dc_tou_mat.append(row)
# Demand rates (TOU) table
utilityrate.ElectricityRates.ur_dc_tou_mat = ur_dc_tou_mat
# Reformat 12x24 tables - original are indexed to 0, PySAM needs index starting at 1
d_wkday_12by24 = []
for m in range(len(tariff_dict['d_wkday_12by24'])):
row = [x+1 for x in tariff_dict['d_wkday_12by24'][m]]
d_wkday_12by24.append(row)
d_wkend_12by24 = []
for m in range(len(tariff_dict['d_wkend_12by24'])):
row = [x+1 for x in tariff_dict['d_wkend_12by24'][m]]
d_wkend_12by24.append(row)
# Demand charge weekday schedule
utilityrate.ElectricityRates.ur_dc_sched_weekday = d_wkday_12by24
# Demand charge weekend schedule
utilityrate.ElectricityRates.ur_dc_sched_weekend = d_wkend_12by24
######################################
###--------- UTILITYRATE5 ---------###
###-------- ENERGY CHARGES --------###
######################################
if tariff_dict['e_exists']:
# Dictionary to map dGen max usage units to PySAM options
max_usage_dict = {'kWh':0, 'kWh/kW':1, 'kWh daily':2, 'kWh/kW daily':3}
# If max usage units are 'kWh daily', divide max usage by 30 -- rate download procedure converts daily to monthly
modifier = 30. if tariff_dict['energy_rate_unit'] == 'kWh daily' else 1.
# Reformat energy charge table from dGen format
n_periods = len(tariff_dict['e_levels'][0])
n_tiers = len(tariff_dict['e_levels'])
ur_ec_tou_mat = []
for period in range(n_periods):
for tier in range(n_tiers):
row = [period+1, tier+1, tariff_dict['e_levels'][tier][period]/modifier, max_usage_dict[tariff_dict['energy_rate_unit']], tariff_dict['e_prices'][tier][period], net_billing_sell_rate]
ur_ec_tou_mat.append(row)
# Energy rates table
utilityrate.ElectricityRates.ur_ec_tou_mat = ur_ec_tou_mat
# Reformat 12x24 tables - original are indexed to 0, PySAM needs index starting at 1
e_wkday_12by24 = []
for m in range(len(tariff_dict['e_wkday_12by24'])):
row = [x+1 for x in tariff_dict['e_wkday_12by24'][m]]
e_wkday_12by24.append(row)
e_wkend_12by24 = []
for m in range(len(tariff_dict['e_wkend_12by24'])):
row = [x+1 for x in tariff_dict['e_wkend_12by24'][m]]
e_wkend_12by24.append(row)
# Energy charge weekday schedule
utilityrate.ElectricityRates.ur_ec_sched_weekday = e_wkday_12by24
# Energy charge weekend schedule
utilityrate.ElectricityRates.ur_ec_sched_weekend = e_wkend_12by24
return utilityrate
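# Worked example (illustrative): a single-period, single-tier energy rate of $0.12/kWh with
# a 1e9 kWh tier ceiling, 'kWh' max-usage units and a $0.03/kWh net billing sell rate is
# emitted as one ur_ec_tou_mat row in PySAM's
# [period, tier, max_usage, max_usage_units, buy_rate, sell_rate] layout:
#   [1, 1, 1000000000.0, 0, 0.12, 0.03]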
#%%
def process_incentives(loan, kw, batt_kw, batt_kwh, generation_hourly, agent):
######################################
###----------- CASHLOAN -----------###
###------ PAYMENT INCENTIVES ------###
######################################
# Read incentive dataframe from agent attributes
incentive_df = agent.loc['state_incentives']
# Check dtype of incentive_df - process incentives if pd.DataFrame, otherwise do not assign incentive values to cashloan
if isinstance(incentive_df, pd.DataFrame):
# Fill NaNs in incentive_df - assume max incentive duration of 5 years and max incentive value of $10,000
incentive_df = incentive_df.fillna(value={'incentive_duration_yrs' : 5, 'max_incentive_usd' : 10000})
# Filter for CBI's in incentive_df
cbi_df = (incentive_df.loc[pd.notnull(incentive_df['cbi_usd_p_w'])]
.sort_values(['cbi_usd_p_w'], axis=0, ascending=False)
.reset_index(drop=True)
)
# For multiple CBIs that are applicable to the agent, cap at 2 and use PySAM's "state" and "other" option
if len(cbi_df) == 1:
loan.PaymentIncentives.cbi_sta_amount = cbi_df['cbi_usd_p_w'].iloc[0]
loan.PaymentIncentives.cbi_sta_deprbas_fed = 0
loan.PaymentIncentives.cbi_sta_deprbas_sta = 0
loan.PaymentIncentives.cbi_sta_maxvalue = cbi_df['max_incentive_usd'].iloc[0]
loan.PaymentIncentives.cbi_sta_tax_fed = 0
loan.PaymentIncentives.cbi_sta_tax_sta = 0
elif len(cbi_df) >= 2:
loan.PaymentIncentives.cbi_sta_amount = cbi_df['cbi_usd_p_w'].iloc[0]
loan.PaymentIncentives.cbi_sta_deprbas_fed = 0
loan.PaymentIncentives.cbi_sta_deprbas_sta = 0
loan.PaymentIncentives.cbi_sta_maxvalue = cbi_df['max_incentive_usd'].iloc[0]
loan.PaymentIncentives.cbi_sta_tax_fed = 1
loan.PaymentIncentives.cbi_sta_tax_sta = 1
loan.PaymentIncentives.cbi_oth_amount = cbi_df['cbi_usd_p_w'].iloc[1]
loan.PaymentIncentives.cbi_oth_deprbas_fed = 0
loan.PaymentIncentives.cbi_oth_deprbas_sta = 0
loan.PaymentIncentives.cbi_oth_maxvalue = cbi_df['max_incentive_usd'].iloc[1]
loan.PaymentIncentives.cbi_oth_tax_fed = 1
loan.PaymentIncentives.cbi_oth_tax_sta = 1
else:
pass
# Filter for PBI's in incentive_df
pbi_df = (incentive_df.loc[pd.notnull(incentive_df['pbi_usd_p_kwh'])]
.sort_values(['pbi_usd_p_kwh'], axis=0, ascending=False)
.reset_index(drop=True)
)
# For multiple PBIs that are applicable to the agent, cap at 2 and use PySAM's "state" and "other" option
if len(pbi_df) == 1:
            # Amount input [$/kWh] requires sequence -- repeat pbi_usd_p_kwh using incentive_duration_yrs
loan.PaymentIncentives.pbi_sta_amount = [pbi_df['pbi_usd_p_kwh'].iloc[0]] * int(pbi_df['incentive_duration_yrs'].iloc[0])
loan.PaymentIncentives.pbi_sta_escal = 0.
loan.PaymentIncentives.pbi_sta_tax_fed = 1
loan.PaymentIncentives.pbi_sta_tax_sta = 1
loan.PaymentIncentives.pbi_sta_term = pbi_df['incentive_duration_yrs'].iloc[0]
elif len(pbi_df) >= 2:
            # Amount input [$/kWh] requires sequence -- repeat pbi_usd_p_kwh using incentive_duration_yrs
loan.PaymentIncentives.pbi_sta_amount = [pbi_df['pbi_usd_p_kwh'].iloc[0]] * int(pbi_df['incentive_duration_yrs'].iloc[0])
loan.PaymentIncentives.pbi_sta_escal = 0.
loan.PaymentIncentives.pbi_sta_tax_fed = 1
loan.PaymentIncentives.pbi_sta_tax_sta = 1
loan.PaymentIncentives.pbi_sta_term = pbi_df['incentive_duration_yrs'].iloc[0]
            # Amount input [$/kWh] requires sequence -- repeat pbi_usd_p_kwh using incentive_duration_yrs
loan.PaymentIncentives.pbi_oth_amount = [pbi_df['pbi_usd_p_kwh'].iloc[1]] * int(pbi_df['incentive_duration_yrs'].iloc[1])
loan.PaymentIncentives.pbi_oth_escal = 0.
loan.PaymentIncentives.pbi_oth_tax_fed = 1
loan.PaymentIncentives.pbi_oth_tax_sta = 1
loan.PaymentIncentives.pbi_oth_term = pbi_df['incentive_duration_yrs'].iloc[1]
else:
pass
# Filter for IBI's in incentive_df
ibi_df = (incentive_df.loc[pd.notnull(incentive_df['ibi_pct'])]
.sort_values(['ibi_pct'], axis=0, ascending=False)
.reset_index(drop=True)
)
# For multiple IBIs that are applicable to the agent, cap at 2 and use PySAM's "state" and "other" option
# NOTE: this specifies IBI percentage, instead of IBI absolute amount
if len(ibi_df) == 1:
loan.PaymentIncentives.ibi_sta_percent = ibi_df['ibi_pct'].iloc[0]
loan.PaymentIncentives.ibi_sta_percent_deprbas_fed = 0
loan.PaymentIncentives.ibi_sta_percent_deprbas_sta = 0
loan.PaymentIncentives.ibi_sta_percent_maxvalue = ibi_df['max_incentive_usd'].iloc[0]
loan.PaymentIncentives.ibi_sta_percent_tax_fed = 1
loan.PaymentIncentives.ibi_sta_percent_tax_sta = 1
elif len(ibi_df) >= 2:
loan.PaymentIncentives.ibi_sta_percent = ibi_df['ibi_pct'].iloc[0]
loan.PaymentIncentives.ibi_sta_percent_deprbas_fed = 0
loan.PaymentIncentives.ibi_sta_percent_deprbas_sta = 0
loan.PaymentIncentives.ibi_sta_percent_maxvalue = ibi_df['max_incentive_usd'].iloc[0]
loan.PaymentIncentives.ibi_sta_percent_tax_fed = 1
loan.PaymentIncentives.ibi_sta_percent_tax_sta = 1
loan.PaymentIncentives.ibi_oth_percent = ibi_df['ibi_pct'].iloc[1]
loan.PaymentIncentives.ibi_oth_percent_deprbas_fed = 0
loan.PaymentIncentives.ibi_oth_percent_deprbas_sta = 0
loan.PaymentIncentives.ibi_oth_percent_maxvalue = ibi_df['max_incentive_usd'].iloc[1]
loan.PaymentIncentives.ibi_oth_percent_tax_fed = 1
loan.PaymentIncentives.ibi_oth_percent_tax_sta = 1
else:
pass
else:
pass
return loan
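# --- Illustrative sketch (added for clarity; not part of the original dGen code) ---
# process_incentives() above expects agent.loc['state_incentives'] to be a pandas
# DataFrame with at least the columns it filters on: cbi_usd_p_w, pbi_usd_p_kwh,
# ibi_pct, max_incentive_usd and incentive_duration_yrs. The frame below is a
# made-up example of that shape, not real incentive data.
example_state_incentives = pd.DataFrame({
    'cbi_usd_p_w':            [0.50, np.nan],    # capacity-based incentive [$/W]
    'pbi_usd_p_kwh':          [np.nan, 0.02],    # production-based incentive [$/kWh]
    'ibi_pct':                [np.nan, np.nan],  # investment-based incentive [fraction]
    'max_incentive_usd':      [5000, np.nan],
    'incentive_duration_yrs': [np.nan, 10],
})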
#%%
@decorators.fn_timer(logger = logger, tab_level = 2, prefix = '')
def calc_max_market_share(dataframe, max_market_share_df):
in_cols = list(dataframe.columns)
dataframe = dataframe.reset_index()
dataframe['business_model'] = 'host_owned'
dataframe['metric'] = 'payback_period'
# Convert metric value to integer as a primary key, then bound within max market share ranges
max_payback = max_market_share_df[max_market_share_df.metric == 'payback_period'].payback_period.max()
min_payback = max_market_share_df[max_market_share_df.metric == 'payback_period'].payback_period.min()
max_mbs = max_market_share_df[max_market_share_df.metric == 'percent_monthly_bill_savings'].payback_period.max()
min_mbs = max_market_share_df[max_market_share_df.metric == 'percent_monthly_bill_savings'].payback_period.min()
    # copy the metric values to a new column to store an edited version
payback_period_bounded = dataframe['payback_period'].values.copy()
# where the metric value exceeds the corresponding max market curve bounds, set the value to the corresponding bound
payback_period_bounded[np.where((dataframe.metric == 'payback_period') & (dataframe['payback_period'] < min_payback))] = min_payback
payback_period_bounded[np.where((dataframe.metric == 'payback_period') & (dataframe['payback_period'] > max_payback))] = max_payback
payback_period_bounded[np.where((dataframe.metric == 'percent_monthly_bill_savings') & (dataframe['payback_period'] < min_mbs))] = min_mbs
payback_period_bounded[np.where((dataframe.metric == 'percent_monthly_bill_savings') & (dataframe['payback_period'] > max_mbs))] = max_mbs
dataframe['payback_period_bounded'] = np.round(payback_period_bounded.astype(float), 1)
# scale and round to nearest int
dataframe['payback_period_as_factor'] = (dataframe['payback_period_bounded'] * 100).round().astype('int')
# add a scaled key to the max_market_share dataframe too
max_market_share_df['payback_period_as_factor'] = (max_market_share_df['payback_period'] * 100).round().astype('int')
# Join the max_market_share table and dataframe in order to select the ultimate mms based on the metric value.
dataframe = pd.merge(dataframe, max_market_share_df[['sector_abbr', 'max_market_share', 'metric', 'payback_period_as_factor', 'business_model']],
how = 'left', on = ['sector_abbr', 'metric','payback_period_as_factor','business_model'])
out_cols = in_cols + ['max_market_share', 'metric']
return dataframe[out_cols] | [
"settings.init_model_settings",
"PySAM.Utilityrate5.default",
"numpy.array",
"agent_mutation.elec.get_and_apply_agent_load_profiles",
"pandas.notnull",
"PySAM.Battwatts.default",
"numpy.where",
"PySAM.Cashloan.default",
"scipy.optimize.minimize_scalar",
"PySAM.BatteryTools.size_li_ion_battery",
... | [((411, 432), 'utility_functions.get_logger', 'utilfunc.get_logger', ([], {}), '()\n', (430, 432), True, 'import utility_functions as utilfunc\n'), ((55274, 55332), 'decorators.fn_timer', 'decorators.fn_timer', ([], {'logger': 'logger', 'tab_level': '(2)', 'prefix': '""""""'}), "(logger=logger, tab_level=2, prefix='')\n", (55293, 55332), False, 'import decorators\n'), ((9715, 9745), 'settings.init_model_settings', 'settings.init_model_settings', ([], {}), '()\n', (9743, 9745), False, 'import settings\n'), ((9761, 9830), 'utility_functions.make_con', 'utilfunc.make_con', (['model_settings.pg_conn_string', 'model_settings.role'], {}), '(model_settings.pg_conn_string, model_settings.role)\n', (9778, 9830), True, 'import utility_functions as utilfunc\n'), ((9949, 10014), 'agent_mutation.elec.get_and_apply_agent_load_profiles', 'agent_mutation.elec.get_and_apply_agent_load_profiles', (['con', 'agent'], {}), '(con, agent)\n', (10002, 10014), False, 'import agent_mutation\n'), ((10228, 10306), 'agent_mutation.elec.get_and_apply_normalized_hourly_resource_solar', 'agent_mutation.elec.get_and_apply_normalized_hourly_resource_solar', (['con', 'agent'], {}), '(con, agent)\n', (10294, 10306), False, 'import agent_mutation\n'), ((19336, 19507), 'scipy.optimize.minimize_scalar', 'optimize.minimize_scalar', (['calc_system_performance'], {'args': '(pv, utilityrate, loan, batt, system_costs, True, 0)', 'bounds': '(0, max_system_kw)', 'method': '"""bounded"""', 'tol': 'tol'}), "(calc_system_performance, args=(pv, utilityrate,\n loan, batt, system_costs, True, 0), bounds=(0, max_system_kw), method=\n 'bounded', tol=tol)\n", (19360, 19507), False, 'from scipy import optimize\n'), ((19856, 19892), 'numpy.sum', 'np.sum', (['utilityrate.SystemOutput.gen'], {}), '(utilityrate.SystemOutput.gen)\n', (19862, 19892), True, 'import numpy as np\n'), ((20081, 20253), 'scipy.optimize.minimize_scalar', 'optimize.minimize_scalar', (['calc_system_performance'], {'args': '(pv, utilityrate, loan, batt, system_costs, False, 0)', 'bounds': '(0, max_system_kw)', 'method': '"""bounded"""', 'tol': 'tol'}), "(calc_system_performance, args=(pv, utilityrate,\n loan, batt, system_costs, False, 0), bounds=(0, max_system_kw), method=\n 'bounded', tol=tol)\n", (20105, 20253), False, 'from scipy import optimize\n'), ((20607, 20643), 'numpy.sum', 'np.sum', (['utilityrate.SystemOutput.gen'], {}), '(utilityrate.SystemOutput.gen)\n', (20613, 20643), True, 'import numpy as np\n'), ((25301, 25471), 'numpy.array', 'np.array', (["{'cbi_total': cbi_total, 'cbi_total_fed': cbi_total_fed, 'cbi_total_oth':\n cbi_total_oth, 'cbi_total_sta': cbi_total_sta, 'cbi_total_uti':\n cbi_total_uti}"], {}), "({'cbi_total': cbi_total, 'cbi_total_fed': cbi_total_fed,\n 'cbi_total_oth': cbi_total_oth, 'cbi_total_sta': cbi_total_sta,\n 'cbi_total_uti': cbi_total_uti})\n", (25309, 25471), True, 'import numpy as np\n'), ((25547, 25717), 'numpy.array', 'np.array', (["{'ibi_total': ibi_total, 'ibi_total_fed': ibi_total_fed, 'ibi_total_oth':\n ibi_total_oth, 'ibi_total_sta': ibi_total_sta, 'ibi_total_uti':\n ibi_total_uti}"], {}), "({'ibi_total': ibi_total, 'ibi_total_fed': ibi_total_fed,\n 'ibi_total_oth': ibi_total_oth, 'ibi_total_sta': ibi_total_sta,\n 'ibi_total_uti': ibi_total_uti})\n", (25555, 25717), True, 'import numpy as np\n'), ((25793, 25966), 'numpy.array', 'np.array', (["{'pbi_total': cf_pbi_total, 'pbi_total_fed': pbi_total_fed, 'pbi_total_oth':\n pbi_total_oth, 'pbi_total_sta': pbi_total_sta, 'pbi_total_uti':\n pbi_total_uti}"], {}), "({'pbi_total': 
cf_pbi_total, 'pbi_total_fed': pbi_total_fed,\n 'pbi_total_oth': pbi_total_oth, 'pbi_total_sta': pbi_total_sta,\n 'pbi_total_uti': pbi_total_uti})\n", (25801, 25966), True, 'import numpy as np\n'), ((28935, 28965), 'settings.init_model_settings', 'settings.init_model_settings', ([], {}), '()\n', (28963, 28965), False, 'import settings\n'), ((28981, 29050), 'utility_functions.make_con', 'utilfunc.make_con', (['model_settings.pg_conn_string', 'model_settings.role'], {}), '(model_settings.pg_conn_string, model_settings.role)\n', (28998, 29050), True, 'import utility_functions as utilfunc\n'), ((29143, 29208), 'agent_mutation.elec.get_and_apply_agent_load_profiles', 'agent_mutation.elec.get_and_apply_agent_load_profiles', (['con', 'agent'], {}), '(con, agent)\n', (29196, 29208), False, 'import agent_mutation\n'), ((29415, 29492), 'agent_mutation.elec.get_and_apply_normalized_hourly_resource_wind', 'agent_mutation.elec.get_and_apply_normalized_hourly_resource_wind', (['con', 'agent'], {}), '(con, agent)\n', (29480, 29492), False, 'import agent_mutation\n'), ((41873, 42043), 'numpy.array', 'np.array', (["{'cbi_total': cbi_total, 'cbi_total_fed': cbi_total_fed, 'cbi_total_oth':\n cbi_total_oth, 'cbi_total_sta': cbi_total_sta, 'cbi_total_uti':\n cbi_total_uti}"], {}), "({'cbi_total': cbi_total, 'cbi_total_fed': cbi_total_fed,\n 'cbi_total_oth': cbi_total_oth, 'cbi_total_sta': cbi_total_sta,\n 'cbi_total_uti': cbi_total_uti})\n", (41881, 42043), True, 'import numpy as np\n'), ((42119, 42289), 'numpy.array', 'np.array', (["{'ibi_total': ibi_total, 'ibi_total_fed': ibi_total_fed, 'ibi_total_oth':\n ibi_total_oth, 'ibi_total_sta': ibi_total_sta, 'ibi_total_uti':\n ibi_total_uti}"], {}), "({'ibi_total': ibi_total, 'ibi_total_fed': ibi_total_fed,\n 'ibi_total_oth': ibi_total_oth, 'ibi_total_sta': ibi_total_sta,\n 'ibi_total_uti': ibi_total_uti})\n", (42127, 42289), True, 'import numpy as np\n'), ((42365, 42538), 'numpy.array', 'np.array', (["{'pbi_total': cf_pbi_total, 'pbi_total_fed': pbi_total_fed, 'pbi_total_oth':\n pbi_total_oth, 'pbi_total_sta': pbi_total_sta, 'pbi_total_uti':\n pbi_total_uti}"], {}), "({'pbi_total': cf_pbi_total, 'pbi_total_fed': pbi_total_fed,\n 'pbi_total_oth': pbi_total_oth, 'pbi_total_sta': pbi_total_sta,\n 'pbi_total_uti': pbi_total_uti})\n", (42373, 42538), True, 'import numpy as np\n'), ((57521, 57750), 'pandas.merge', 'pd.merge', (['dataframe', "max_market_share_df[['sector_abbr', 'max_market_share', 'metric',\n 'payback_period_as_factor', 'business_model']]"], {'how': '"""left"""', 'on': "['sector_abbr', 'metric', 'payback_period_as_factor', 'business_model']"}), "(dataframe, max_market_share_df[['sector_abbr', 'max_market_share',\n 'metric', 'payback_period_as_factor', 'business_model']], how='left',\n on=['sector_abbr', 'metric', 'payback_period_as_factor', 'business_model'])\n", (57529, 57750), True, 'import pandas as pd\n'), ((3624, 3667), 'PySAM.BatteryTools.size_li_ion_battery', 'batt_tools.size_li_ion_battery', (['batt_inputs'], {}), '(batt_inputs)\n', (3654, 3667), True, 'import PySAM.BatteryTools as batt_tools\n'), ((10337, 10405), 'pandas.Series', 'pd.Series', (["norm_scaled_pv_cf_profiles_df['solar_cf_profile'].iloc[0]"], {}), "(norm_scaled_pv_cf_profiles_df['solar_cf_profile'].iloc[0])\n", (10346, 10405), True, 'import pandas as pd\n'), ((10538, 10569), 'numpy.sum', 'np.sum', (["pv['generation_hourly']"], {}), "(pv['generation_hourly'])\n", (10544, 10569), True, 'import numpy as np\n'), ((10645, 10689), 'PySAM.Battwatts.default', 'battery.default', 
(['"""PVWattsBatteryResidential"""'], {}), "('PVWattsBatteryResidential')\n", (10660, 10689), True, 'import PySAM.Battwatts as battery\n'), ((10715, 10758), 'PySAM.Battwatts.default', 'battery.default', (['"""PVWattsBatteryCommercial"""'], {}), "('PVWattsBatteryCommercial')\n", (10730, 10758), True, 'import PySAM.Battwatts as battery\n'), ((10843, 10887), 'PySAM.Utilityrate5.default', 'utility.default', (['"""PVWattsBatteryResidential"""'], {}), "('PVWattsBatteryResidential')\n", (10858, 10887), True, 'import PySAM.Utilityrate5 as utility\n'), ((10920, 10963), 'PySAM.Utilityrate5.default', 'utility.default', (['"""PVWattsBatteryCommercial"""'], {}), "('PVWattsBatteryCommercial')\n", (10935, 10963), True, 'import PySAM.Utilityrate5 as utility\n'), ((14666, 14711), 'PySAM.Cashloan.default', 'cashloan.default', (['"""PVWattsBatteryResidential"""'], {}), "('PVWattsBatteryResidential')\n", (14682, 14711), True, 'import PySAM.Cashloan as cashloan\n'), ((14781, 14825), 'PySAM.Cashloan.default', 'cashloan.default', (['"""PVWattsBatteryCommercial"""'], {}), "('PVWattsBatteryCommercial')\n", (14797, 14825), True, 'import PySAM.Cashloan as cashloan\n'), ((29755, 29794), 'PySAM.Utilityrate5.default', 'utility.default', (['"""WindPowerResidential"""'], {}), "('WindPowerResidential')\n", (29770, 29794), True, 'import PySAM.Utilityrate5 as utility\n'), ((29827, 29865), 'PySAM.Utilityrate5.default', 'utility.default', (['"""WindPowerCommercial"""'], {}), "('WindPowerCommercial')\n", (29842, 29865), True, 'import PySAM.Utilityrate5 as utility\n'), ((33490, 33530), 'PySAM.Cashloan.default', 'cashloan.default', (['"""WindPowerResidential"""'], {}), "('WindPowerResidential')\n", (33506, 33530), True, 'import PySAM.Cashloan as cashloan\n'), ((33600, 33639), 'PySAM.Cashloan.default', 'cashloan.default', (['"""WindPowerCommercial"""'], {}), "('WindPowerCommercial')\n", (33616, 33639), True, 'import PySAM.Cashloan as cashloan\n'), ((56424, 56523), 'numpy.where', 'np.where', (["((dataframe.metric == 'payback_period') & (dataframe['payback_period'] <\n min_payback))"], {}), "((dataframe.metric == 'payback_period') & (dataframe[\n 'payback_period'] < min_payback))\n", (56432, 56523), True, 'import numpy as np\n'), ((56561, 56660), 'numpy.where', 'np.where', (["((dataframe.metric == 'payback_period') & (dataframe['payback_period'] >\n max_payback))"], {}), "((dataframe.metric == 'payback_period') & (dataframe[\n 'payback_period'] > max_payback))\n", (56569, 56660), True, 'import numpy as np\n'), ((56702, 56811), 'numpy.where', 'np.where', (["((dataframe.metric == 'percent_monthly_bill_savings') & (dataframe[\n 'payback_period'] < min_mbs))"], {}), "((dataframe.metric == 'percent_monthly_bill_savings') & (dataframe[\n 'payback_period'] < min_mbs))\n", (56710, 56811), True, 'import numpy as np\n'), ((56845, 56954), 'numpy.where', 'np.where', (["((dataframe.metric == 'percent_monthly_bill_savings') & (dataframe[\n 'payback_period'] > max_mbs))"], {}), "((dataframe.metric == 'percent_monthly_bill_savings') & (dataframe[\n 'payback_period'] > max_mbs))\n", (56853, 56954), True, 'import numpy as np\n'), ((10046, 10094), 'pandas.Series', 'pd.Series', (["load_profile_df['consumption_hourly']"], {}), "(load_profile_df['consumption_hourly'])\n", (10055, 10094), True, 'import pandas as pd\n'), ((29234, 29282), 'pandas.Series', 'pd.Series', (["load_profile_df['consumption_hourly']"], {}), "(load_profile_df['consumption_hourly'])\n", (29243, 29282), True, 'import pandas as pd\n'), ((29517, 29577), 'pandas.Series', 'pd.Series', 
(["norm_scaled_wind_profiles_df['generation_hourly']"], {}), "(norm_scaled_wind_profiles_df['generation_hourly'])\n", (29526, 29577), True, 'import pandas as pd\n'), ((24362, 24379), 'numpy.isnan', 'np.isnan', (['payback'], {}), '(payback)\n', (24370, 24379), True, 'import numpy as np\n'), ((41200, 41217), 'numpy.isnan', 'np.isnan', (['payback'], {}), '(payback)\n', (41208, 41217), True, 'import numpy as np\n'), ((49596, 49635), 'pandas.notnull', 'pd.notnull', (["incentive_df['cbi_usd_p_w']"], {}), "(incentive_df['cbi_usd_p_w'])\n", (49606, 49635), True, 'import pandas as pd\n'), ((51355, 51396), 'pandas.notnull', 'pd.notnull', (["incentive_df['pbi_usd_p_kwh']"], {}), "(incentive_df['pbi_usd_p_kwh'])\n", (51365, 51396), True, 'import pandas as pd\n'), ((53387, 53422), 'pandas.notnull', 'pd.notnull', (["incentive_df['ibi_pct']"], {}), "(incentive_df['ibi_pct'])\n", (53397, 53422), True, 'import pandas as pd\n')] |
# Copyright 2005-2010 Wesabe, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
fixtures = os.path.join(os.path.dirname(__file__) or '.', "fixtures")
def get_checking_stmt():
return _read_file("checking.ofx")
def get_savings_stmt():
return _read_file("savings.ofx")
def get_savings_with_self_closed_empty_tag_stmt():
return _read_file("savings_with_self_closed_empty_tag.ofx")
def get_creditcard_stmt():
return _read_file("creditcard.ofx")
def get_blank_memo_stmt():
return _read_file("blank_memo.ofx")
def get_tag_with_line_break_stmt():
return _read_file("tag_with_line_break.ofx")
def _read_file(filename):
return open(os.path.join(fixtures, filename), 'rU').read()
| [
"os.path.dirname",
"os.path.join"
] | [((617, 642), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (632, 642), False, 'import os\n'), ((1170, 1202), 'os.path.join', 'os.path.join', (['fixtures', 'filename'], {}), '(fixtures, filename)\n', (1182, 1202), False, 'import os\n')] |
"""doufo.convert
abstract class of `dataType` converters.
Example:
Todo:
Author:
"""
from .function import func
from functools import wraps, cmp_to_key
from multipledispatch import Dispatcher
from typing import Callable, TypeVar
__all__ = ['converters', 'convert_to', 'convert']
T = TypeVar('T')
B = TypeVar('B')
class ConvertersDict:
"""doufo.ConverterDict: to define dictionary-like class to store converters.
Note, this class is hidden, and been used as `converters`
Attributes:
`attr1` (type): Description
"""
def __init__(self):
"""initial as a empty `dictionary`"""
self.converters = {}
def sorted_converters_keys(self):
"""doufo.ConvertDict().sorted_converters_key: sort converter keys
sort key according to their relationship (if parent- and child-class)
or their hash value.
Args:
`self`
"""
keys = sorted(self.converters.keys(),
key=cmp_to_key(tuple_type_compare))
return {k: self.converters[k] for k in keys}
def register(self, src: type, tar: type) -> Callable[[T], B]:
"""doufo.ConverterDict().register(): A decorator factory to define typing converting decorator
Attributes:
`self`
`src` (`type`): source `type`,
`tar` (`type`): target `type`,
Returns:
            `f` (`Callable[[T], B]`): a decorator that registers the decorated function as a converter
"""
def deco(f):
self.converters[(src, tar)] = f
self.converters = self.sorted_converters_keys()
return f
return deco
def convert(self, src: type, tar: type) -> Callable[[T], B]:
""" doufo.ConvertDict().convert: define a converter from `type src` to `type tar`
Attibutes:
`self`
`src` (`type`): source `type`,
`tar` (`type`): target `type`,
Returns:
`converter` (`Callable[[T], B]`): converter from `type src` to `type tar`
"""
return self.converters[(src, tar)]
converters = ConvertersDict()
@func()
def convert_to(o, target_type):
"""doufo.convert_to: convert forward
Args:
`o` (`A`): any object
`target_type` (`type`): destination type
Returns:
        return (`target_type`): object `o` converted to `target_type`
Raises:
"""
return converters.convert(type(o), target_type)(o)
@func()
def convert(o, target_type):
"""doufo.convert: convert backwards
Args:
`o` (`A`): any object
`target_type` (`type`): destination type
Returns:
        return (`target_type`): object `o` converted to `target_type`
Raises:
"""
return converters.convert(type(o), target_type)(o)
def tuple_type_compare(types0, types1):
"""doufo.tuple_type_compare: compare two types
if `types0` is 'bigger' than `types1`, return negative (<0);
otherwise, return positive (>0). Here 'bigger' is defined by
    whether they are 'parent and child' (subclass relationship), or intuitively bigger.
Args:
types0 (`type`): types0
types1 (`type`): types1
Returns:
return (`int`): comparison results
Raises:
"""
compares = [single_type_compare(types0[0], types1[0]),
single_type_compare(types0[1], types1[1])]
if compares[0] != 0:
return compares[0]
if compares[1] != 0:
return compares[1]
if types0[0] is types1[0] and types0[1] is types1[1]:
return 0
return hash(types1) - hash(types0)
def single_type_compare(t0, t1):
if t0 is t1:
return 0
if issubclass(t0, t1):
return 1
if issubclass(t1, t0):
return -1
return 0
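# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# A hedged example of registering and looking up a converter; the int -> str
# converter below is invented purely for illustration.
@converters.register(int, str)
def _example_int_to_str(o):
    return str(o)
# Assuming the @func() wrapper keeps the plain call signature,
# convert_to(42, str) is expected to return '42' via the registered converter.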
| [
"functools.cmp_to_key",
"typing.TypeVar"
] | [((297, 309), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (304, 309), False, 'from typing import Callable, TypeVar\n'), ((314, 326), 'typing.TypeVar', 'TypeVar', (['"""B"""'], {}), "('B')\n", (321, 326), False, 'from typing import Callable, TypeVar\n'), ((1013, 1043), 'functools.cmp_to_key', 'cmp_to_key', (['tuple_type_compare'], {}), '(tuple_type_compare)\n', (1023, 1043), False, 'from functools import wraps, cmp_to_key\n')] |
import re, os, copy, inspect
PAREMETER_PATTERN = '{{%s}}'
def convert_value_for_environment(value: object) -> str:
if str(value).lower() == 'true': value = '1'
elif str(value).lower() == 'false': value = '0'
return str(value)
def set_environment_variables(environs:dict):
if environs:
for key, value in environs.items():
os.environ[key] = convert_value_for_environment(value)
def merge_configuration(configuration, source_configuration,replace=False,path=None):
    # Avoid a shared mutable default argument for the recursion path
    if path is None:
        path = []
    for key2, value2 in source_configuration.items():
if not key2 in configuration:
configuration[key2] = value2
elif replace:
if type(value2) == dict:
path.append(key2)
merge_configuration(configuration[key2],source_configuration[key2],replace=replace,path=path)
#elif type(value2) == list:
else:
configuration[key2] = value2
def get_parameters(content):
title_regex = r'\{\{.*?\}\}'
founds = re.findall(title_regex,content)
return founds
def get_mails_parameters(content):
title_regex = r'\[\[.*?\]\]'
founds = re.findall(title_regex,content)
return founds
def show(config,level=0):
for key, cf in config.items():
val = '' if type(cf) == dict else str(cf)
print('{} {:30} {}'.format(' '*level,key,val))
if type(cf) == dict:
show(cf,level + 1)
def set_configs_paths(config,paths,parameters_values,configurations):
levels = list(set([len(x) for x in paths]))
for level in levels:
for i in range(len(parameters_values)):
if len(paths[i]) == level:
set_configurations_path(config, paths[i], parameters_values[i], configurations)
def set_configurations_path(config,path,parameters,parameters_values):
if len(path) == 1:
matchs = get_configs_matchs(config[path[0]])
if len(matchs) != 0 and matchs[0] in parameters_values:
sub_configuration = parameters_values[matchs[0]]
replacement_data = {x:y for x,y in sub_configuration.data.items() if x not in sub_configuration.tmp}
config[path[0]] = replacement_data
return
sub_config = config[path[0]]
path = path[1:]
set_configurations_path(sub_config,path,parameters,parameters_values)
def set_paths(config,paths,parameters_values,parameters_value):
levels = list(set([len(x) for x in paths]))
for level in levels:
for i in range(len(parameters_values)):
if len(paths[i]) == level:
set_path(config, paths[i], parameters_values[i], parameters_value)
def set_path(config,path,parameters,parameters_values):
if len(path) == 1:
value = config[path[0]]
for parameter in parameters:
if parameter in parameters_values:
parameter_value = parameters_values[parameter]
if value == PAREMETER_PATTERN%parameter:
value = parameter_value
elif PAREMETER_PATTERN%parameter in str(value):
value = value.replace(PAREMETER_PATTERN%parameter,str(parameter_value))
config[path[0]] = value
return
sub_config = config[path[0]]
path = path[1:]
set_path(sub_config,path,parameters,parameters_values)
def fill_config(configuration,source_configuration):
for key, value in configuration.items():
for key2, value2 in source_configuration.items():
if type(value) != dict and PAREMETER_PATTERN%key2 in str(value):
value = str(value).replace(PAREMETER_PATTERN%key2,value2)
configuration[key] = value
def process_configuration(configuration,source_configuration,path=None):
if path is None:
fill_config(configuration,source_configuration)
for key in source_configuration:
fill_config(configuration,source_configuration[key])
def search_it(nested, target,path=None):
found, paths = [], []
if path is None:
path = []
if type(nested) == dict:
for key, value in nested.items():
next_path = copy.copy(path)
next_path.append(key)
if isinstance(target,list) and len(target) == 1:
target = target[0]
if isinstance(target,list):
if key == target[0]:
f, p = search_it(value, target[1:],next_path)
found.extend(f)
paths.extend(p)
else:
if key == target:
found.append(value)
paths.append(path)
if isinstance(value, dict):
f, p = search_it(value, target,next_path)
found.extend(f)
paths.extend(p)
elif isinstance(value, list):
i = 0
for item in value:
if isinstance(item, dict):
path.append(i)
f, p = search_it(item, target, next_path)
found.extend(f)
paths.extend(p)
"""else:
if key == target:
path.append(key)
found.append(value)"""
i += 1
"""elif type(nested) == list:
for value in nested:
if isinstance(item, dict):
path.append(i)
f, p = search_it(item, target, next_path)
found.extend(f)
paths.extend(p)"""
return found, paths
def get_configs_matchs(string):
return re.findall(r"\$config\(([^\$]+)\)",string)
def check_value(value,found,paths,object_type,next_path):
parameters = get_parameters(value)
if object_type == 'parameters':
results = [ x.replace('{{','').replace('}}','') for x in parameters]
else:
results = get_configs_matchs(value)
if len(results) != 0:
found.append( results)
paths.append(next_path)
def get_object_from_config(nested,path=None,object_type='parameters'):
found, paths = [], []
if path is None:
path = []
if isinstance(nested, dict):
for key, value in nested.items():
next_path = copy.copy(path)
next_path.append(key)
if isinstance(value, str):
check_value(value,found,paths,object_type,next_path)
elif isinstance(value, dict):
f, p = get_object_from_config(value, next_path,object_type)
found.extend(f)
paths.extend(p)
elif isinstance(value, list):
f, p = get_object_from_config(value, next_path,object_type)
found.extend(f)
paths.extend(p)
elif isinstance(nested, list):
for i, value in enumerate(nested):
next_path = copy.copy(path)
next_path.append(i)
if isinstance(value, str):
check_value(value,found,paths,object_type,next_path)
elif isinstance(value, dict):
f, p = get_object_from_config(value, next_path,object_type)
found.extend(f)
paths.extend(p)
elif isinstance(value, list):
f, p = get_object_from_config(value, next_path,object_type)
found.extend(f)
paths.extend(p)
return found, paths
def get_parameters_from_config(nested, path=None):
return get_object_from_config(nested,path=path,object_type='parameters')
def get_configs_from_config(nested, path=None):
return get_object_from_config(nested,path=path,object_type='configs')
def get_values_for_parameters(config, parameter_name,path=None):
"""Get the values associated to the parameter in the configuration
Arguments:
config {json dict} -- configuration as a json dict
parameter_name {str} -- parameter_name to search
Keyword Arguments:
path {list} -- the current path in the json dict as a list (default: {None})
Returns:
tuple -- a tuple of the parameter values and the parameter path
"""
found, paths = [], []
if path is None:
path = []
for key, value in config.items():
next_path = copy.copy(path)
next_path.append(key)
if key == parameter_name:
found.append(value)
paths.append(path)
if isinstance(value, dict):
f, p = search_it(value, parameter_name, next_path)
found.extend(f)
paths.extend(p)
elif isinstance(value, list):
i = 0
for item in value:
if isinstance(item, dict):
path.append(i)
f, p = search_it(item, parameter_name, next_path)
found.extend(f)
paths.extend(p)
i += 1
return found, paths
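# --- Illustrative sketch (added for clarity; not part of the original module) ---
# A hedged example of get_values_for_parameters() on a made-up nested configuration;
# the keys below are invented for illustration only.
_example_config = {'database': {'host': 'localhost', 'replica': {'host': 'replica-1'}}}
_example_values, _example_paths = get_values_for_parameters(_example_config, 'host')
# _example_values -> ['localhost', 'replica-1']
# _example_paths  -> [['database'], ['database', 'replica']] (path to the parent of each match)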
LIMIT = 100
def set_parameter_value(parameters_value,l):
if l > 10:
        print('ERROR: replacement limit exceeded for parameter %s'%parameters_value)
exit()
l += 1
replaced = False
keys = list(parameters_value.keys())
for key, value in parameters_value.items():
for k in keys:
if "{{%s}}"%k in str(value) and "{{%s}}"%k != value:
i = 0
value = replace_parameter(k,value,parameters_value[k],i)
replaced = True
parameters_value[key] = value
if replaced:
set_parameter_value(parameters_value,l)
def replace_parameter(key,value,replace_value,i):
if i > LIMIT:
        print('ERROR: replacement limit exceeded for parameter %s'%key)
exit()
i += 1
if isinstance(value,dict):
replacements = {}
for k, v in value.items():
vr = replace_parameter(key,v,replace_value,i)
if v != vr:
replacements[k] = vr
for k, newv in replacements.items():
value[k] = newv
elif isinstance(value,list):
replacements = {}
i = 0
for v in value:
vr = replace_parameter(key,v,replace_value,i)
if v != vr:
replacements[i] = vr
i += 1
for i, newv in replacements.items():
value[i] = newv
else:
if "{{%s}}"%key == value:
value = replace_value
elif "{{%s}}"%key in str(value):
value = value.replace("{{%s}}"%key,replace_value)
return value
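# --- Illustrative sketch (added for clarity; not part of the original module) ---
# A hedged example of the {{parameter}} substitution performed by set_parameter_value();
# the parameter names and values below are invented for illustration.
_example_params = {'root': '/data', 'logs': '{{root}}/logs', 'archive': '{{logs}}/old'}
set_parameter_value(_example_params, 0)
# _example_params -> {'root': '/data', 'logs': '/data/logs', 'archive': '/data/logs/old'}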
def ensure_path(dict_object,paths=[],value=None):
if len(paths) == 0:
return
if not paths[0] in dict_object:
dict_object[paths[0]] = {}
if len(paths) == 1 and value is not None:
dict_object[paths[0]] = value
return
ensure_path(dict_object[paths[0]],paths[1:],value=value)
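# --- Illustrative sketch (added for clarity; not part of the original module) ---
# A hedged example of ensure_path() creating nested keys on demand; the key names
# are invented for illustration.
_example_tree = {}
ensure_path(_example_tree, ['server', 'http', 'port'], value=8080)
# _example_tree -> {'server': {'http': {'port': 8080}}}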
def ensure_filepath(name:str,filepath:str,root:str,filename:str):
name = name.split('/')[-1]
if filepath is not None:
if not filepath[-5:] == '.json':
filepath = filepath + '.json'
filename = os.path.basename(filepath).split('.')[0]
if root is None:
root = os.path.abspath(filepath).replace('%s.json'%filename,'')
if name == 'config':
name = filename
if root is None:
stack = inspect.stack()
parentframe = stack[1]
module = inspect.getmodule(parentframe[0])
filename_frame = parentframe.filename
current_path = os.getcwd()
root = current_path
if filename is None:
filename = name.lower()
filepath = root + os.sep + filename + '.json'
return name, filepath, root, filename | [
"os.getcwd",
"copy.copy",
"os.path.basename",
"os.path.abspath",
"re.findall"
] | [((1012, 1044), 're.findall', 're.findall', (['title_regex', 'content'], {}), '(title_regex, content)\n', (1022, 1044), False, 'import re, os, copy\n'), ((1149, 1181), 're.findall', 're.findall', (['title_regex', 'content'], {}), '(title_regex, content)\n', (1159, 1181), False, 'import re, os, copy\n'), ((5823, 5869), 're.findall', 're.findall', (['"""\\\\$config\\\\(([^\\\\$]+)\\\\)"""', 'string'], {}), "('\\\\$config\\\\(([^\\\\$]+)\\\\)', string)\n", (5833, 5869), False, 'import re, os, copy\n'), ((8509, 8524), 'copy.copy', 'copy.copy', (['path'], {}), '(path)\n', (8518, 8524), False, 'import re, os, copy\n'), ((11783, 11794), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11792, 11794), False, 'import re, os, copy\n'), ((4291, 4306), 'copy.copy', 'copy.copy', (['path'], {}), '(path)\n', (4300, 4306), False, 'import re, os, copy\n'), ((6474, 6489), 'copy.copy', 'copy.copy', (['path'], {}), '(path)\n', (6483, 6489), False, 'import re, os, copy\n'), ((7101, 7116), 'copy.copy', 'copy.copy', (['path'], {}), '(path)\n', (7110, 7116), False, 'import re, os, copy\n'), ((11338, 11364), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (11354, 11364), False, 'import re, os, copy\n'), ((11430, 11455), 'os.path.abspath', 'os.path.abspath', (['filepath'], {}), '(filepath)\n', (11445, 11455), False, 'import re, os, copy\n')] |
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
# (c) Copyright IBM Corp. 2010, 2018. All Rights Reserved.
""" Resilient functions component to run a Cisco AMP for endpoints query - get events """
# Set up:
# Destination: a Queue named "amp_get_events".
# Manual Action: Execute a REST query against a Cisco AMP for endpoints server.
import json
import logging
from datetime import datetime
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from fn_cisco_amp4ep.lib.amp_client import Ampclient
from fn_cisco_amp4ep.lib.helpers import validate_opts, validate_params
from fn_cisco_amp4ep.lib.amp_ratelimit import AmpRateLimit
RATE_LIMITER = AmpRateLimit()
class FunctionComponent(ResilientComponent):
"""Component that implements Resilient function 'fn_amp_get_events of package fn_cisco_amp4ep.
The Function takes the following parameters:
amp_detection_sha256, amp_application_sha256, amp_conn_guid, amp_group_guid, amp_start_date,
amp_event_type, amp_limit, amp_offset
An example of a set of query parameter might look like the following:
amp_detection_sha256 = None
amp_application_sha256 = None
amp_conn_guid = None
amp_group_guid = None
amp_start_date = None
amp_event_type = None
amp_limit = None
amp_offset = None
The function will execute a REST api request against a Cisco AMP for endpoints server and returns a result in JSON
format similar to the following.
{
"input_params": {"detection_sha256": null, "application_sha256": null, "connector_guid": null,
"group_guid": null, "start_date": null, "event_type": null, "limit": null, "offset": null},
"response": {
"version": "v1.2.0",
"data": [
{
"id": 6455442249407791000,
"timestamp": 1503024774,
"timestamp_nanoseconds": 98000000,
"date": "2017-08-18T02:52:54+00:00",
"event_type": "Threat Detected",
"event_type_id": 1090519054,
"detection": "benign_qa_testware7",
"detection_id": "6455442249407791109",
"group_guids": [
"b077d6bc-bbdf-42f7-8838-a06053fbd98a"
],
"computer": {
"connector_guid": "af73d9d5-ddc5-4c93-9c6d-d5e6b5c5eb01",
"hostname": "WIN-S1AC1PI6L5L",
"external_ip": "10.200.65.31",
"user": "johndoe@WIN-S1AC1PI6L5L",
"active": true,
"network_addresses": [
{
"ip": "10.0.2.15",
"mac": "08:00:27:85:28:61"
}
],
"links": {
"computer": "https://api.amp.cisco.com/v1/computers/af73d9d5-ddc5-4c93-9c6d-d5e6b5c5eb01",
"trajectory": "https://api.amp.cisco.com/v1/computers/af73d9d5-ddc5-4c93-9c6d-d5e6b5c5eb01/trajectory",
"group": "https://api.amp.cisco.com/v1/groups/b077d6bc-bbdf-42f7-8838-a06053fbd98a"
}
},
"file": {
"disposition": "Unknown",
"file_name": "file.zip",
"file_path": "\\\\?\\C:\\Users\\johndoe\\Downloads\\file.zip",
"identity": {
"sha256": "f8a6a244138cb1e2f044f63f3dc42beeb555da892bbd7a121274498cbdfc9ad5",
"sha1": "20eeee16345e0c1283f7b500126350cb938b8570",
"md5": "6853839cde69359049ae6f7bd3ae86d7"
},
"archived_file": {
"disposition": "Malicious",
"identity": {
"sha256": "46679a50632d05b99683a14b91a69ce908de1673fbb71e9cd325e5685fcd7e49"
}
},
"parent": {
"process_id": 3416,
"disposition": "Clean",
"file_name": "explorer.exe",
"identity": {
"sha256": "80ef843fa78c33b511394a9c7535a9cbace1deb2270e86ee4ad2faffa5b1e7d2",
"sha1": "ea97227d34b8526055a543ade7d18587a927f6a3",
"md5": "15bc38a7492befe831966adb477cf76f"
}
}
}
},
...
...
],
"metadata": {
"results": {
"index": 0,
"total": 0,
"items_per_page": 500,
"current_item_count": 0
},
"links": {
"self": "https://api.amp.cisco.com/v1/events"
}
}
},
"query_execution_time": "2018-10-09 11:05:12"
}
"""
def __init__(self, opts):
"""constructor provides access to the configuration options"""
super(FunctionComponent, self).__init__(opts)
self.options = opts.get("fn_cisco_amp4ep", {})
validate_opts(self)
@handler("reload")
def _reload(self, event, opts):
"""Configuration options have changed, save new values"""
self.options = opts.get("fn_cisco_amp4ep", {})
validate_opts(self)
@function("fn_amp_get_events")
def _fn_amp_get_events_function(self, event, *args, **kwargs):
"""Function: Returns a list of events."""
        log = logging.getLogger(__name__)
        try:
# Get the function parameters:
amp_detection_sha256 = kwargs.get("amp_detection_sha256") # text
amp_application_sha256 = kwargs.get("amp_application_sha256") # text
amp_conn_guid = kwargs.get("amp_conn_guid") # text
amp_group_guid = kwargs.get("amp_group_guid") # text
amp_start_date = kwargs.get("amp_start_date") # datetimepicker
amp_event_type = kwargs.get("amp_event_type") # text
amp_severity = self.get_select_param(kwargs.get("amp_severity")) # select, values: "High","Medium","Low"
amp_limit = kwargs.get("amp_limit") # number
amp_offset = kwargs.get("amp_offset") # number
log.info("amp_detection_sha256: %s", amp_detection_sha256)
log.info("amp_application_sha256: %s", amp_application_sha256)
log.info("amp_conn_guid: %s", amp_conn_guid)
log.info("amp_group_guid: %s", amp_group_guid)
log.info("amp_start_date: %s", amp_start_date)
log.info("amp_event_type: %s", amp_event_type)
log.info("amp_severity: %s", amp_severity)
log.info("amp_limit: %s", amp_limit)
log.info("amp_offset: %s", amp_offset)
yield StatusMessage("Running Cisco AMP get events query...")
params = {"detection_sha256": amp_detection_sha256, "application_sha256": amp_application_sha256,
"connector_guid": amp_conn_guid, "group_guid": amp_group_guid, "start_date": amp_start_date,
"event_type": amp_event_type, "severity": amp_severity, "limit": amp_limit, "offset": amp_offset}
validate_params(params)
amp = Ampclient(self.options, RATE_LIMITER)
rtn = amp.get_paginated_total(amp.get_events, **params)
query_execution_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
# Add in "query_execution_time" and "ip_address" to result to facilitate post-processing.
results = {"response": rtn, "query_execution_time": query_execution_time, "input_params": params}
yield StatusMessage("Returning 'events' results")
log.debug(json.dumps(results))
# Produce a FunctionResult with the results
yield FunctionResult(results)
except Exception:
log.exception("Exception in Resilient Function for Cisco AMP for endpoints.")
yield FunctionError() | [
"logging.getLogger",
"resilient_circuits.handler",
"json.dumps",
"fn_cisco_amp4ep.lib.amp_client.Ampclient",
"resilient_circuits.FunctionError",
"resilient_circuits.StatusMessage",
"datetime.datetime.now",
"fn_cisco_amp4ep.lib.amp_ratelimit.AmpRateLimit",
"resilient_circuits.function",
"fn_cisco_a... | [((739, 753), 'fn_cisco_amp4ep.lib.amp_ratelimit.AmpRateLimit', 'AmpRateLimit', ([], {}), '()\n', (751, 753), False, 'from fn_cisco_amp4ep.lib.amp_ratelimit import AmpRateLimit\n'), ((5038, 5055), 'resilient_circuits.handler', 'handler', (['"""reload"""'], {}), "('reload')\n", (5045, 5055), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((5247, 5276), 'resilient_circuits.function', 'function', (['"""fn_amp_get_events"""'], {}), "('fn_amp_get_events')\n", (5255, 5276), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((5012, 5031), 'fn_cisco_amp4ep.lib.helpers.validate_opts', 'validate_opts', (['self'], {}), '(self)\n', (5025, 5031), False, 'from fn_cisco_amp4ep.lib.helpers import validate_opts, validate_params\n'), ((5221, 5240), 'fn_cisco_amp4ep.lib.helpers.validate_opts', 'validate_opts', (['self'], {}), '(self)\n', (5234, 5240), False, 'from fn_cisco_amp4ep.lib.helpers import validate_opts, validate_params\n'), ((6138, 6165), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (6155, 6165), False, 'import logging\n'), ((7134, 7157), 'fn_cisco_amp4ep.lib.helpers.validate_params', 'validate_params', (['params'], {}), '(params)\n', (7149, 7157), False, 'from fn_cisco_amp4ep.lib.helpers import validate_opts, validate_params\n'), ((7177, 7214), 'fn_cisco_amp4ep.lib.amp_client.Ampclient', 'Ampclient', (['self.options', 'RATE_LIMITER'], {}), '(self.options, RATE_LIMITER)\n', (7186, 7214), False, 'from fn_cisco_amp4ep.lib.amp_client import Ampclient\n'), ((6720, 6774), 'resilient_circuits.StatusMessage', 'StatusMessage', (['"""Running Cisco AMP get events query..."""'], {}), "('Running Cisco AMP get events query...')\n", (6733, 6774), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((7594, 7637), 'resilient_circuits.StatusMessage', 'StatusMessage', (['"""Returning \'events\' results"""'], {}), '("Returning \'events\' results")\n', (7607, 7637), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((7661, 7680), 'json.dumps', 'json.dumps', (['results'], {}), '(results)\n', (7671, 7680), False, 'import json\n'), ((7756, 7779), 'resilient_circuits.FunctionResult', 'FunctionResult', (['results'], {}), '(results)\n', (7770, 7779), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((7319, 7333), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7331, 7333), False, 'from datetime import datetime\n'), ((7914, 7929), 'resilient_circuits.FunctionError', 'FunctionError', ([], {}), '()\n', (7927, 7929), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n')] |
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import os
import shutil
import sys
from commitsan.git import (REPOS_PATH, CalledProcessError,
git_cmd, git_revlist, mkdir_p)
from commitsan.worker import job
from commitsan.checks import check_all
def output(*args, **kwargs):
kwargs.setdefault('file', sys.stderr)
print(*args, **kwargs)
@job()
def update_repo(repo, clone_url):
try:
out = git_cmd(repo, ['remote', 'update'])
except (OSError, CalledProcessError):
repo_path = os.path.join(REPOS_PATH, repo)
shutil.rmtree(repo_path, ignore_errors=True)
mkdir_p(repo_path)
out = git_cmd(repo, ['clone', '--mirror', clone_url, '.'],
no_git_dir=True)
output(out)
@job()
def process_commit_range(repo, *commits):
for commit in git_revlist(repo, *commits):
check_all(repo, commit)
| [
"commitsan.git.git_revlist",
"commitsan.git.git_cmd",
"os.path.join",
"commitsan.worker.job",
"commitsan.git.mkdir_p",
"shutil.rmtree",
"commitsan.checks.check_all"
] | [((461, 466), 'commitsan.worker.job', 'job', ([], {}), '()\n', (464, 466), False, 'from commitsan.worker import job\n'), ((859, 864), 'commitsan.worker.job', 'job', ([], {}), '()\n', (862, 864), False, 'from commitsan.worker import job\n'), ((925, 952), 'commitsan.git.git_revlist', 'git_revlist', (['repo', '*commits'], {}), '(repo, *commits)\n', (936, 952), False, 'from commitsan.git import REPOS_PATH, CalledProcessError, git_cmd, git_revlist, mkdir_p\n'), ((524, 559), 'commitsan.git.git_cmd', 'git_cmd', (['repo', "['remote', 'update']"], {}), "(repo, ['remote', 'update'])\n", (531, 559), False, 'from commitsan.git import REPOS_PATH, CalledProcessError, git_cmd, git_revlist, mkdir_p\n'), ((962, 985), 'commitsan.checks.check_all', 'check_all', (['repo', 'commit'], {}), '(repo, commit)\n', (971, 985), False, 'from commitsan.checks import check_all\n'), ((622, 652), 'os.path.join', 'os.path.join', (['REPOS_PATH', 'repo'], {}), '(REPOS_PATH, repo)\n', (634, 652), False, 'import os\n'), ((661, 705), 'shutil.rmtree', 'shutil.rmtree', (['repo_path'], {'ignore_errors': '(True)'}), '(repo_path, ignore_errors=True)\n', (674, 705), False, 'import shutil\n'), ((714, 732), 'commitsan.git.mkdir_p', 'mkdir_p', (['repo_path'], {}), '(repo_path)\n', (721, 732), False, 'from commitsan.git import REPOS_PATH, CalledProcessError, git_cmd, git_revlist, mkdir_p\n'), ((747, 816), 'commitsan.git.git_cmd', 'git_cmd', (['repo', "['clone', '--mirror', clone_url, '.']"], {'no_git_dir': '(True)'}), "(repo, ['clone', '--mirror', clone_url, '.'], no_git_dir=True)\n", (754, 816), False, 'from commitsan.git import REPOS_PATH, CalledProcessError, git_cmd, git_revlist, mkdir_p\n')] |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: physics.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import vector3d_pb2 as vector3d__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='physics.proto',
package='Indriya.Core.Msgs',
#syntax='proto2',
serialized_pb=_b('\n\rphysics.proto\x12\x11Indriya.Core.Msgs\x1a\x0evector3d.proto\"\xc2\x03\n\x07Physics\x12\x32\n\x04type\x18\x01 \x01(\x0e\x32\x1f.Indriya.Core.Msgs.Physics.Type:\x03ODE\x12\x13\n\x0bsolver_type\x18\x02 \x01(\t\x12\x15\n\rmin_step_size\x18\x03 \x01(\x01\x12\x14\n\x0cprecon_iters\x18\x04 \x01(\x05\x12\r\n\x05iters\x18\x05 \x01(\x05\x12\x0b\n\x03sor\x18\x06 \x01(\x01\x12\x0b\n\x03\x63\x66m\x18\x07 \x01(\x01\x12\x0b\n\x03\x65rp\x18\x08 \x01(\x01\x12\"\n\x1a\x63ontact_max_correcting_vel\x18\t \x01(\x01\x12\x1d\n\x15\x63ontact_surface_layer\x18\n \x01(\x01\x12,\n\x07gravity\x18\x0b \x01(\x0b\x32\x1b.Indriya.Core.Msgs.Vector3d\x12\x16\n\x0e\x65nable_physics\x18\x0c \x01(\x08\x12\x18\n\x10real_time_factor\x18\r \x01(\x01\x12\x1d\n\x15real_time_update_rate\x18\x0e \x01(\x01\x12\x15\n\rmax_step_size\x18\x0f \x01(\x01\"2\n\x04Type\x12\x07\n\x03ODE\x10\x01\x12\n\n\x06\x42ULLET\x10\x02\x12\x0b\n\x07SIMBODY\x10\x03\x12\x08\n\x04\x44\x41RT\x10\x04')
,
dependencies=[vector3d__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PHYSICS_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='Indriya.Core.Msgs.Physics.Type',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ODE', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BULLET', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIMBODY', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DART', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=453,
serialized_end=503,
)
_sym_db.RegisterEnumDescriptor(_PHYSICS_TYPE)
_PHYSICS = _descriptor.Descriptor(
name='Physics',
full_name='Indriya.Core.Msgs.Physics',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='Indriya.Core.Msgs.Physics.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='solver_type', full_name='Indriya.Core.Msgs.Physics.solver_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_step_size', full_name='Indriya.Core.Msgs.Physics.min_step_size', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='precon_iters', full_name='Indriya.Core.Msgs.Physics.precon_iters', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='iters', full_name='Indriya.Core.Msgs.Physics.iters', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sor', full_name='Indriya.Core.Msgs.Physics.sor', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='cfm', full_name='Indriya.Core.Msgs.Physics.cfm', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='erp', full_name='Indriya.Core.Msgs.Physics.erp', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='contact_max_correcting_vel', full_name='Indriya.Core.Msgs.Physics.contact_max_correcting_vel', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='contact_surface_layer', full_name='Indriya.Core.Msgs.Physics.contact_surface_layer', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gravity', full_name='Indriya.Core.Msgs.Physics.gravity', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='enable_physics', full_name='Indriya.Core.Msgs.Physics.enable_physics', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='real_time_factor', full_name='Indriya.Core.Msgs.Physics.real_time_factor', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='real_time_update_rate', full_name='Indriya.Core.Msgs.Physics.real_time_update_rate', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_step_size', full_name='Indriya.Core.Msgs.Physics.max_step_size', index=14,
number=15, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PHYSICS_TYPE,
],
options=None,
is_extendable=False,
#syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=53,
serialized_end=503,
)
_PHYSICS.fields_by_name['type'].enum_type = _PHYSICS_TYPE
_PHYSICS.fields_by_name['gravity'].message_type = vector3d__pb2._VECTOR3D
_PHYSICS_TYPE.containing_type = _PHYSICS
DESCRIPTOR.message_types_by_name['Physics'] = _PHYSICS
Physics = _reflection.GeneratedProtocolMessageType('Physics', (_message.Message,), dict(
DESCRIPTOR = _PHYSICS,
__module__ = 'physics_pb2'
# @@protoc_insertion_point(class_scope:Indriya.Core.Msgs.Physics)
))
_sym_db.RegisterMessage(Physics)
# @@protoc_insertion_point(module_scope)
| [
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.descriptor.EnumValueDescriptor"
] | [((480, 506), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (504, 506), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((1885, 1976), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': '"""ODE"""', 'index': '(0)', 'number': '(1)', 'options': 'None', 'type': 'None'}), "(name='ODE', index=0, number=1, options=None,\n type=None)\n", (1916, 1976), True, 'from google.protobuf import descriptor as _descriptor\n'), ((1997, 2092), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': '"""BULLET"""', 'index': '(1)', 'number': '(2)', 'options': 'None', 'type': 'None'}), "(name='BULLET', index=1, number=2, options=\n None, type=None)\n", (2028, 2092), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2112, 2208), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': '"""SIMBODY"""', 'index': '(2)', 'number': '(3)', 'options': 'None', 'type': 'None'}), "(name='SIMBODY', index=2, number=3, options=\n None, type=None)\n", (2143, 2208), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2228, 2321), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': '"""DART"""', 'index': '(3)', 'number': '(4)', 'options': 'None', 'type': 'None'}), "(name='DART', index=3, number=4, options=\n None, type=None)\n", (2259, 2321), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2647, 2952), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""type"""', 'full_name': '"""Indriya.Core.Msgs.Physics.type"""', 'index': '(0)', 'number': '(1)', 'type': '(14)', 'cpp_type': '(8)', 'label': '(1)', 'has_default_value': '(True)', 'default_value': '(1)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='type', full_name=\n 'Indriya.Core.Msgs.Physics.type', index=0, number=1, type=14, cpp_type=\n 8, label=1, has_default_value=True, default_value=1, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, options=None)\n", (2674, 2952), True, 'from google.protobuf import descriptor as _descriptor\n'), ((3342, 3665), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""min_step_size"""', 'full_name': '"""Indriya.Core.Msgs.Physics.min_step_size"""', 'index': '(2)', 'number': '(3)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='min_step_size', full_name=\n 'Indriya.Core.Msgs.Physics.min_step_size', index=2, number=3, type=1,\n cpp_type=5, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (3369, 3665), True, 'from google.protobuf import descriptor as _descriptor\n'), ((3690, 4011), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""precon_iters"""', 'full_name': '"""Indriya.Core.Msgs.Physics.precon_iters"""', 'index': '(3)', 'number': '(4)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 
'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='precon_iters', full_name=\n 'Indriya.Core.Msgs.Physics.precon_iters', index=3, number=4, type=5,\n cpp_type=1, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (3717, 4011), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4036, 4343), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""iters"""', 'full_name': '"""Indriya.Core.Msgs.Physics.iters"""', 'index': '(4)', 'number': '(5)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='iters', full_name=\n 'Indriya.Core.Msgs.Physics.iters', index=4, number=5, type=5, cpp_type=\n 1, label=1, has_default_value=False, default_value=0, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, options=None)\n", (4063, 4343), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4368, 4670), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""sor"""', 'full_name': '"""Indriya.Core.Msgs.Physics.sor"""', 'index': '(5)', 'number': '(6)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='sor', full_name=\n 'Indriya.Core.Msgs.Physics.sor', index=5, number=6, type=1, cpp_type=5,\n label=1, has_default_value=False, default_value=0, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, options=None)\n", (4395, 4670), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4696, 4998), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""cfm"""', 'full_name': '"""Indriya.Core.Msgs.Physics.cfm"""', 'index': '(6)', 'number': '(7)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='cfm', full_name=\n 'Indriya.Core.Msgs.Physics.cfm', index=6, number=7, type=1, cpp_type=5,\n label=1, has_default_value=False, default_value=0, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, options=None)\n", (4723, 4998), True, 'from google.protobuf import descriptor as _descriptor\n'), ((5024, 5326), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""erp"""', 'full_name': '"""Indriya.Core.Msgs.Physics.erp"""', 'index': '(7)', 'number': '(8)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='erp', full_name=\n 'Indriya.Core.Msgs.Physics.erp', index=7, number=8, type=1, cpp_type=5,\n label=1, 
has_default_value=False, default_value=0, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, options=None)\n", (5051, 5326), True, 'from google.protobuf import descriptor as _descriptor\n'), ((5352, 5702), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""contact_max_correcting_vel"""', 'full_name': '"""Indriya.Core.Msgs.Physics.contact_max_correcting_vel"""', 'index': '(8)', 'number': '(9)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='contact_max_correcting_vel', full_name=\n 'Indriya.Core.Msgs.Physics.contact_max_correcting_vel', index=8, number\n =9, type=1, cpp_type=5, label=1, has_default_value=False, default_value\n =0, message_type=None, enum_type=None, containing_type=None,\n is_extension=False, extension_scope=None, options=None)\n", (5379, 5702), True, 'from google.protobuf import descriptor as _descriptor\n'), ((5726, 6066), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""contact_surface_layer"""', 'full_name': '"""Indriya.Core.Msgs.Physics.contact_surface_layer"""', 'index': '(9)', 'number': '(10)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='contact_surface_layer', full_name=\n 'Indriya.Core.Msgs.Physics.contact_surface_layer', index=9, number=10,\n type=1, cpp_type=5, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (5753, 6066), True, 'from google.protobuf import descriptor as _descriptor\n'), ((6091, 6409), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""gravity"""', 'full_name': '"""Indriya.Core.Msgs.Physics.gravity"""', 'index': '(10)', 'number': '(11)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': 'None', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='gravity', full_name=\n 'Indriya.Core.Msgs.Physics.gravity', index=10, number=11, type=11,\n cpp_type=10, label=1, has_default_value=False, default_value=None,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (6118, 6409), True, 'from google.protobuf import descriptor as _descriptor\n'), ((6434, 6765), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""enable_physics"""', 'full_name': '"""Indriya.Core.Msgs.Physics.enable_physics"""', 'index': '(11)', 'number': '(12)', 'type': '(8)', 'cpp_type': '(7)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(False)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='enable_physics', full_name=\n 'Indriya.Core.Msgs.Physics.enable_physics', index=11, number=12, type=8,\n cpp_type=7, label=1, has_default_value=False, default_value=False,\n 
message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (6461, 6765), True, 'from google.protobuf import descriptor as _descriptor\n'), ((6790, 7122), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""real_time_factor"""', 'full_name': '"""Indriya.Core.Msgs.Physics.real_time_factor"""', 'index': '(12)', 'number': '(13)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='real_time_factor', full_name=\n 'Indriya.Core.Msgs.Physics.real_time_factor', index=12, number=13, type\n =1, cpp_type=5, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (6817, 7122), True, 'from google.protobuf import descriptor as _descriptor\n'), ((7146, 7487), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""real_time_update_rate"""', 'full_name': '"""Indriya.Core.Msgs.Physics.real_time_update_rate"""', 'index': '(13)', 'number': '(14)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='real_time_update_rate', full_name=\n 'Indriya.Core.Msgs.Physics.real_time_update_rate', index=13, number=14,\n type=1, cpp_type=5, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (7173, 7487), True, 'from google.protobuf import descriptor as _descriptor\n'), ((7512, 7837), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""max_step_size"""', 'full_name': '"""Indriya.Core.Msgs.Physics.max_step_size"""', 'index': '(14)', 'number': '(15)', 'type': '(1)', 'cpp_type': '(5)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'options': 'None'}), "(name='max_step_size', full_name=\n 'Indriya.Core.Msgs.Physics.max_step_size', index=14, number=15, type=1,\n cpp_type=5, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, options=None)\n", (7539, 7837), True, 'from google.protobuf import descriptor as _descriptor\n')] |
"""Run all of the unit tests for this package over and over,
in order to provide for better profiling."""
from __future__ import print_function
def main():
import sys, os, gc, time
    dirname = os.path.split(__file__)[0]  # os.path.split returns (head, tail); keep only the directory
sys.path.append(dirname)
import runtests
gc.set_debug(gc.DEBUG_LEAK)
start = time.clock()
for i in range(50):
print('iteration: %d' % i)
runtests.main()
stop = time.clock()
took = str(stop - start)
print('Total Time: %s' % took)
for item in gc.get_objects():
print(item, sys.getrefcount(item))
if __name__ == '__main__':
    main()
    import sys
    sys.exit(0)
| [
"time.clock",
"gc.set_debug",
"os.path.split",
"runtests.main",
"sys.getrefcount",
"sys.exit",
"gc.get_objects",
"sys.path.append"
] | [((205, 228), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (218, 228), False, 'import sys, os, gc, time\n'), ((233, 257), 'sys.path.append', 'sys.path.append', (['dirname'], {}), '(dirname)\n', (248, 257), False, 'import sys, os, gc, time\n'), ((283, 310), 'gc.set_debug', 'gc.set_debug', (['gc.DEBUG_LEAK'], {}), '(gc.DEBUG_LEAK)\n', (295, 310), False, 'import sys, os, gc, time\n'), ((324, 336), 'time.clock', 'time.clock', ([], {}), '()\n', (334, 336), False, 'import sys, os, gc, time\n'), ((433, 445), 'time.clock', 'time.clock', ([], {}), '()\n', (443, 445), False, 'import sys, os, gc, time\n'), ((527, 543), 'gc.get_objects', 'gc.get_objects', ([], {}), '()\n', (541, 543), False, 'import sys, os, gc, time\n'), ((632, 643), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (640, 643), False, 'import sys, os, gc, time\n'), ((405, 420), 'runtests.main', 'runtests.main', ([], {}), '()\n', (418, 420), False, 'import runtests\n'), ((565, 586), 'sys.getrefcount', 'sys.getrefcount', (['item'], {}), '(item)\n', (580, 586), False, 'import sys, os, gc, time\n')] |
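The profiling harness above relies on two dated APIs: time.clock (removed in Python 3.8) and printing every object gc can see. A minimal hedged sketch of the same repeat-and-measure loop on current Python; the runtests import is assumed to be the suite's own entry point, exactly as in the original:

import gc
import sys
import time

def profile_runs(iterations=50):
    # DEBUG_LEAK implies SAVEALL, so otherwise-collectable objects are kept in gc.garbage
    gc.set_debug(gc.DEBUG_LEAK)
    import runtests                      # assumed: the package's own test entry point
    start = time.perf_counter()          # time.clock was removed in Python 3.8
    for i in range(iterations):
        print('iteration: %d' % i)
        runtests.main()
    print('Total Time: %s' % (time.perf_counter() - start))
    gc.collect()                         # force a final collection so gc.garbage is populated
    for item in gc.garbage:              # objects kept alive for leak inspection
        print(item, sys.getrefcount(item))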
import argparse
import functools
import sys
from tornado import (
httpclient,
ioloop,
)
def add_package_to_path():
if not (__name__ == "__main__" and __package__ == ""):
return
import os
sys.path.append(os.path.abspath(os.path.join(
os.path.abspath(__file__),
"..",
"..")))
add_package_to_path()
from thuum import (
reporters,
runners,
stats,
)
class UsageError(Exception):
    def __init__(self, message, parser):
        super(UsageError, self).__init__(message)
        self.message = message  # BaseException.message no longer exists on Python 3
        self.parser = parser
class AddBody(argparse.Action):
def __call__(self, parser, namespace, body, option_string=None):
if namespace.body is not None:
raise UsageError("Cannot specify -b/--body more than once.", parser)
if namespace.method not in ("PATCH", "POST", "PUT"):
raise UsageError(
"Cannot specify -b/--body with %r." % namespace.method,
parser)
if body.startswith("py:"):
pass
elif body.startswith("@"):
pass
namespace.body = body
class AddHeader(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
header = tuple(values.split(":"))
        if len(header) != 2:
            raise UsageError("Headers must be of the form 'name:value'", parser)
        namespace.headers.append(header)
class StoreMappedChoice(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
choice = self.choices[values]
setattr(namespace, self.dest, choice)
def get_argument_parser(parser=None):
parser = parser or argparse.ArgumentParser(
description="Simple HTTP Load runner.")
parser.add_argument(
"-m", "--method",
choices=("DELETE", "GET", "HEAD", "OPTIONS", "POST", "PUT"),
help="HTTP Method to use for request",
default="GET")
parser.add_argument(
"-b", "--body",
action=AddBody,
default=None,
help=(
"Request body. Prefix with 'py:' to indicate a fully-qualified "
"python callable. Prefix with '@' to indicate a file path."
))
parser.add_argument(
"-c", "--concurrency",
help="Number of requests to make concurrently.",
dest="concurrency",
default=1,
type=int)
parser.add_argument(
"-H", "--header", dest="headers",
help="Custom header. name:value",
default=[], action=AddHeader)
parser.add_argument(
"--reporter", dest="reporter_class",
help="Stats report format.",
action=StoreMappedChoice,
default=reporters.TerminalReporter,
choices={
"csv": reporters.CSVReporter,
"json": reporters.JSONReporter,
"term": reporters.TerminalReporter,
})
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
"-n", "--requests",
help="Number of requests",
type=int)
group.add_argument(
"-d", "--duration",
help="Run load test for specified length of time.",
type=float)
parser.add_argument("url", help="URL to hit")
return parser
def main(argv=sys.argv[1:], stdout=sys.stdout):
parser = get_argument_parser()
try:
args = parser.parse_args(argv)
except UsageError as exception:
sys.exit("%s\n\n%s" % (
exception.message,
exception.parser.format_usage()
))
httpclient.AsyncHTTPClient.configure(None, max_clients=args.concurrency)
client = httpclient.AsyncHTTPClient(io_loop=ioloop.IOLoop.current())
try:
make_request = functools.partial(
httpclient.HTTPRequest,
args.url,
args.method,
args.headers,
args.body)
if args.duration:
runner = runners.DurationRunner(client, make_request, args.duration)
else:
runner = runners.QuantityRunner(client, make_request, args.requests)
reporter = args.reporter_class(stdout)
progress = functools.partial(reporter.progress, runner)
tracker = stats.Tracker(runner)
tracker.events.on("request_finished", reporter.record)
tracker.events.on("tests_finished", lambda t: progress())
tracker.events.on("tests_finished", reporter.summarize)
client.io_loop.add_callback(progress)
ioloop.PeriodicCallback(progress, 500, client.io_loop).start()
runner.run()
except KeyboardInterrupt:
sys.exit("Tests interrupted.")
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| [
"argparse.ArgumentParser",
"tornado.httpclient.AsyncHTTPClient.configure",
"tornado.ioloop.IOLoop.current",
"thuum.runners.QuantityRunner",
"thuum.runners.DurationRunner",
"tornado.ioloop.PeriodicCallback",
"functools.partial",
"sys.exit",
"os.path.abspath",
"thuum.stats.Tracker"
] | [((3541, 3613), 'tornado.httpclient.AsyncHTTPClient.configure', 'httpclient.AsyncHTTPClient.configure', (['None'], {'max_clients': 'args.concurrency'}), '(None, max_clients=args.concurrency)\n', (3577, 3613), False, 'from tornado import httpclient, ioloop\n'), ((1666, 1729), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Simple HTTP Load runner."""'}), "(description='Simple HTTP Load runner.')\n", (1689, 1729), False, 'import argparse\n'), ((3720, 3814), 'functools.partial', 'functools.partial', (['httpclient.HTTPRequest', 'args.url', 'args.method', 'args.headers', 'args.body'], {}), '(httpclient.HTTPRequest, args.url, args.method, args.\n headers, args.body)\n', (3737, 3814), False, 'import functools\n'), ((4141, 4185), 'functools.partial', 'functools.partial', (['reporter.progress', 'runner'], {}), '(reporter.progress, runner)\n', (4158, 4185), False, 'import functools\n'), ((4205, 4226), 'thuum.stats.Tracker', 'stats.Tracker', (['runner'], {}), '(runner)\n', (4218, 4226), False, 'from thuum import reporters, runners, stats\n'), ((3662, 3685), 'tornado.ioloop.IOLoop.current', 'ioloop.IOLoop.current', ([], {}), '()\n', (3683, 3685), False, 'from tornado import httpclient, ioloop\n'), ((3919, 3978), 'thuum.runners.DurationRunner', 'runners.DurationRunner', (['client', 'make_request', 'args.duration'], {}), '(client, make_request, args.duration)\n', (3941, 3978), False, 'from thuum import reporters, runners, stats\n'), ((4014, 4073), 'thuum.runners.QuantityRunner', 'runners.QuantityRunner', (['client', 'make_request', 'args.requests'], {}), '(client, make_request, args.requests)\n', (4036, 4073), False, 'from thuum import reporters, runners, stats\n'), ((4599, 4629), 'sys.exit', 'sys.exit', (['"""Tests interrupted."""'], {}), "('Tests interrupted.')\n", (4607, 4629), False, 'import sys\n'), ((271, 296), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (286, 296), False, 'import os\n'), ((4475, 4529), 'tornado.ioloop.PeriodicCallback', 'ioloop.PeriodicCallback', (['progress', '(500)', 'client.io_loop'], {}), '(progress, 500, client.io_loop)\n', (4498, 4529), False, 'from tornado import httpclient, ioloop\n')] |
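AddBody, AddHeader and StoreMappedChoice above all follow the same pattern: subclass argparse.Action and do validation or transformation inside __call__ at parse time. A self-contained hedged sketch of that pattern using only the standard library (the option name and error message are illustrative, not the project's actual ones):

import argparse

class CollectHeaders(argparse.Action):
    """Collect repeated -H name:value options as (name, value) tuples."""
    def __call__(self, parser, namespace, values, option_string=None):
        parts = tuple(values.split(":", 1))
        if len(parts) != 2:
            parser.error("Headers must be of the form 'name:value'")
        getattr(namespace, self.dest).append(parts)

parser = argparse.ArgumentParser()
parser.add_argument("-H", "--header", dest="headers", default=[], action=CollectHeaders)
print(parser.parse_args(["-H", "Accept:application/json"]).headers)
# -> [('Accept', 'application/json')]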
# Generated by Django 3.1.2 on 2021-02-15 05:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('profiles', '0008_whoiswatching_person_avatar'),
]
operations = [
migrations.AddField(
model_name='whoiswatching',
name='user_age',
field=models.CharField(max_length=30, null=True),
),
]
| [
"django.db.models.CharField"
] | [((353, 395), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)'}), '(max_length=30, null=True)\n', (369, 395), False, 'from django.db import migrations, models\n')] |
from pathlib import Path
import os
import re
from decimal import Decimal
import csv
import numpy
from Utils import TextProcessingUtils
from Utils import DefinedConstants
def readEmbeddingsFromTxtFile(inFile):
w2v = {}
with open(inFile, "r") as f:
for l in f.readlines():
if not l.strip():
continue
            if l:
                ar = l.strip().split()
                # first token is the word; the remaining tokens are the vector components
                v = []
                for i in range(1, len(ar)):
                    v.append(Decimal(ar[i]))
                w2v[ar[0]] = v
return w2v
def readEmbeddingsFromTxtFileUsingVocab(inFile, vocab):
w2v = {}
with open(inFile, "r") as f:
for l in f.readlines():
if not l.strip():
continue
if l:
ar = l.strip().split()
if ar[0] in vocab:
                    v = []
                    for i in range(1, len(ar)):
                        v.append(Decimal(ar[i]))
                    w2v[ar[0]] = v
return w2v
def readTextFile(inFile):
out = []
with open(inFile, "r") as f:
for i in f.readlines():
if not i.strip():
continue
if i:
out.append(i+'\n')
return ''.join(out)
def saveAlignments(alingments, outFile, fileEncoding="utf-8"):
if len(alingments)>0:
with open(outFile, 'w',encoding=fileEncoding) as f:
for match in alingments:
f.write(match.toString()+"\n\n")
def readNewselaEmbeddingVocabulary(inFolder, language):
vocab = set()
regFilter = r'^.*\.'+language+'.0.txt$'
for dirpath, dirs, files in os.walk(inFolder):
for filename in files:
if re.match(regFilter, filename):
fname = os.path.join(dirpath,filename)
text = readTextFile(fname)
print("Read file "+fname)
vocab.update(TextProcessingUtils.getCleanEmbeddingModelTokens(text))
for i in range(1, 5):
                        # substitute on the original filename each iteration; reusing the
                        # previous result would leave levels 2-4 pointing at the level-1 file
                        level_filename = re.sub("." + language + ".0.txt", "." + language + "." + str(i) + ".txt", filename)
                        fname = os.path.join(dirpath, level_filename)
text = readTextFile(fname)
if text:
vocab.update(TextProcessingUtils.getCleanEmbeddingModelTokens(text))
return vocab
def displayAlignments(alignments, detailed=True):
print ("Alignments:")
for alignment in alignments:
if detailed:
print(alignment.toString())
else:
print(alignment.getIndexAlignmentString())
print("")
def readTwoTextPerLineFileEmbeddingVocabulary(inFile, fistSentIndex, secondSentIndex):
vocab = set()
with open(inFile, "r") as f:
for l in f.readlines():
if not l.strip():
continue
if l:
ar = l.strip().split("\t")
vocab.update(TextProcessingUtils.getCleanEmbeddingModelTokens(ar[fistSentIndex]))
vocab.update(TextProcessingUtils.getCleanEmbeddingModelTokens(ar[secondSentIndex]))
return vocab
def convertArgToOption(param2value, args, key):
if args:
param2value[key] = args
def parseOptions(args):
param2value = {}
convertArgToOption(param2value, args.i, "input")
convertArgToOption(param2value, args.o, "output")
convertArgToOption(param2value, args.l, "language")
convertArgToOption(param2value, args.s, "similarity")
convertArgToOption(param2value, args.a, "aLv")
convertArgToOption(param2value, args.t, "aSt")
convertArgToOption(param2value, args.u, "aSt2")
convertArgToOption(param2value, args.e, "emb")
convertArgToOption(param2value, args.ll, "linelevel")
return param2value
def showNewselaUsageMessage():
print("Usage:\nprogram -i inFolder -o outFolder -l language -s similarityStrategy -a alignmentLevel -t alignmentStrategy"
+ " {-u SubLevelalignmentStrategy} {-e embeddingsTxtFile}\n"
+ "\"inFolder\" is the folder with the original newsela texts.\n"
+ "\"outFolder\" is the folder where the alignments will be stored.\n"
+ "\"language\" can be \""+DefinedConstants.SpanishLanguage+"\" or \""+DefinedConstants.EnglishLanguage+"\". Default: \""+DefinedConstants.EnglishLanguage+"\".\n"
+ "\"similarityStrategy\" can be \""+DefinedConstants.CNGstrategy+"\", \""+DefinedConstants.WAVGstrategy+"\", or \""+DefinedConstants.CWASAstrategy+"\", where the N in \""+DefinedConstants.CNGstrategy+"\" should be replaced for the desired n-gram size, e.g. \""+DefinedConstants.CNGstrategy.replace("N", 3+"")+"\". Default: \""+DefinedConstants.CNGstrategy.replace("N", 3+"")+"\".\n"
+ "\"alignmentLevel\" can be \""+DefinedConstants.ParagraphSepEmptyLineLevel+"\", \""+DefinedConstants.SentenceLevel+"\", or \""+DefinedConstants.ParagraphSepEmptyLineAndSentenceLevel+"\". Default: \""+DefinedConstants.SentenceLevel+"\".\n"
+ "\"alignmentStrategy\" can be \""+DefinedConstants.closestSimStrategy+"\" or \""+DefinedConstants.closestSimKeepingSeqStrategy+"\". Default: \""+DefinedConstants.closestSimStrategy+"\".\n"
+ "\"SubLevelalignmentStrategy\" can be \""+DefinedConstants.closestSimStrategy+"\" or \""+DefinedConstants.closestSimKeepingSeqStrategy+"\". Default: \""+DefinedConstants.closestSimStrategy+"\".\n"
+ "\"embeddingsTxtFile\" is the file with the embeddings using the classical word2vec txt format.\n"
)
def showCustomModelUsageMessage():
print("Usage:\nprogram -i inFile -o outFile -s similarityStrategy {-e embeddingsTxtFile}\n"
"\"inFile\" is a file with two tab-separated texts per line. The program will output a similarity score for each one of these text pairs.\n"
"\"outFile\" contains the original \"inFile\" tab-separated texts plus their similarity score.\n"
"\"similarityStrategy\" can be \""+DefinedConstants.CNGstrategy+"\", \""+DefinedConstants.WAVGstrategy+"\", or \""+DefinedConstants.CWASAstrategy+"\", where the N in \""+DefinedConstants.CNGstrategy+"\" should be replaced for the desired n-gram size, e.g. \""+DefinedConstants.CNGstrategy.replace("N", str(3)+"")+"\". Default: \""+DefinedConstants.CNGstrategy.replace("N", str(3)+"")+"\".\n"
"\"embeddingsTxtFile\" is the file with the embeddings using the classical word2vec txt format.\n"
)
def getOutputFileName(inFile, alignmentLevel, similarityStrategy, nGramSize):
simStr = similarityStrategy
if similarityStrategy == DefinedConstants.CNGstrategy:
simStr.replace("N", str(nGramSize)+"")
return inFile+"_"+ alignmentLevel+"_"+ simStr
def saveAlignmentsToCVS(alingments, outFile, fileEncoding="utf-8"):
with open(outFile, 'w',encoding=fileEncoding) as f:
for alingment in alingments:
f.write(alingment.toCVS()+"\n\n")
def getStats(alingments, nbrOfLineOrginal, nbrOfLineSimple, outFile):
data = numpy.zeros(len(alingments)).tolist()
for i in range(len(alingments)):
data[i] = alingments[i].getSimilarity()
histogram = calcHistogram(data, 0.0, 1.0, 10)
out = ""
out = outFile+";"+str(len(nbrOfLineOrginal))+"/"+str(getTotalWord(nbrOfLineOrginal))+";"
out += str(len(nbrOfLineSimple))+"/"+str(getTotalWord(nbrOfLineSimple))+";"
total =0.0
aboveTrashord=0.0
for i in range(len(histogram)):
total+=histogram[i]
if i>=4:
aboveTrashord+=histogram[i]
out += str(aboveTrashord)+";"
out += str(((aboveTrashord)/(total))) + "%;"
for i in range(len(histogram)):
out += str(histogram[i])+" ["+"{:.2f}".format((histogram[i]/total)*100.0)+"%]"+";"
return out
def getTotalWord(nbrOfLineOrginal):
x = 0
for sentence in nbrOfLineOrginal:
x+= sentence.getNbrOfWords()
return x
def calcHistogram(data, min, max, numBins):
result = numpy.zeros(numBins).tolist()
binSize = (max - min)/numBins
for d in data:
bin = ((d - min) / binSize)
if bin < 0:
bin=0
elif bin >= numBins:
bin = numBins -1
result[int(bin)] += 1
return result
| [
"Utils.DefinedConstants.CNGstrategy.replace",
"Utils.TextProcessingUtils.getCleanEmbeddingModelTokens",
"os.walk",
"os.path.join",
"re.match",
"numpy.zeros",
"decimal.Decimal"
] | [((1639, 1656), 'os.walk', 'os.walk', (['inFolder'], {}), '(inFolder)\n', (1646, 1656), False, 'import os\n'), ((1706, 1735), 're.match', 're.match', (['regFilter', 'filename'], {}), '(regFilter, filename)\n', (1714, 1735), False, 'import re\n'), ((7973, 7993), 'numpy.zeros', 'numpy.zeros', (['numBins'], {}), '(numBins)\n', (7984, 7993), False, 'import numpy\n'), ((1761, 1792), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (1773, 1792), False, 'import os\n'), ((496, 510), 'decimal.Decimal', 'Decimal', (['ar[i]'], {}), '(ar[i])\n', (503, 510), False, 'from decimal import Decimal\n'), ((1906, 1960), 'Utils.TextProcessingUtils.getCleanEmbeddingModelTokens', 'TextProcessingUtils.getCleanEmbeddingModelTokens', (['text'], {}), '(text)\n', (1954, 1960), False, 'from Utils import TextProcessingUtils\n'), ((2142, 2173), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (2154, 2173), False, 'import os\n'), ((2934, 3001), 'Utils.TextProcessingUtils.getCleanEmbeddingModelTokens', 'TextProcessingUtils.getCleanEmbeddingModelTokens', (['ar[fistSentIndex]'], {}), '(ar[fistSentIndex])\n', (2982, 3001), False, 'from Utils import TextProcessingUtils\n'), ((3032, 3101), 'Utils.TextProcessingUtils.getCleanEmbeddingModelTokens', 'TextProcessingUtils.getCleanEmbeddingModelTokens', (['ar[secondSentIndex]'], {}), '(ar[secondSentIndex])\n', (3080, 3101), False, 'from Utils import TextProcessingUtils\n'), ((946, 960), 'decimal.Decimal', 'Decimal', (['ar[i]'], {}), '(ar[i])\n', (953, 960), False, 'from decimal import Decimal\n'), ((2286, 2340), 'Utils.TextProcessingUtils.getCleanEmbeddingModelTokens', 'TextProcessingUtils.getCleanEmbeddingModelTokens', (['text'], {}), '(text)\n', (2334, 2340), False, 'from Utils import TextProcessingUtils\n'), ((4683, 4732), 'Utils.DefinedConstants.CNGstrategy.replace', 'DefinedConstants.CNGstrategy.replace', (['"""N"""', "(3 + '')"], {}), "('N', 3 + '')\n", (4719, 4732), False, 'from Utils import DefinedConstants\n'), ((4617, 4666), 'Utils.DefinedConstants.CNGstrategy.replace', 'DefinedConstants.CNGstrategy.replace', (['"""N"""', "(3 + '')"], {}), "('N', 3 + '')\n", (4653, 4666), False, 'from Utils import DefinedConstants\n')] |
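The readers above expect the classic word2vec text format (a token followed by its vector components, one word per line), and calcHistogram does fixed-width binning over [min, max). A short hedged usage sketch with made-up values:

from decimal import Decimal

line = "cat 0.12 -0.05 0.33"              # word2vec text format: token, then components
ar = line.split()
vector = [Decimal(x) for x in ar[1:]]    # what readEmbeddingsFromTxtFile stores per word
print(ar[0], vector)

# calcHistogram(data, 0.0, 1.0, 10) splits [0, 1) into ten 0.1-wide bins, so
# calcHistogram([0.12, 0.48, 0.51, 0.97, 0.99], 0.0, 1.0, 10)
# puts one count in bins 1, 4 and 5, and two in the last bin.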
from typing import Any, Dict, List, Type, TypeVar, Union, cast
import attr
from ..types import UNSET, Unset
T = TypeVar("T", bound="NewUser")
@attr.s(auto_attribs=True)
class NewUser:
""" """
password: str
permissions: List[str]
roles: List[str]
username: str
email: Union[Unset, str] = UNSET
def to_dict(self) -> Dict[str, Any]:
password = self.password
permissions = self.permissions
roles = self.roles
username = self.username
email = self.email
field_dict: Dict[str, Any] = {}
field_dict.update(
{
"password": password,
"permissions": permissions,
"roles": roles,
"username": username,
}
)
if email is not UNSET:
field_dict["email"] = email
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
password = d.pop("password")
permissions = cast(List[str], d.pop("permissions"))
roles = cast(List[str], d.pop("roles"))
username = d.pop("username")
email = d.pop("email", UNSET)
new_user = cls(
password=password,
permissions=permissions,
roles=roles,
username=username,
email=email,
)
return new_user
| [
"attr.s",
"typing.TypeVar"
] | [((115, 144), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': '"""NewUser"""'}), "('T', bound='NewUser')\n", (122, 144), False, 'from typing import Any, Dict, List, Type, TypeVar, Union, cast\n'), ((148, 173), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (154, 173), False, 'import attr\n')] |
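to_dict and from_dict above form a symmetric, JSON-friendly round trip in which unset optional fields are simply omitted. A hedged usage sketch, assuming the NewUser module above is importable (the field values are made up):

user = NewUser(password="s3cret", permissions=["read"], roles=["user"], username="alice")
payload = user.to_dict()
assert "email" not in payload              # UNSET fields are left out of the dict
assert NewUser.from_dict(payload) == user  # attrs generates __eq__, so the round trip is checkable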
from ai_safety_gridworlds.environments.shared import safety_game
from collections import defaultdict
import experiments.environment_helper as environment_helper
import numpy as np
class ModelFreeAUPAgent:
name = "Model-free AUP"
pen_epsilon, AUP_epsilon = .2, .9 # chance of choosing greedy action in training
default = {'lambd': 1./1.501, 'discount': .996, 'rpenalties': 30, 'episodes': 6000}
def __init__(self, env, lambd=default['lambd'], state_attainable=False, num_rewards=default['rpenalties'],
discount=default['discount'], episodes=default['episodes'], trials=50, use_scale=False):
"""Trains using the simulator and e-greedy exploration to determine a greedy policy.
:param env: Simulator.
:param lambd: Impact tuning parameter.
:param state_attainable: True - generate state indicator rewards; false - random rewards.
:param num_rewards: Size of the attainable set, |\mathcal{R}|.
        :param discount: Discount factor used in the Q-learning updates.
        :param episodes: Number of training episodes per trial.
        :param trials: Number of independent training trials to average over.
        """
self.actions = range(env.action_spec().maximum + 1)
self.probs = [[1.0 / (len(self.actions) - 1) if i != k else 0 for i in self.actions] for k in self.actions]
self.discount = discount
self.episodes = episodes
self.trials = trials
self.lambd = lambd
self.state_attainable = state_attainable
self.use_scale = use_scale
if state_attainable:
self.name = 'Relative reachability'
self.attainable_set = environment_helper.derive_possible_rewards(env)
else:
self.attainable_set = [defaultdict(np.random.random) for _ in range(num_rewards)]
if len(self.attainable_set) == 0:
self.name = 'Standard' # no penalty applied!
self.train(env)
def train(self, env):
        self.performance = np.zeros((self.trials, self.episodes // 10))  # integer division: one entry per 10 episodes
# 0: high-impact, incomplete; 1: high-impact, complete; 2: low-impact, incomplete; 3: low-impact, complete
self.counts = np.zeros(4)
for trial in range(self.trials):
self.attainable_Q = defaultdict(lambda: np.zeros((len(self.attainable_set), len(self.actions))))
self.AUP_Q = defaultdict(lambda: np.zeros(len(self.actions)))
if not self.state_attainable:
self.attainable_set = [defaultdict(np.random.random) for _ in range(len(self.attainable_set))]
self.epsilon = self.pen_epsilon
for episode in range(self.episodes):
if episode > 2.0 / 3 * self.episodes: # begin greedy exploration
self.epsilon = self.AUP_epsilon
time_step = env.reset()
while not time_step.last():
last_board = str(time_step.observation['board'])
action = self.behavior_action(last_board)
time_step = env.step(action)
self.update_greedy(last_board, action, time_step)
if episode % 10 == 0:
                    _, actions, self.performance[trial][episode // 10], _ = environment_helper.run_episode(self, env)
self.counts[int(self.performance[trial, -1]) + 2] += 1 # -2 goes to idx 0
env.reset()
def act(self, obs):
return self.AUP_Q[str(obs['board'])].argmax()
def behavior_action(self, board):
"""Returns the e-greedy action for the state board string."""
greedy = self.AUP_Q[board].argmax()
if np.random.random() < self.epsilon or len(self.actions) == 1:
return greedy
else: # choose anything else
return np.random.choice(self.actions, p=self.probs[greedy])
def get_penalty(self, board, action):
if len(self.attainable_set) == 0: return 0
action_attainable = self.attainable_Q[board][:, action]
null_attainable = self.attainable_Q[board][:, safety_game.Actions.NOTHING]
diff = action_attainable - null_attainable
# Scaling number or vector (per-AU)
if self.use_scale:
scale = sum(abs(null_attainable))
if scale == 0:
scale = 1
penalty = sum(abs(diff) / scale)
else:
scale = np.copy(null_attainable)
scale[scale == 0] = 1 # avoid division by zero
penalty = np.average(np.divide(abs(diff), scale))
# Scaled difference between taking action and doing nothing
return self.lambd * penalty # ImpactUnit is 0!
def update_greedy(self, last_board, action, time_step):
"""Perform TD update on observed reward."""
learning_rate = 1
new_board = str(time_step.observation['board'])
def calculate_update(attainable_idx=None):
"""Do the update for the main function (or the attainable function at the given index)."""
if attainable_idx is not None:
reward = self.attainable_set[attainable_idx](new_board) if self.state_attainable \
else self.attainable_set[attainable_idx][new_board]
new_Q, old_Q = self.attainable_Q[new_board][attainable_idx].max(), \
self.attainable_Q[last_board][attainable_idx, action]
else:
reward = time_step.reward - self.get_penalty(last_board, action)
new_Q, old_Q = self.AUP_Q[new_board].max(), self.AUP_Q[last_board][action]
return learning_rate * (reward + self.discount * new_Q - old_Q)
# Learn the attainable reward functions
for attainable_idx in range(len(self.attainable_set)):
self.attainable_Q[last_board][attainable_idx, action] += calculate_update(attainable_idx)
if self.state_attainable:
self.attainable_Q[last_board][:, action] = np.clip(self.attainable_Q[last_board][:, action], 0, 1)
self.AUP_Q[last_board][action] += calculate_update()
| [
"numpy.clip",
"numpy.copy",
"numpy.random.choice",
"numpy.random.random",
"experiments.environment_helper.run_episode",
"numpy.zeros",
"experiments.environment_helper.derive_possible_rewards",
"collections.defaultdict"
] | [((1883, 1926), 'numpy.zeros', 'np.zeros', (['(self.trials, self.episodes / 10)'], {}), '((self.trials, self.episodes / 10))\n', (1891, 1926), True, 'import numpy as np\n'), ((2065, 2076), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (2073, 2076), True, 'import numpy as np\n'), ((1547, 1594), 'experiments.environment_helper.derive_possible_rewards', 'environment_helper.derive_possible_rewards', (['env'], {}), '(env)\n', (1589, 1594), True, 'import experiments.environment_helper as environment_helper\n'), ((3667, 3719), 'numpy.random.choice', 'np.random.choice', (['self.actions'], {'p': 'self.probs[greedy]'}), '(self.actions, p=self.probs[greedy])\n', (3683, 3719), True, 'import numpy as np\n'), ((4262, 4286), 'numpy.copy', 'np.copy', (['null_attainable'], {}), '(null_attainable)\n', (4269, 4286), True, 'import numpy as np\n'), ((5837, 5892), 'numpy.clip', 'np.clip', (['self.attainable_Q[last_board][:, action]', '(0)', '(1)'], {}), '(self.attainable_Q[last_board][:, action], 0, 1)\n', (5844, 5892), True, 'import numpy as np\n'), ((1644, 1673), 'collections.defaultdict', 'defaultdict', (['np.random.random'], {}), '(np.random.random)\n', (1655, 1673), False, 'from collections import defaultdict\n'), ((3523, 3541), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (3539, 3541), True, 'import numpy as np\n'), ((2383, 2412), 'collections.defaultdict', 'defaultdict', (['np.random.random'], {}), '(np.random.random)\n', (2394, 2412), False, 'from collections import defaultdict\n'), ((3130, 3171), 'experiments.environment_helper.run_episode', 'environment_helper.run_episode', (['self', 'env'], {}), '(self, env)\n', (3160, 3171), True, 'import experiments.environment_helper as environment_helper\n')] |
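get_penalty compares the auxiliary ("attainable") Q-values of the chosen action against those of the no-op and averages the scaled absolute differences. A worked example of just that arithmetic for the use_scale=False branch, with made-up Q-values and no environment needed:

import numpy as np

lambd = 1. / 1.501                               # the default impact tuning parameter
action_attainable = np.array([0.40, 0.10, 0.90])  # Q_aux(s, a) for three auxiliary rewards
null_attainable   = np.array([0.50, 0.10, 0.60])  # Q_aux(s, no-op)
diff = action_attainable - null_attainable

scale = np.copy(null_attainable)                 # per-auxiliary scaling of the change
scale[scale == 0] = 1                            # avoid division by zero
penalty = lambd * np.average(np.abs(diff) / scale)
print(penalty)                                   # ~0.16: the action shifts attainable utility away from the no-op baseline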
import tensorflow as tf
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout
class ASPP(Model):
def __init__(self, filters, dilation_rates=[3, 6, 9]):
super().__init__()
self.aspp1 = ASPPConv(filters, 1, 1)
self.aspp2 = ASPPConv(filters, 3, dilation_rates[0])
self.aspp3 = ASPPConv(filters, 3, dilation_rates[1])
self.aspp4 = ASPPConv(filters, 3, dilation_rates[2])
self.pool = ASPPPooling(filters)
self.project = Sequential([
Conv2D(filters, 1, use_bias=False),
BatchNormalization(momentum=0.1, epsilon=1e-5),
ReLU(),
Dropout(0.1)
])
def call(self, x, training=None):
x = tf.concat([
self.aspp1(x, training=training),
self.aspp2(x, training=training),
self.aspp3(x, training=training),
self.aspp4(x, training=training),
self.pool(x, training=training)
], axis=-1)
x = self.project(x, training=training)
return x
class ASPPConv(Model):
def __init__(self, filters, kernel_size, dilation_rate):
super().__init__()
self.conv = Conv2D(filters, kernel_size, padding='SAME', dilation_rate=dilation_rate, use_bias=False)
self.bn = BatchNormalization(momentum=0.1, epsilon=1e-5)
self.relu = ReLU()
def call(self, x, training=None):
x = self.conv(x, training=training)
x = self.bn(x, training=training)
x = self.relu(x, training=training)
return x
class ASPPPooling(Model):
def __init__(self, filters):
super().__init__()
self.pool = GlobalAveragePooling2D()
self.conv = Conv2D(filters, 1, use_bias=False)
self.bn = BatchNormalization(momentum=0.1, epsilon=1e-5)
self.relu = ReLU()
def call(self, x, training=None):
h, w = tf.shape(x)[1], tf.shape(x)[2]
x = self.pool(x, training=training)
x = x[:, None, None, :]
x = self.conv(x, training=training)
x = self.bn(x, training=training)
x = self.relu(x, training=training)
x = tf.image.resize(x, (h, w), 'nearest')
        return x
| [
"tensorflow.shape",
"tensorflow.keras.layers.Conv2D",
"tensorflow.image.resize",
"tensorflow.keras.layers.ReLU",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.BatchNormalization",
"tensorflow.keras.layers.GlobalAveragePooling2D"
] | [((1264, 1357), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['filters', 'kernel_size'], {'padding': '"""SAME"""', 'dilation_rate': 'dilation_rate', 'use_bias': '(False)'}), "(filters, kernel_size, padding='SAME', dilation_rate=dilation_rate,\n use_bias=False)\n", (1270, 1357), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1372, 1419), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'momentum': '(0.1)', 'epsilon': '(1e-05)'}), '(momentum=0.1, epsilon=1e-05)\n', (1390, 1419), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1439, 1445), 'tensorflow.keras.layers.ReLU', 'ReLU', ([], {}), '()\n', (1443, 1445), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1744, 1768), 'tensorflow.keras.layers.GlobalAveragePooling2D', 'GlobalAveragePooling2D', ([], {}), '()\n', (1766, 1768), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1789, 1823), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['filters', '(1)'], {'use_bias': '(False)'}), '(filters, 1, use_bias=False)\n', (1795, 1823), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1842, 1889), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'momentum': '(0.1)', 'epsilon': '(1e-05)'}), '(momentum=0.1, epsilon=1e-05)\n', (1860, 1889), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1909, 1915), 'tensorflow.keras.layers.ReLU', 'ReLU', ([], {}), '()\n', (1913, 1915), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((2223, 2260), 'tensorflow.image.resize', 'tf.image.resize', (['x', '(h, w)', '"""nearest"""'], {}), "(x, (h, w), 'nearest')\n", (2238, 2260), True, 'import tensorflow as tf\n'), ((604, 638), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['filters', '(1)'], {'use_bias': '(False)'}), '(filters, 1, use_bias=False)\n', (610, 638), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((652, 699), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'momentum': '(0.1)', 'epsilon': '(1e-05)'}), '(momentum=0.1, epsilon=1e-05)\n', (670, 699), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((712, 718), 'tensorflow.keras.layers.ReLU', 'ReLU', ([], {}), '()\n', (716, 718), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((732, 744), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.1)'], {}), '(0.1)\n', (739, 744), False, 'from tensorflow.keras.layers import Conv2D, BatchNormalization, ReLU, GlobalAveragePooling2D, Dropout\n'), ((1974, 1985), 'tensorflow.shape', 'tf.shape', (['x'], {}), '(x)\n', (1982, 1985), True, 'import tensorflow as tf\n'), ((1990, 2001), 'tensorflow.shape', 'tf.shape', (['x'], {}), '(x)\n', (1998, 2001), True, 'import tensorflow as tf\n')] |
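ASPP above concatenates a 1x1 branch, three dilated 3x3 branches and a global-pooling branch, then projects back down to the requested channel count. A quick hedged smoke test on a dummy feature map, assuming the ASPP class above is importable (shapes and filter counts are arbitrary):

import tensorflow as tf

aspp = ASPP(filters=256, dilation_rates=[3, 6, 9])
features = tf.random.normal((2, 32, 32, 512))   # NHWC feature map, e.g. a backbone output
out = aspp(features, training=False)
print(out.shape)                              # (2, 32, 32, 256): spatial size preserved, channels projected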
from calendar import timegm
from flask_login import UserMixin, login_user
from datetime import datetime
import flask_socketio as sio
from .Permissions import Permissions
from .Token import Token
from .database import Database
from .Room import Room, ROOMS
from .Logger import Logger
from .. import config
from .Layout import Layout
logged_users = {}
class User(UserMixin):
_id = None
def get_id(self):
return str(self.id())
def is_authenticated(self):
return self.token().valid()
def id(self):
return self._id
def token(self):
c = Database().get_cursor()
c.execute("SELECT TokenId FROM User WHERE Id = ?;", (self.id(),))
fetch = c.fetchone()
if fetch[0] is None:
return None
return Token.from_id(fetch[0])
def set_token(self, token: Token):
if not isinstance(token, Token):
raise TypeError(
f"Object of type `Token` expected, however type `{type(token)}` was passed")
db = Database()
db.get_cursor().execute(
'UPDATE User SET TokenId = ? WHERE Id = ?;', (token.id(), self.id()))
db.commit()
def name(self):
c = Database().get_cursor()
c.execute("SELECT Name FROM User WHERE Id = ?;", (self.id(),))
fetch = c.fetchone()
return fetch[0] if fetch and fetch[0] else None
def set_name(self, name: str):
if not isinstance(name, str):
raise TypeError(
f"Object of type `str` expected, however type `{type(name)}` was passed")
db = Database()
db.get_cursor().execute('UPDATE User SET Name = ? WHERE Id = ?;', (name, self.id()))
db.commit()
def sid(self):
c = Database().get_cursor()
c.execute(
'SELECT SessionId FROM SessionId WHERE UserId = ? ORDER BY Updated DESC LIMIT 1;', (self.id(),))
fetch = c.fetchone()
return fetch[0] if fetch and fetch[0] else None
def set_sid(self, sid: str):
if not isinstance(sid, str):
raise TypeError(
f"Object of type `str` expected, however type `{type(sid)}` was passed")
db = Database()
db.get_cursor().execute('INSERT OR REPLACE INTO SessionId(`UserId`, `SessionId`) VALUES(?, ?);',
(self.id(), sid))
db.commit()
def latest_room(self):
c = Database().get_cursor()
c.execute('SELECT LatestRoom FROM User WHERE Id = ?;', (self.id(),))
fetch = c.fetchone()
return Room(fetch[0]) if fetch and fetch[0] else None
def set_latest_room(self, latest_room: Room):
if not isinstance(latest_room, Room):
raise TypeError(
f"Object of type `Room` expected, however type `{type(latest_room)}` was passed")
db = Database()
db.get_cursor().execute('UPDATE User SET LatestRoom = ? WHERE Id = ?;',
(latest_room.id(), self.id()))
db.commit()
def join_room(self, room: Room):
if not isinstance(room, Room):
raise TypeError(
f"Object of type `Room` expected, however type `{type(room)}` was passed")
db = Database()
db.get_cursor().execute('INSERT OR REPLACE INTO UserRoom(`UserId`, `RoomId`) VALUES (?, ?);',
(self.id(), room.id()))
db.commit()
self.set_latest_room(room)
sio.join_room(room.name(), self.sid())
if room.id() not in ROOMS:
logfile_format = '%Y-%m-%d %H-%M-%S'
if "logfile-date-format" in config["server"]:
logfile_format = config["server"]["logfile-date-format"]
logfile_date_format = '{:'+logfile_format+"}"
logfile_date = logfile_date_format.format(datetime.now())
ROOMS[room.id()] = {
'log': Logger('log/{}-{}.log'.format(logfile_date, room.name())),
'users': {},
'listeners': {}
}
users = [User.from_id(id).serialize()
for id in ROOMS[room.id()]['users']]
ROOMS[room.id()]['users'][self.id()] = self
history = []
for event in ROOMS[room.id()]['log'].get_data():
if (event["type"] == "new_image" or event["type"] == "text") and ('receiver' not in event or event["receiver"] == self.id()):
history.append(event)
if event["type"] == "command" and event["user"]['id'] == self.id():
history.append(event)
sio.emit('status', {
'type': 'join',
'user': self.serialize(),
'room': room.serialize(),
'timestamp': timegm(datetime.now().utctimetuple())
}, room=room.name())
sio.emit('joined_room', {
'room': room.serialize(),
'layout': Layout.from_json_file(room.layout_path()).serialize(),
'users': users,
'history': history,
'self': self.serialize(),
'permissions': Permissions(self.token(), room).serialize()
}, room=self.sid())
ROOMS[room.id()]['log'].append(
{'type': "join", 'user': self.serialize(), 'room': room.serialize()})
print(self.name(), "joined room:", room.name())
def leave_room(self, room: Room):
if not isinstance(room, Room):
raise TypeError(
f"Object of type `Room` expected, however type `{type(room)}` was passed")
db = Database()
db.get_cursor().execute(
'DELETE FROM UserRoom WHERE UserId = ? AND RoomId = ?;', (self.id(), room.id()))
db.commit()
sio.leave_room(room.name(), self.sid())
sio.emit('left_room', {'room': room.serialize()}, room=self.sid())
ROOMS[room.id()]['log'].append(
{'type': "leave", 'user': self.serialize(), 'room': room.serialize()})
print(self.name(), "left room:", room.name())
if room.id() in ROOMS:
if self.id() in ROOMS[room.id()]['users']:
del ROOMS[room.id()]['users'][self.id()]
if not ROOMS[room.id()]:
del ROOMS[room.id()]
sio.close_room(room.name())
sio.emit('status', {
'type': 'leave',
'room': room.serialize(),
'user': self.serialize(),
'timestamp': timegm(datetime.now().utctimetuple())
}, room=room.name())
def rooms(self):
return [Room(id[0]) for id in Database().get_cursor().execute('SELECT RoomId FROM UserRoom WHERE UserId = ?',
(self.id(),))]
def in_room(self, room: Room):
if not isinstance(room, Room):
raise TypeError(
f"Object of type `Room` expected, however type `{type(room)}` was passed")
c = Database().get_cursor()
c.execute('SELECT COUNT(*) FROM UserRoom WHERE UserId = ? AND RoomId = ?',
(self.id(), room.id()))
fetch = c.fetchone()
return Room(fetch[0]) if fetch[0] else None
def serialize(self):
return {
'id': self.id(),
'name': self.name(),
'sid': self.sid(),
'token': self.token().serialize(),
'latest_room': self.latest_room().serialize(),
'rooms': [room.serialize() for room in self.rooms()]
}
@classmethod
def from_id(cls, id):
if not isinstance(id, int) and not isinstance(id, str):
raise TypeError(
f"Object of type `int` or `str` expected, however type `{type(id)}` was passed")
global logged_users
if id not in logged_users:
c = Database().get_cursor()
c.execute('SELECT COUNT(*) FROM User WHERE Id = ?', (id,))
logged_users[id] = cls(id) if c.fetchone()[0] != 0 else None
return logged_users[id]
@classmethod
def from_sid(cls, sid: str):
if not isinstance(sid, str):
raise TypeError(
f"Object of type `str` expected, however type `{type(sid)}` was passed")
c = Database().get_cursor()
c.execute('SELECT UserId FROM SessionId WHERE SessionId = ?', (sid,))
id = c.fetchone()
return cls(id[0]) if id[0] else None
@classmethod
def login(cls, name: str, token: Token):
if not token:
return None
if not isinstance(name, str):
raise TypeError(
f"Object of type `str` expected, however type `{type(name)}` was passed")
if not isinstance(token, Token):
raise TypeError(
f"Object of type `Token` expected, however type `{type(token)}` was passed")
if not token.valid():
return None
db = Database()
c = db.get_cursor()
c.execute('INSERT INTO User(`TokenId`, `Name`) VALUES (?, ?);',
(token.id(), name))
db.commit()
user = cls(c.lastrowid)
login_user(user)
return user
def __repr__(self):
return str(self.serialize())
def __init__(self, id: int):
if not isinstance(id, int) and not isinstance(id, str):
raise TypeError(
f"Object of type `int` or `str` expected, however type `{type(id)}` was passed")
self._id = int(id)
| [
"flask_login.login_user",
"datetime.datetime.now"
] | [((9066, 9082), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (9076, 9082), False, 'from flask_login import UserMixin, login_user\n'), ((3830, 3844), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3842, 3844), False, 'from datetime import datetime\n'), ((4728, 4742), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4740, 4742), False, 'from datetime import datetime\n'), ((6412, 6426), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6424, 6426), False, 'from datetime import datetime\n')] |
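User.login creates the database row, binds it to a Token and calls flask_login.login_user; join_room then persists the membership, joins the Socket.IO room, replays the relevant history and broadcasts a status event. A hedged sketch of how a socket handler might wire these together (the socketio instance, event name and payload keys are assumptions, not this project's actual API):

from flask import request

@socketio.on("join_room")
def on_join(payload):
    user = User.from_sid(request.sid)      # resolve the caller from the stored session id
    room = Room(payload["room_id"])         # Room is constructed from its id, as elsewhere in this module
    if user and room and not user.in_room(room):
        user.join_room(room)                 # persists membership, joins the sio room, replays history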
from sportsdb_setup import HGETestSetup, HGETestSetupArgs
from run_hge import HGE
import graphql
import multiprocessing
import json
import os
import docker
import ruamel.yaml as yaml
import cpuinfo
import subprocess
import threading
import time
import datetime
from colorama import Fore, Style
from plot import run_dash_server
import webbrowser
import pathlib
from urllib.parse import urlparse, urlunparse
import boto3
fileLoc = os.path.dirname(os.path.abspath(__file__))
def uri_path_join(uri, *paths):
p = urlparse(uri)
new_path = os.path.join(p.path, *paths)
return urlunparse(p._replace(path=new_path))
class HGEWrkBench(HGETestSetup):
wrk_docker_image = 'hasura/wrk:v0.3'
# We'll bind mount the lua script dir to this directory within the wrk container:
lua_dir = '/tmp/bench_scripts'
rps_steps = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000]
def __init__(
self, pg_url, remote_pg_url, pg_docker_image, hge_url=None,
remote_hge_url=None, hge_docker_image=None,
hge_args=[], skip_stack_build=False,
graphql_queries_file='queries.graphql', connections=50,
duration=300, results_hge_url = None, results_hge_admin_secret = None
):
self.load_queries(graphql_queries_file)
super().__init__(
pg_url = pg_url,
remote_pg_url = remote_pg_url,
pg_docker_image = pg_docker_image,
hge_url = hge_url,
remote_hge_url = remote_hge_url,
hge_docker_image = hge_docker_image,
hge_args = hge_args,
skip_stack_build = skip_stack_build
)
self.connections = connections
self.duration = duration
self.results_hge_url = results_hge_url
self.results_hge_admin_secret = results_hge_admin_secret
self.extract_cpu_info()
# NOTE: we generally want to do this just once; otherwise if we happen
# to be editing the tree while this script is running the shasum will
# keep changing:
self.server_shasum = self.get_server_shasum()
def load_queries(self, graphql_queries_file):
self.graphql_queries_file = graphql_queries_file
with open(self.graphql_queries_file) as f:
queries = f.read()
self.query_names = []
self.queries = []
for oper in graphql.parse(queries).definitions:
self.query_names.append(oper.name.value)
self.queries.append(oper)
def get_wrk2_params(self):
cpu_count = multiprocessing.cpu_count()
return {
'threads': cpu_count,
'connections': self.connections,
'duration': self.duration
}
def get_current_user(self):
return '{}:{}'.format(os.geteuid(), os.getegid())
def wrk2_test(self, query, rps):
def upload_files(files):
if self.upload_root_uri:
p = urlparse(self.upload_root_uri)
if p.scheme == 's3':
bucket = p.netloc
key = p.path.lstrip('/')
s3_client = boto3.client('s3')
for (f, f_key) in files:
s3_client.upload_file(f, bucket, os.path.join(key, f_key))
query_str = graphql.print_ast(query)
params = self.get_wrk2_params()
print(Fore.GREEN + "Running benchmark wrk2 for at {} req/s (duration: {}) for query\n".format(rps, params['duration']), query_str + Style.RESET_ALL)
bench_script = os.path.join(self.lua_dir, 'bench-wrk2.lua')
graphql_url = self.hge.url + '/v1/graphql'
timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
results_dir = self.results_root_dir
tests_path = [str(rps), timestamp]
results_dir = os.path.join(results_dir, *tests_path)
os.makedirs(results_dir, exist_ok=True)
wrk2_command = [
'wrk2',
'-R', str(rps),
'-t', str(params['threads']),
'-c', str(params['connections']),
'-d', str(params['duration']),
'--latency',
'-s', bench_script,
graphql_url,
query_str,
results_dir
]
volumes = self.get_scripts_vol()
volumes[results_dir] = {
'bind': results_dir,
'mode': 'rw'
}
self.docker_client = docker.from_env()
result = self.docker_client.containers.run(
self.wrk_docker_image,
detach = False,
stdout = True,
stderr = False,
command = wrk2_command,
network_mode = 'host',
environment = self.get_lua_env(),
volumes = volumes,
remove = True,
user = self.get_current_user()
).decode('ascii')
histogram_file = os.path.join(results_dir, 'latencies.hgrm')
histogram = self.get_latency_histogram(result, histogram_file)
summary_file = os.path.join(results_dir, 'summary.json')
with open(summary_file) as f:
summary = json.load(f)
latencies_file = os.path.join(results_dir, 'latencies')
def extract_data(v):
return v['data'] if isinstance(v, dict) and 'data' in v else v
tests_info = { k:extract_data(v) for (k, v) in self.gen_test_info(query, rps).items() }
tests_setup_file = os.path.join(results_dir, 'test_setup.json')
with open(tests_setup_file, 'w') as f:
f.write(json.dumps(tests_info, indent=2))
upload_files([
(x, os.path.join(*tests_path,y))
for (x,y) in [
(summary_file, 'summary.json'),
(latencies_file, 'latencies'),
(histogram_file, 'latencies.hgrm'),
(tests_setup_file, 'test_setup.json')
]
])
if self.upload_root_uri:
latencies_uri = uri_path_join(self.upload_root_uri, *tests_path, 'latencies')
else:
latencies_uri = pathlib.Path(latencies_file).as_uri()
self.insert_result(query, rps, summary, histogram, latencies_uri)
return (summary, histogram)
def get_latency_histogram(self, result, write_histogram_file):
const_true = lambda l : True
state_changes = {
'start' : {
(lambda l: 'Detailed Percentile spectrum' in l) : 'histogram_start'
},
'histogram_start': {
(lambda l: 'Value' in l and 'Percentile' in l): 'histogram_headers'
},
'histogram_headers': {
const_true: 'histogram_empty_line'
},
'histogram_empty_line' : {
const_true: 'histogram_values'
},
'histogram_values': {
(lambda l: l.strip().startswith('#')): 'histogram_summary'
},
'histogram_summary': {
(lambda l: not l.strip().startswith('#')): 'histogram_end'
}
}
state = 'start'
histogram = []
print(Fore.CYAN + "Latency histogram summary" + Style.RESET_ALL)
with open(write_histogram_file, 'w') as f:
for line in result.splitlines():
# Change the state
for (check, next_state) in state_changes[state].items():
if check(line):
state = next_state
break
if state == 'start':
continue
elif state == 'histogram_end':
break
if state == 'histogram_summary':
print(Fore.CYAN + line + Style.RESET_ALL)
if state in ['histogram_headers','histogram_values','histogram_summary']:
f.write(line+'\n')
if state == 'histogram_values':
(val, percentile, total_count, _) = line.strip().split()
histogram.append({
'percentile': float(percentile),
'latency': float(val),
'total_count': float(total_count)
})
return histogram
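    # For orientation, wrk2's --latency output contains a "Detailed Percentile spectrum"
    # section whose value rows the state machine above walks; the rows look roughly like
    # (numbers illustrative):
    #        Value   Percentile   TotalCount 1/(1-Percentile)
    #        1.646     0.500000         1235         2.00
    # and each one becomes {'percentile': 0.5, 'latency': 1.646, 'total_count': 1235.0}.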
# The appropriate Lua env vars for execution within wrk container:
def get_lua_env(self):
return {
'LUA_PATH': '/usr/share/lua/5.1/?.lua;' +
os.path.join(self.lua_dir, '?.lua') + ';;',
'LUA_CPATH': '/usr/lib/lua/5.1/?.so;/usr/lib/x86_64-linux-gnu/lua/5.1/?.so;;'
}
def get_scripts_vol(self):
return {
os.path.join(fileLoc, 'wrk-websocket-server', 'bench_scripts'): {
'bind' : self.lua_dir,
'mode' : 'ro'
}
}
def max_rps_test(self, query):
query_str = graphql.print_ast(query)
print(Fore.GREEN + "(Compute maximum Request per second) Running wrk benchmark for query\n", query_str + Style.RESET_ALL)
self.hge.graphql_q(query_str) # Test query once for errors
        bench_script = os.path.join(self.lua_dir, 'bench-wrk.lua')
graphql_url = self.hge.url + '/v1/graphql'
params = self.get_wrk2_params()
duration = 30
wrk_command = [
'wrk',
'-t', str(params['threads']),
'-c', str(params['connections']),
'-d', str(duration),
'--latency',
'-s', bench_script,
graphql_url,
query_str
]
self.docker_client = docker.from_env()
result = self.docker_client.containers.run(
self.wrk_docker_image,
detach = False,
stdout = False,
stderr = True,
command = wrk_command,
network_mode = 'host',
environment = self.get_lua_env(),
volumes = self.get_scripts_vol(),
remove = True,
user = self.get_current_user()
)
summary = json.loads(result)['summary']
# TODO explain this calculation. Why aren't we using wrk's reported 'max'? Should we call this avg_sustained_rps or something?
max_rps = round(summary['requests']/float(duration))
self.insert_max_rps_result(query, max_rps)
print("Max RPS", max_rps)
return max_rps
def get_version(self):
script = os.path.join(fileLoc, 'gen-version.sh')
return subprocess.check_output([script]).decode('ascii').strip()
def get_server_shasum(self):
script = os.path.join(fileLoc, 'get-server-sha.sh')
return subprocess.check_output([script]).decode('ascii').strip()
def extract_cpu_info(self):
self.cpu_info = cpuinfo.get_cpu_info()
for k in ['flags', 'python_version', 'hz_actual', 'hz_actual_raw']:
if self.cpu_info.get(k):
del self.cpu_info[k]
def get_results(self):
query = '''
query results {
latency: hge_bench_latest_results {
query_name
requests_per_sec
docker_image
version
latencies_uri
latency_histogram {
percentile
latency
}
}
max_rps: hge_bench_avg_query_max_rps {
query_name
docker_image
version
max_rps
}
}
'''
output = self.results_hge.graphql_q(query)
return output['data']
def set_cpu_info(self, insert_var):
cpu_key = self.cpu_info['brand'] + ' vCPUs: ' + str(self.cpu_info['count'])
insert_var['cpu']= {
'data' : {
'info': self.cpu_info,
'key': cpu_key
},
"on_conflict": {
"constraint": "cpu_info_pkey",
"update_columns": "key"
}
}
def set_query_info(self, insert_var, query):
insert_var["query"] = {
"data": {
"name" : query.name.value,
"query" : graphql.print_ast(query)
},
"on_conflict" : {
"constraint": "gql_query_query_key",
"update_columns": "query"
}
}
#TODO add executable shasum also
def set_version_info(self, insert_var):
if self.hge_docker_image:
insert_var["docker_image"] = self.hge_docker_image
else:
insert_var["version"] = self.get_version()
insert_var["server_shasum"] = self.server_shasum
insert_var['postgres_version'] = self.pg.get_server_version()
if self.scenario_name:
insert_var['scenario_name'] = self.scenario_name
def set_hge_args_env_vars(self, insert_var):
to_hide_env = ['HASURA_GRAPHQL_' + env for env in
[ 'ADMIN_SECRET', 'DATABASE_URL', 'JWT_SECRET']
]
env = { k:v for (k,v) in self.hge.get_hge_env().items() if (k.startswith('HASURA_GRAPHQL') and k not in to_hide_env) or k in ['GHCRTS'] }
args = self.hge.args
insert_var['hge_conf'] = {
'env': env,
'args': args
}
def gen_max_rps_insert_var(self, query, max_rps):
insert_var = dict()
self.set_cpu_info(insert_var)
self.set_query_info(insert_var, query)
self.set_version_info(insert_var)
self.set_hge_args_env_vars(insert_var)
insert_var['max_rps'] = max_rps
insert_var['wrk_parameters'] = self.get_wrk2_params()
return insert_var
def plot_results(self):
def open_plot_in_browser():
time.sleep(1)
webbrowser.open_new_tab('http://127.0.0.1:8050/')
threading.Thread(target=open_plot_in_browser).start()
run_dash_server(self.get_results())
# Collect info about the test environment
def gen_test_info(self, query, rps):
test_info = dict()
self.set_cpu_info(test_info)
self.set_query_info(test_info, query)
self.set_version_info(test_info)
self.set_hge_args_env_vars(test_info)
test_info["requests_per_sec"] = rps
test_info['wrk2_parameters'] = self.get_wrk2_params()
return test_info
def gen_result_insert_var(self, query, rps, summary, latency_histogram, latencies_uri):
insert_var = self.gen_test_info(query, rps)
insert_var["summary"] = summary
insert_var['latency_histogram'] = {
'data' : latency_histogram
}
insert_var['latencies_uri'] = latencies_uri
return insert_var
def insert_result(self, query, rps, summary, latency_histogram, latencies_uri):
result_var = self.gen_result_insert_var(query, rps, summary, latency_histogram, latencies_uri)
insert_query = """
mutation insertResult($result: hge_bench_results_insert_input!) {
insert_hge_bench_results(objects: [$result]){
affected_rows
}
}"""
variables = {'result': result_var}
self.results_hge.graphql_q(insert_query, variables)
def insert_max_rps_result(self, query, max_rps):
result_var = self.gen_max_rps_insert_var(query, max_rps)
insert_query = """
mutation insertMaxRps($result: hge_bench_query_max_rps_insert_input!) {
insert_hge_bench_query_max_rps(objects: [$result]){
affected_rows
}
}"""
variables = {'result': result_var}
self.results_hge.graphql_q(insert_query, variables)
def setup_results_schema(self):
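# Results are written through a (possibly separate) HGE instance; default to the engine under
# test and track the tables from results_schema.yaml the first time this runs.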
if not self.results_hge_url:
self.results_hge_url = self.hge.url
self.results_hge_admin_secret = self.hge.admin_secret()
if self.results_hge_admin_secret:
results_hge_args = ['--admin-secret', self.results_hge_admin_secret]
else:
results_hge_args = []
self.results_hge = HGE(None, None, args=results_hge_args, log_file=None, url=self.results_hge_url)
results_table = {
'name' : 'results',
'schema': 'hge_bench'
}
if results_table in self.results_hge.get_all_tracked_tables():
return
schema_file = os.path.join(fileLoc, 'results_schema.yaml')
with open(schema_file) as f:
queries = yaml.safe_load(f)
self.results_hge.run_bulk(queries)
def run_query_benchmarks(self):
def get_results_root_dir(query):
if self.hge_docker_image:
ver_info = 'docker-tag-' + self.hge_docker_image.split(':')[1]
else:
ver_info = self.get_version()
query_name = query.name.value
# Store versioned runs under e.g. test_output/benchmark_runs/<hge_version>/
results_root_dir = os.path.abspath(os.path.join(self.work_dir, 'benchmark_runs'))
return os.path.join(results_root_dir, ver_info, query_name)
for query in self.queries:
try:
self.results_root_dir = get_results_root_dir(query)
max_rps = self.max_rps_test(query)
# Only benchmark at request rates well below the measured maximum requests per second
rps_steps = [ r for r in self.rps_steps if r < 0.6*max_rps]
print("Benchmarking queries with wrk2 for the following requests/sec", rps_steps)
for rps in rps_steps:
if rps < int(0.6*max_rps):
self.wrk2_test(query, rps)
except Exception:
print(Fore.RED + "Benchmarking Graphql Query '" + query.name.value + "' failed" + Style.RESET_ALL)
raise
def run_tests(self):
with self.graphql_engines_setup():
self.setup_results_schema()
if self.run_benchmarks:
self.run_query_benchmarks()
if not self.skip_plots:
self.plot_results()
class HGEWrkBenchArgs(HGETestSetupArgs):
def __init__(self):
self.set_arg_parse_options()
self.parse_args()
def set_arg_parse_options(self):
HGETestSetupArgs.set_arg_parse_options(self)
self.set_wrk_options()
def parse_args(self):
HGETestSetupArgs.parse_args(self)
self.parse_wrk_options()
def set_wrk_options(self):
def boolean_string(s):
s = s.lower()
if s not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s == 'true'
wrk_opts = self.arg_parser.add_argument_group('wrk')
wrk_opts.add_argument('--queries-file', metavar='HASURA_BENCH_QUERIES_FILE', help='Queries file for benchmarks', default='queries.graphql')
wrk_opts.add_argument('--connections', metavar='HASURA_BENCH_CONNECTIONS', help='Total number of open connections', default=50)
wrk_opts.add_argument('--duration', metavar='HASURA_BENCH_DURATION', help='Duration of tests in seconds', default=300)
wrk_opts.add_argument('--upload-root-uri', metavar='HASURA_BENCH_UPLOAD_ROOT_URI', help='The URI to which the latency results should be uploaded. Currently only s3 is supported', required=False)
wrk_opts.add_argument('--set-scenario-name', metavar='HASURA_BENCH_SCENARIO_NAME', help='Set a name for the test scenario. This will be shown in logs', required=False)
wrk_opts.add_argument('--results-hge-url', metavar='HASURA_BENCH_RESULTS_HGE_URL', help='The GraphQL engine to which the results should be uploaded', required=False)
wrk_opts.add_argument('--results-hge-admin-secret', metavar='HASURA_BENCH_RESULTS_HGE_ADMIN_SECRET', help='Admin secret of the GraphQL engine to which the results should be uploaded', required=False)
wrk_opts.add_argument('--skip-plots', help='Skip plotting', action='store_true', required=False)
wrk_opts.add_argument('--run-benchmarks', metavar='HASURA_BENCH_RUN_BENCHMARKS', help='Whether benchmarks should be run or not', default=True, type=boolean_string)
def get_s3_caller_identity(self):
return boto3.client('sts').get_caller_identity()
def parse_wrk_options(self):
self.connections, self.duration, self.graphql_queries_file, self.res_hge_url, upload_root_uri, self.res_hge_admin_secret, self.run_benchmarks, self.scenario_name = \
self.get_params([
('connections', 'HASURA_BENCH_CONNECTIONS'),
('duration', 'HASURA_BENCH_DURATION'),
('queries_file', 'HASURA_BENCH_QUERIES_FILE'),
('results_hge_url', 'HASURA_BENCH_RESULTS_HGE_URL'),
('upload_root_uri', 'HASURA_BENCH_UPLOAD_ROOT_URI'),
('results_hge_admin_secret', 'HASURA_BENCH_RESULTS_HGE_ADMIN_SECRET'),
('run_benchmarks', 'HASURA_BENCH_RUN_BENCHMARKS'),
('set_scenario_name', 'HASURA_BENCH_SCENARIO_NAME'),
])
self.upload_root_uri = None
if upload_root_uri:
p = urlparse(upload_root_uri)
if p.scheme == 's3':
# Check if aws credentials are set
self.get_s3_caller_identity()
self.upload_root_uri = upload_root_uri
self.skip_plots = self.parsed_args.skip_plots
class HGEWrkBenchWithArgs(HGEWrkBenchArgs, HGEWrkBench):
def __init__(self):
HGEWrkBenchArgs.__init__(self)
HGEWrkBench.__init__(
self,
pg_url = self.pg_url,
remote_pg_url = self.remote_pg_url,
pg_docker_image = self.pg_docker_image,
hge_url = self.hge_url,
remote_hge_url = self.remote_hge_url,
hge_docker_image = self.hge_docker_image,
hge_args = self.hge_args,
skip_stack_build = self.skip_stack_build,
graphql_queries_file = self.graphql_queries_file,
connections = self.connections,
duration = self.duration
)
if __name__ == "__main__":
bench = HGEWrkBenchWithArgs()
bench.run_tests()
| [
"run_hge.HGE",
"boto3.client",
"multiprocessing.cpu_count",
"time.sleep",
"sportsdb_setup.HGETestSetupArgs.set_arg_parse_options",
"graphql.parse",
"cpuinfo.get_cpu_info",
"pathlib.Path",
"json.dumps",
"webbrowser.open_new_tab",
"subprocess.check_output",
"json.loads",
"sportsdb_setup.HGETes... | [((447, 472), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (462, 472), False, 'import os\n'), ((515, 528), 'urllib.parse.urlparse', 'urlparse', (['uri'], {}), '(uri)\n', (523, 528), False, 'from urllib.parse import urlparse, urlunparse\n'), ((544, 572), 'os.path.join', 'os.path.join', (['p.path', '*paths'], {}), '(p.path, *paths)\n', (556, 572), False, 'import os\n'), ((2543, 2570), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (2568, 2570), False, 'import multiprocessing\n'), ((3285, 3309), 'graphql.print_ast', 'graphql.print_ast', (['query'], {}), '(query)\n', (3302, 3309), False, 'import graphql\n'), ((3531, 3575), 'os.path.join', 'os.path.join', (['self.lua_dir', '"""bench-wrk2.lua"""'], {}), "(self.lua_dir, 'bench-wrk2.lua')\n", (3543, 3575), False, 'import os\n'), ((3810, 3848), 'os.path.join', 'os.path.join', (['results_dir', '*tests_path'], {}), '(results_dir, *tests_path)\n', (3822, 3848), False, 'import os\n'), ((3857, 3896), 'os.makedirs', 'os.makedirs', (['results_dir'], {'exist_ok': '(True)'}), '(results_dir, exist_ok=True)\n', (3868, 3896), False, 'import os\n'), ((4411, 4428), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (4426, 4428), False, 'import docker\n'), ((4868, 4911), 'os.path.join', 'os.path.join', (['results_dir', '"""latencies.hgrm"""'], {}), "(results_dir, 'latencies.hgrm')\n", (4880, 4911), False, 'import os\n'), ((5007, 5048), 'os.path.join', 'os.path.join', (['results_dir', '"""summary.json"""'], {}), "(results_dir, 'summary.json')\n", (5019, 5048), False, 'import os\n'), ((5147, 5185), 'os.path.join', 'os.path.join', (['results_dir', '"""latencies"""'], {}), "(results_dir, 'latencies')\n", (5159, 5185), False, 'import os\n'), ((5414, 5458), 'os.path.join', 'os.path.join', (['results_dir', '"""test_setup.json"""'], {}), "(results_dir, 'test_setup.json')\n", (5426, 5458), False, 'import os\n'), ((8848, 8872), 'graphql.print_ast', 'graphql.print_ast', (['query'], {}), '(query)\n', (8865, 8872), False, 'import graphql\n'), ((9093, 9138), 'os.path.join', 'os.path.join', (["(self.lua_dir + '/bench-wrk.lua')"], {}), "(self.lua_dir + '/bench-wrk.lua')\n", (9105, 9138), False, 'import os\n'), ((9559, 9576), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (9574, 9576), False, 'import docker\n'), ((10387, 10426), 'os.path.join', 'os.path.join', (['fileLoc', '"""gen-version.sh"""'], {}), "(fileLoc, 'gen-version.sh')\n", (10399, 10426), False, 'import os\n'), ((10551, 10593), 'os.path.join', 'os.path.join', (['fileLoc', '"""get-server-sha.sh"""'], {}), "(fileLoc, 'get-server-sha.sh')\n", (10563, 10593), False, 'import os\n'), ((10724, 10746), 'cpuinfo.get_cpu_info', 'cpuinfo.get_cpu_info', ([], {}), '()\n', (10744, 10746), False, 'import cpuinfo\n'), ((15704, 15783), 'run_hge.HGE', 'HGE', (['None', 'None'], {'args': 'results_hge_args', 'log_file': 'None', 'url': 'self.results_hge_url'}), '(None, None, args=results_hge_args, log_file=None, url=self.results_hge_url)\n', (15707, 15783), False, 'from run_hge import HGE\n'), ((15998, 16042), 'os.path.join', 'os.path.join', (['fileLoc', '"""results_schema.yaml"""'], {}), "(fileLoc, 'results_schema.yaml')\n", (16010, 16042), False, 'import os\n'), ((17924, 17968), 'sportsdb_setup.HGETestSetupArgs.set_arg_parse_options', 'HGETestSetupArgs.set_arg_parse_options', (['self'], {}), '(self)\n', (17962, 17968), False, 'from sportsdb_setup import HGETestSetup, HGETestSetupArgs\n'), ((18035, 18068), 
'sportsdb_setup.HGETestSetupArgs.parse_args', 'HGETestSetupArgs.parse_args', (['self'], {}), '(self)\n', (18062, 18068), False, 'from sportsdb_setup import HGETestSetup, HGETestSetupArgs\n'), ((2364, 2386), 'graphql.parse', 'graphql.parse', (['queries'], {}), '(queries)\n', (2377, 2386), False, 'import graphql\n'), ((2778, 2790), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (2788, 2790), False, 'import os\n'), ((2792, 2804), 'os.getegid', 'os.getegid', ([], {}), '()\n', (2802, 2804), False, 'import os\n'), ((5109, 5121), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5118, 5121), False, 'import json\n'), ((8633, 8695), 'os.path.join', 'os.path.join', (['fileLoc', '"""wrk-websocket-server"""', '"""bench_scripts"""'], {}), "(fileLoc, 'wrk-websocket-server', 'bench_scripts')\n", (8645, 8695), False, 'import os\n'), ((10008, 10026), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (10018, 10026), False, 'import json\n'), ((13501, 13514), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (13511, 13514), False, 'import time\n'), ((13527, 13576), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['"""http://127.0.0.1:8050/"""'], {}), "('http://127.0.0.1:8050/')\n", (13550, 13576), False, 'import webbrowser\n'), ((16102, 16119), 'ruamel.yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (16116, 16119), True, 'import ruamel.yaml as yaml\n'), ((16666, 16718), 'os.path.join', 'os.path.join', (['results_root_dir', 'ver_info', 'query_name'], {}), '(results_root_dir, ver_info, query_name)\n', (16678, 16718), False, 'import os\n'), ((20806, 20831), 'urllib.parse.urlparse', 'urlparse', (['upload_root_uri'], {}), '(upload_root_uri)\n', (20814, 20831), False, 'from urllib.parse import urlparse, urlunparse\n'), ((2934, 2964), 'urllib.parse.urlparse', 'urlparse', (['self.upload_root_uri'], {}), '(self.upload_root_uri)\n', (2942, 2964), False, 'from urllib.parse import urlparse, urlunparse\n'), ((3647, 3670), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3668, 3670), False, 'import datetime\n'), ((5526, 5558), 'json.dumps', 'json.dumps', (['tests_info'], {'indent': '(2)'}), '(tests_info, indent=2)\n', (5536, 5558), False, 'import json\n'), ((11919, 11943), 'graphql.print_ast', 'graphql.print_ast', (['query'], {}), '(query)\n', (11936, 11943), False, 'import graphql\n'), ((13585, 13630), 'threading.Thread', 'threading.Thread', ([], {'target': 'open_plot_in_browser'}), '(target=open_plot_in_browser)\n', (13601, 13630), False, 'import threading\n'), ((16600, 16645), 'os.path.join', 'os.path.join', (['self.work_dir', '"""benchmark_runs"""'], {}), "(self.work_dir, 'benchmark_runs')\n", (16612, 16645), False, 'import os\n'), ((19891, 19910), 'boto3.client', 'boto3.client', (['"""sts"""'], {}), "('sts')\n", (19903, 19910), False, 'import boto3\n'), ((3117, 3135), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (3129, 3135), False, 'import boto3\n'), ((5600, 5628), 'os.path.join', 'os.path.join', (['*tests_path', 'y'], {}), '(*tests_path, y)\n', (5612, 5628), False, 'import os\n'), ((6063, 6091), 'pathlib.Path', 'pathlib.Path', (['latencies_file'], {}), '(latencies_file)\n', (6075, 6091), False, 'import pathlib\n'), ((8427, 8462), 'os.path.join', 'os.path.join', (['self.lua_dir', '"""?.lua"""'], {}), "(self.lua_dir, '?.lua')\n", (8439, 8462), False, 'import os\n'), ((10442, 10475), 'subprocess.check_output', 'subprocess.check_output', (['[script]'], {}), '([script])\n', (10465, 10475), False, 'import subprocess\n'), ((10609, 10642), 
'subprocess.check_output', 'subprocess.check_output', (['[script]'], {}), '([script])\n', (10632, 10642), False, 'import subprocess\n'), ((3238, 3262), 'os.path.join', 'os.path.join', (['key', 'f_key'], {}), '(key, f_key)\n', (3250, 3262), False, 'import os\n')] |
import six
import time
import signal
import multiprocessing
from functools import partial
import numpy as np
from astropy.utils.console import (_get_stdout, isatty, isiterable,
human_file_size, _CAN_RESIZE_TERMINAL,
terminal_size, color_print, human_time)
import contextlib
import warnings
try:
import builtins
except ImportError:
# python2
import __builtin__ as builtins
'''
Copyright (c) 2011-2016, Astropy Developers
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name of the Astropy Team nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
class ProgressBar(six.Iterator):
"""
A class to display a progress bar in the terminal.
It is designed to be used either with the ``with`` statement::
with ProgressBar(len(items)) as bar:
for item in enumerate(items):
bar.update()
or as a generator::
for item in ProgressBar(items):
item.process()
"""
def __init__(self, total_or_items, ipython_widget=False, file=None):
"""
Parameters
----------
total_or_items : int or sequence
If an int, the number of increments in the process being
tracked. If a sequence, the items to iterate over.
ipython_widget : bool, optional
If `True`, the progress bar will display as an IPython
notebook widget.
file : writable file-like object, optional
The file to write the progress bar to. Defaults to
`sys.stdout`. If `file` is not a tty (as determined by
calling its `isatty` member, if any, or special case hacks
to detect the IPython console), the progress bar will be
completely silent.
"""
ipython_widget = False
# if ipython_widget:
# # Import only if ipython_widget, i.e., widget in IPython
# # notebook
# if ipython_major_version < 4:
# from IPython.html import widgets
# else:
# from ipywidgets import widgets
# from IPython.display import display
if file is None:
file = _get_stdout()
if not isatty(file) and not ipython_widget:
self.update = self._silent_update
self._silent = True
else:
self._silent = False
if isiterable(total_or_items):
self._items = iter(total_or_items)
self._total = len(total_or_items)
else:
try:
self._total = int(total_or_items)
except TypeError:
raise TypeError("First argument must be int or sequence")
else:
self._items = iter(range(self._total))
self._file = file
self._start_time = time.time()
self._human_total = human_file_size(self._total)
self._ipython_widget = ipython_widget
self._signal_set = False
if not ipython_widget:
self._should_handle_resize = (
_CAN_RESIZE_TERMINAL and self._file.isatty())
self._handle_resize()
if self._should_handle_resize:
signal.signal(signal.SIGWINCH, self._handle_resize)
self._signal_set = True
self.update(0)
def _handle_resize(self, signum=None, frame=None):
terminal_width = terminal_size(self._file)[1]
self._bar_length = terminal_width - 37
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not self._silent:
if exc_type is None:
self.update(self._total)
self._file.write('\n')
self._file.flush()
if self._signal_set:
signal.signal(signal.SIGWINCH, signal.SIG_DFL)
def __iter__(self):
return self
def __next__(self):
try:
rv = next(self._items)
except StopIteration:
self.__exit__(None, None, None)
raise
else:
self.update()
return rv
def update(self, value=None):
"""
Update progress bar via the console or notebook accordingly.
"""
# Update self.value
if value is None:
value = self._current_value + 1
self._current_value = value
# Choose the appropriate environment
if self._ipython_widget:
self._update_ipython_widget(value)
else:
self._update_console(value)
def _update_console(self, value=None):
"""
Update the progress bar to the given value (out of the total
given to the constructor).
"""
if self._total == 0:
frac = 1.0
else:
frac = float(value) / float(self._total)
file = self._file
write = file.write
if frac > 1:
bar_fill = int(self._bar_length)
else:
bar_fill = int(float(self._bar_length) * frac)
write('\r|')
color_print('=' * bar_fill, 'blue', file=file, end='')
if bar_fill < self._bar_length:
color_print('>', 'green', file=file, end='')
write('-' * (self._bar_length - bar_fill - 1))
write('|')
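# After the bar itself: show elapsed time when finished, otherwise an ETA extrapolated
# from the elapsed time and the remaining fraction of the work.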
if value >= self._total:
t = time.time() - self._start_time
prefix = ' '
elif value <= 0:
t = None
prefix = ''
else:
t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
prefix = ' ETA '
write(' {0:>4s}/{1:>4s}'.format(
human_file_size(value),
self._human_total))
write(' ({0:>6s}%)'.format('{0:.2f}'.format(frac * 100.0)))
write(prefix)
if t is not None:
write(human_time(t))
self._file.flush()
def _update_ipython_widget(self, value=None):
"""
Update the progress bar to the given value (out of a total
given to the constructor).
This method is for use in the IPython notebook 2+.
"""
pass
# Create and display an empty progress bar widget,
# if none exists.
# if not hasattr(self, '_widget'):
# # Import only if an IPython widget, i.e., widget in iPython NB
# if ipython_major_version < 4:
# from IPython.html import widgets
# self._widget = widgets.FloatProgressWidget()
# else:
# from ipywidgets import widgets
# self._widget = widgets.FloatProgress()
# from IPython.display import display
# display(self._widget)
# self._widget.value = 0
# # Calculate percent completion, and update progress bar
# percent = (value / self._total) * 100
# self._widget.value = percent
# self._widget.description = \
# ' ({0:>6s}%)'.format('{0:.2f}'.format(percent))
def _silent_update(self, value=None):
pass
@classmethod
def map(cls, function, items, multiprocess=False, file=None, chunksize=100,
item_len=None, nprocesses=None, **pool_kwargs):
"""
Does a `map` operation while displaying a progress bar with
percentage complete.
::
def work(i):
print(i)
ProgressBar.map(work, range(50))
Parameters
----------
function : function
Function to call for each step
items : sequence
Sequence where each element is a tuple of arguments to pass to
*function*.
multiprocess : bool, optional
If `True`, use the `multiprocessing` module to distribute each
task to a different processor core.
file : writeable file-like object, optional
The file to write the progress bar to. Defaults to
`sys.stdout`. If `file` is not a tty (as determined by
calling its `isatty` member, if any), the progress bar will
be completely silent.
chunksize : int, optional
Update the progress bar at least every *chunksize* steps (default: 100).
If ``multiprocess`` is `True`, this will affect the size
of the chunks of ``items`` that are submitted as separate tasks
to the process pool. A large step size may make the job
complete faster if ``items`` is very long.
"""
results = []
if file is None:
file = _get_stdout()
if item_len is not None:
assert isinstance(item_len, int)
if hasattr(items, "__len__"):
assert item_len == len(items)
else:
if hasattr(items, "__len__"):
item_len = len(items)
else:
# Will convert to iterable. Not a good thing to do with
# large inputs.
items = list(items)
item_len = len(items)
with cls(item_len, file=file) as bar:
if not multiprocess:
# Here chunksize is just how frequently the progress gets
# updated
if chunksize is None:
chunksize = np.floor(item_len / 100.).astype(int)
for i, item in enumerate(items):
results.append(function(item))
if (i % chunksize) == 0:
bar.update(i)
else:
max_proc = multiprocessing.cpu_count()
if nprocesses is None:
nprocesses = max_proc
elif nprocesses > max_proc:
nprocesses = max_proc
if chunksize is None:
chunksize = choose_chunksize(nprocesses, item_len)
pool = multiprocessing.Pool(nprocesses, **pool_kwargs)
for i, out in enumerate(pool.imap_unordered(function,
items,
chunksize=chunksize)):
bar.update(i)
results.append(out)
pool.close()
pool.join()
return results
'''
Copyright (c) 2014, spectral-cube developers
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
@contextlib.contextmanager
def _map_context(numcores, verbose=False, num_jobs=None, chunksize=None,
**pool_kwargs):
"""
Mapping context manager to allow parallel mapping or regular mapping
depending on the number of cores specified.
"""
if verbose:
if numcores is not None and numcores > 1:
parallel = True
else:
numcores = 1
parallel = False
map = lambda func, items: \
ProgressBar.map(func, items,
nprocesses=numcores,
multiprocess=parallel,
item_len=num_jobs,
chunksize=chunksize,
**pool_kwargs)
else:
if numcores is not None and numcores > 1:
try:
import multiprocessing
pool = multiprocessing.Pool(processes=numcores, **pool_kwargs)
if chunksize is None:
chunksize = 1
map = partial(pool.map, chunksize=chunksize)
parallel = True
except ImportError:
map = builtins.map
warnings.warn("Could not import multiprocessing. "
"map will be non-parallel.")
parallel = False
else:
parallel = False
map = builtins.map
try:
yield map
finally:
# ProgressBar.map already closes the pool
if not verbose and parallel:
pool.close()
pool.join()
def choose_chunksize(nprocesses, njobs):
'''
Split the chunks into roughly equal portions.
'''
# Auto split into close to equal chunks
if njobs % nprocesses == 0:
chunksize = njobs // nprocesses  # integer chunksize (Python 3: '/' would give a float)
else:
# Split into smaller chunks that are still
# roughly equal, but won't have any small
# leftovers that would slow things down
chunksize = njobs // (nprocesses + 1)  # integer chunksize (Python 3: '/' would give a float)
return chunksize if chunksize > 0 else 1
| [
"astropy.utils.console.color_print",
"astropy.utils.console.terminal_size",
"signal.signal",
"astropy.utils.console.human_time",
"astropy.utils.console.human_file_size",
"numpy.floor",
"multiprocessing.cpu_count",
"astropy.utils.console.isatty",
"functools.partial",
"multiprocessing.Pool",
"astr... | [((3767, 3793), 'astropy.utils.console.isiterable', 'isiterable', (['total_or_items'], {}), '(total_or_items)\n', (3777, 3793), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((4200, 4211), 'time.time', 'time.time', ([], {}), '()\n', (4209, 4211), False, 'import time\n'), ((4240, 4268), 'astropy.utils.console.human_file_size', 'human_file_size', (['self._total'], {}), '(self._total)\n', (4255, 4268), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((6449, 6503), 'astropy.utils.console.color_print', 'color_print', (["('=' * bar_fill)", '"""blue"""'], {'file': 'file', 'end': '""""""'}), "('=' * bar_fill, 'blue', file=file, end='')\n", (6460, 6503), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((3563, 3576), 'astropy.utils.console._get_stdout', '_get_stdout', ([], {}), '()\n', (3574, 3576), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((4775, 4800), 'astropy.utils.console.terminal_size', 'terminal_size', (['self._file'], {}), '(self._file)\n', (4788, 4800), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((6556, 6600), 'astropy.utils.console.color_print', 'color_print', (['""">"""', '"""green"""'], {'file': 'file', 'end': '""""""'}), "('>', 'green', file=file, end='')\n", (6567, 6600), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((9954, 9967), 'astropy.utils.console._get_stdout', '_get_stdout', ([], {}), '()\n', (9965, 9967), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((3593, 3605), 'astropy.utils.console.isatty', 'isatty', (['file'], {}), '(file)\n', (3599, 3605), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((4578, 4629), 'signal.signal', 'signal.signal', (['signal.SIGWINCH', 'self._handle_resize'], {}), '(signal.SIGWINCH, self._handle_resize)\n', (4591, 4629), False, 'import signal\n'), ((5172, 5218), 'signal.signal', 'signal.signal', (['signal.SIGWINCH', 'signal.SIG_DFL'], {}), '(signal.SIGWINCH, signal.SIG_DFL)\n', (5185, 5218), False, 'import signal\n'), ((6729, 6740), 'time.time', 'time.time', ([], {}), '()\n', (6738, 6740), False, 'import time\n'), ((7028, 7050), 'astropy.utils.console.human_file_size', 'human_file_size', (['value'], {}), '(value)\n', (7043, 7050), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((7218, 7231), 'astropy.utils.console.human_time', 'human_time', (['t'], {}), '(t)\n', (7228, 7231), False, 'from astropy.utils.console import _get_stdout, isatty, isiterable, human_file_size, _CAN_RESIZE_TERMINAL, terminal_size, color_print, human_time\n'), ((10941, 10968), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (10966, 10968), False, 
'import multiprocessing\n'), ((11270, 11317), 'multiprocessing.Pool', 'multiprocessing.Pool', (['nprocesses'], {}), '(nprocesses, **pool_kwargs)\n', (11290, 11317), False, 'import multiprocessing\n'), ((14088, 14143), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {'processes': 'numcores'}), '(processes=numcores, **pool_kwargs)\n', (14108, 14143), False, 'import multiprocessing\n'), ((14239, 14277), 'functools.partial', 'partial', (['pool.map'], {'chunksize': 'chunksize'}), '(pool.map, chunksize=chunksize)\n', (14246, 14277), False, 'from functools import partial\n'), ((14393, 14470), 'warnings.warn', 'warnings.warn', (['"""Could not import multiprocessing. map will be non-parallel."""'], {}), "('Could not import multiprocessing. map will be non-parallel.')\n", (14406, 14470), False, 'import warnings\n'), ((6891, 6902), 'time.time', 'time.time', ([], {}), '()\n', (6900, 6902), False, 'import time\n'), ((10675, 10701), 'numpy.floor', 'np.floor', (['(item_len / 100.0)'], {}), '(item_len / 100.0)\n', (10683, 10701), True, 'import numpy as np\n')] |
import numpy
from keras.models import Sequential
from keras.layers import Dense
#loading pima indians dataset from the csv
# fix random seed for reproducibility
numpy.random.seed(7)
dataset = numpy.loadtxt(
"./data/pima-indians-diabetes.csv", delimiter=","
)
#split into input (X) and (Y) variables
X = dataset[:,0:8]
Y = dataset[:,8]
#create model
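# Fully-connected binary classifier: 8 inputs -> 12 -> 8 -> 1 (sigmoid output).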
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
#compile model
model.compile(
loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']
)
#fit the model
model.fit(X, Y, epochs=150, batch_size=10)
#evaluate the model
scores = model.evaluate(X, Y)
print(
"\n%s: %.2f%%" % (model.metrics_names[1], scores[1]*100)
)
| [
"keras.layers.Dense",
"numpy.loadtxt",
"numpy.random.seed",
"keras.models.Sequential"
] | [((162, 182), 'numpy.random.seed', 'numpy.random.seed', (['(7)'], {}), '(7)\n', (179, 182), False, 'import numpy\n'), ((193, 257), 'numpy.loadtxt', 'numpy.loadtxt', (['"""./data/pima-indians-diabetes.csv"""'], {'delimiter': '""","""'}), "('./data/pima-indians-diabetes.csv', delimiter=',')\n", (206, 257), False, 'import numpy\n'), ((366, 378), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (376, 378), False, 'from keras.models import Sequential\n'), ((389, 430), 'keras.layers.Dense', 'Dense', (['(12)'], {'input_dim': '(8)', 'activation': '"""relu"""'}), "(12, input_dim=8, activation='relu')\n", (394, 430), False, 'from keras.layers import Dense\n'), ((442, 469), 'keras.layers.Dense', 'Dense', (['(8)'], {'activation': '"""relu"""'}), "(8, activation='relu')\n", (447, 469), False, 'from keras.layers import Dense\n'), ((481, 511), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (486, 511), False, 'from keras.layers import Dense\n')] |
"""security converge queries
Revision ID: e37912a26567
Revises: 42b4c9e01447
Create Date: 2020-12-16 12:15:28.291777
"""
# revision identifiers, used by Alembic.
revision = "e37912a26567"
down_revision = "42b4c9e01447"
from alembic import op
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
from rabbitai.migrations.shared.security_converge import (
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
migrate_roles,
Pvm,
)
NEW_PVMS = {"Query": ("can_read",)}
PVM_MAP = {
Pvm("QueryView", "can_list"): (Pvm("Query", "can_read"),),
Pvm("QueryView", "can_show"): (Pvm("Query", "can_read"),),
}
def upgrade():
bind = op.get_bind()
session = Session(bind=bind)
# Add the new permissions on the migration itself
add_pvms(session, NEW_PVMS)
migrate_roles(session, PVM_MAP)
try:
session.commit()
except SQLAlchemyError as ex:
print(f"An error occurred while upgrading permissions: {ex}")
session.rollback()
def downgrade():
bind = op.get_bind()
session = Session(bind=bind)
# Add the old permissions on the migration itself
add_pvms(session, get_reversed_new_pvms(PVM_MAP))
migrate_roles(session, get_reversed_pvm_map(PVM_MAP))
try:
session.commit()
except SQLAlchemyError as ex:
print(f"An error occurred while downgrading permissions: {ex}")
session.rollback()
pass
| [
"rabbitai.migrations.shared.security_converge.get_reversed_new_pvms",
"alembic.op.get_bind",
"rabbitai.migrations.shared.security_converge.migrate_roles",
"sqlalchemy.orm.Session",
"rabbitai.migrations.shared.security_converge.Pvm",
"rabbitai.migrations.shared.security_converge.add_pvms",
"rabbitai.migr... | [((534, 562), 'rabbitai.migrations.shared.security_converge.Pvm', 'Pvm', (['"""QueryView"""', '"""can_list"""'], {}), "('QueryView', 'can_list')\n", (537, 562), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((597, 625), 'rabbitai.migrations.shared.security_converge.Pvm', 'Pvm', (['"""QueryView"""', '"""can_show"""'], {}), "('QueryView', 'can_show')\n", (600, 625), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((686, 699), 'alembic.op.get_bind', 'op.get_bind', ([], {}), '()\n', (697, 699), False, 'from alembic import op\n'), ((714, 732), 'sqlalchemy.orm.Session', 'Session', ([], {'bind': 'bind'}), '(bind=bind)\n', (721, 732), False, 'from sqlalchemy.orm import Session\n'), ((792, 819), 'rabbitai.migrations.shared.security_converge.add_pvms', 'add_pvms', (['session', 'NEW_PVMS'], {}), '(session, NEW_PVMS)\n', (800, 819), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((824, 855), 'rabbitai.migrations.shared.security_converge.migrate_roles', 'migrate_roles', (['session', 'PVM_MAP'], {}), '(session, PVM_MAP)\n', (837, 855), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((1051, 1064), 'alembic.op.get_bind', 'op.get_bind', ([], {}), '()\n', (1062, 1064), False, 'from alembic import op\n'), ((1079, 1097), 'sqlalchemy.orm.Session', 'Session', ([], {'bind': 'bind'}), '(bind=bind)\n', (1086, 1097), False, 'from sqlalchemy.orm import Session\n'), ((565, 589), 'rabbitai.migrations.shared.security_converge.Pvm', 'Pvm', (['"""Query"""', '"""can_read"""'], {}), "('Query', 'can_read')\n", (568, 589), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((628, 652), 'rabbitai.migrations.shared.security_converge.Pvm', 'Pvm', (['"""Query"""', '"""can_read"""'], {}), "('Query', 'can_read')\n", (631, 652), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((1175, 1205), 'rabbitai.migrations.shared.security_converge.get_reversed_new_pvms', 'get_reversed_new_pvms', (['PVM_MAP'], {}), '(PVM_MAP)\n', (1196, 1205), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n'), ((1234, 1263), 'rabbitai.migrations.shared.security_converge.get_reversed_pvm_map', 'get_reversed_pvm_map', (['PVM_MAP'], {}), '(PVM_MAP)\n', (1254, 1263), False, 'from rabbitai.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm\n')] |
###
# MD5 encryption example.
#
# License - MIT.
####
import os
import hashlib
# Main function.
def main():
# {
teststr = 'To be No.1'
hmd5 = hashlib.md5()
hmd5.update(teststr.encode(encoding = 'UTF-8'))
print('Source data:\t' + teststr)
print('Dest data:\t' + hmd5.hexdigest())
# }
# Program entry.
if '__main__' == __name__:
main()
| [
"hashlib.md5"
] | [((155, 168), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (166, 168), False, 'import hashlib\n')] |
from dataclasses import dataclass
from typing import Optional
from bohr.config.pathconfig import PathConfig, load_config_dict_from_file
from bohr.fs import find_project_root
from bohr.util.paths import AbsolutePath
@dataclass(frozen=True)
class AppConfig:
verbose: bool
paths: PathConfig
@staticmethod
def load(project_root: Optional[AbsolutePath] = None) -> "AppConfig":
project_root = project_root or find_project_root()
config_dict = load_config_dict_from_file(project_root)
try:
verbose_str = config_dict["core"]["verbose"]
verbose = verbose_str == "true" or verbose_str == "True"
except KeyError:
verbose = False
return AppConfig(verbose, PathConfig.load())
| [
"bohr.config.pathconfig.PathConfig.load",
"bohr.config.pathconfig.load_config_dict_from_file",
"dataclasses.dataclass",
"bohr.fs.find_project_root"
] | [((219, 241), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (228, 241), False, 'from dataclasses import dataclass\n'), ((473, 513), 'bohr.config.pathconfig.load_config_dict_from_file', 'load_config_dict_from_file', (['project_root'], {}), '(project_root)\n', (499, 513), False, 'from bohr.config.pathconfig import PathConfig, load_config_dict_from_file\n'), ((431, 450), 'bohr.fs.find_project_root', 'find_project_root', ([], {}), '()\n', (448, 450), False, 'from bohr.fs import find_project_root\n'), ((740, 757), 'bohr.config.pathconfig.PathConfig.load', 'PathConfig.load', ([], {}), '()\n', (755, 757), False, 'from bohr.config.pathconfig import PathConfig, load_config_dict_from_file\n')] |
import os
configVars = ['env','implementation_name','ssh_ansible_user','ansible_private_key_path','application_host','app_address_space','dns_name','proto','database_host','database_password','<PASSWORD>_admin_password','<PASSWORD>_password','trampoline_secret','backup_storage_key','badger_admin_password','<PASSWORD>ger_admin_email','ekstep_api_key','sunbird_image_storage_url','sunbird_azure_storage_key','sunbird_azure_storage_account']
configToWrite = []
contentToCreateCurl = []
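# For each template line, replace 'key:' with 'key: <value from the environment>' for every
# known configuration variable, collecting the substituted values for confValue.yml as well.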
with open('config_templet','r') as fp:
lines = fp.readlines()
for line in lines:
for key in configVars:
if key in line:
keyName = key+ ':'
keyValue = keyName +' '+ os.getenv(key)
line = line.replace(keyName, keyValue)
contentToCreateCurl.insert(len(contentToCreateCurl),keyValue)
configToWrite.insert(len(configToWrite),line)
fp.close()
with open('config','w') as fp:
fp.writelines(configToWrite)
fp.close()
with open('confValue.yml','w') as fp:
fp.writelines(contentToCreateCurl)
fp.close()
# import yaml
# # import os
# # fname = "config"
# # configElement = ['env','implementation_name','ssh_ansible_user','sudo_passwd','ansible_private_key_path','application_host','app_address_space','dns_name','proto','database_host','database_password','<PASSWORD>','<PASSWORD>_password','trampoline_secret','backup_storage_key','badger_admin_password','badger_admin_email','ekstep_api_key','sunbird_image_storage_url','sunbird_azure_storage_key','sunbird_azure_storage_account']
# # with open(fname, "w") as f:
# # for key in configElement:
# # configLine = key + ': ' + "NULL \n" if os.getenv(key) is None else key + ': ' + os.getenv(key)+'\n'
# # print configLine
# # f.write(configLine)
# #
# #
| [
"os.getenv"
] | [((665, 679), 'os.getenv', 'os.getenv', (['key'], {}), '(key)\n', (674, 679), False, 'import os\n')] |
import pytest
from backend.common.models.team import Team
from backend.common.models.tests.util import (
CITY_STATE_COUNTRY_PARAMETERS,
LOCATION_PARAMETERS,
)
@pytest.mark.parametrize("key", ["frc177", "frc1"])
def test_valid_key_names(key: str) -> None:
assert Team.validate_key_name(key) is True
@pytest.mark.parametrize("key", ["bcr077", "frc 011", "frc711\\"])
def test_invalid_key_names(key: str) -> None:
assert Team.validate_key_name(key) is False
def test_key_name() -> None:
team = Team(id="frc254", team_number=254)
assert team.key_name == "frc254"
@pytest.mark.parametrize(LOCATION_PARAMETERS[0], LOCATION_PARAMETERS[1])
def test_location(
city: str, state: str, country: str, postalcode: str, output: str
) -> None:
team = Team(
city=city,
state_prov=state,
country=country,
postalcode=postalcode,
)
assert team.location == output
@pytest.mark.parametrize(
CITY_STATE_COUNTRY_PARAMETERS[0], CITY_STATE_COUNTRY_PARAMETERS[1]
)
def test_city_state_country(city: str, state: str, country: str, output: str) -> None:
team = Team(
city=city,
state_prov=state,
country=country,
)
assert team.city_state_country == output
def test_details_url() -> None:
team = Team(
id="frc254",
team_number=254,
)
assert team.details_url == "/team/254"
| [
"pytest.mark.parametrize",
"backend.common.models.team.Team",
"backend.common.models.team.Team.validate_key_name"
] | [((171, 221), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key"""', "['frc177', 'frc1']"], {}), "('key', ['frc177', 'frc1'])\n", (194, 221), False, 'import pytest\n'), ((316, 381), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key"""', "['bcr077', 'frc 011', 'frc711\\\\']"], {}), "('key', ['bcr077', 'frc 011', 'frc711\\\\'])\n", (339, 381), False, 'import pytest\n'), ((593, 664), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['LOCATION_PARAMETERS[0]', 'LOCATION_PARAMETERS[1]'], {}), '(LOCATION_PARAMETERS[0], LOCATION_PARAMETERS[1])\n', (616, 664), False, 'import pytest\n'), ((927, 1022), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['CITY_STATE_COUNTRY_PARAMETERS[0]', 'CITY_STATE_COUNTRY_PARAMETERS[1]'], {}), '(CITY_STATE_COUNTRY_PARAMETERS[0],\n CITY_STATE_COUNTRY_PARAMETERS[1])\n', (950, 1022), False, 'import pytest\n'), ((518, 552), 'backend.common.models.team.Team', 'Team', ([], {'id': '"""frc254"""', 'team_number': '(254)'}), "(id='frc254', team_number=254)\n", (522, 552), False, 'from backend.common.models.team import Team\n'), ((776, 849), 'backend.common.models.team.Team', 'Team', ([], {'city': 'city', 'state_prov': 'state', 'country': 'country', 'postalcode': 'postalcode'}), '(city=city, state_prov=state, country=country, postalcode=postalcode)\n', (780, 849), False, 'from backend.common.models.team import Team\n'), ((1123, 1173), 'backend.common.models.team.Team', 'Team', ([], {'city': 'city', 'state_prov': 'state', 'country': 'country'}), '(city=city, state_prov=state, country=country)\n', (1127, 1173), False, 'from backend.common.models.team import Team\n'), ((1295, 1329), 'backend.common.models.team.Team', 'Team', ([], {'id': '"""frc254"""', 'team_number': '(254)'}), "(id='frc254', team_number=254)\n", (1299, 1329), False, 'from backend.common.models.team import Team\n'), ((277, 304), 'backend.common.models.team.Team.validate_key_name', 'Team.validate_key_name', (['key'], {}), '(key)\n', (299, 304), False, 'from backend.common.models.team import Team\n'), ((439, 466), 'backend.common.models.team.Team.validate_key_name', 'Team.validate_key_name', (['key'], {}), '(key)\n', (461, 466), False, 'from backend.common.models.team import Team\n')] |
import glob
import pickle
import sys
import msprime as msp
import numpy as np
import os
import multiprocessing as mp
import shutil
import random
import copy
import argparse
import h5py
import allel
import time
from sklearn.neighbors import NearestNeighbors
from sklearn.utils import resample
import matplotlib as mpl
mpl.use('pdf')
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras.models import Model, model_from_json
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, TerminateOnNaN
| [
"matplotlib.use"
] | [((319, 333), 'matplotlib.use', 'mpl.use', (['"""pdf"""'], {}), "('pdf')\n", (326, 333), True, 'import matplotlib as mpl\n')] |
import gurulhutils
import random
def init( keys ):
global db
status, database = gurulhutils.db_init( keys["Database"] )
db = database.get_collection("taylorswiftdb")
return status
def help():
return "Look what you made me do."
def recursion( word ):
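# Weighted random walk over the word-transition counts stored in MongoDB: each document maps
# a word to the words that followed it (values are counts used as weights); '_end' stops the sentence.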
c = db.find_one( { "_word" : word } )
pop = list( c.keys() )
w = list( c.values() )
for i in ["_id", "_word"]:
try:
k = pop.index( i )
pop.pop( k )
w.pop( k )
except:
pass
try:
choice = random.choices( population=pop, weights=w )[0]
except:
return "."
if choice == "_end":
return "."
else:
return " " + choice + recursion( choice )
def reply( query ):
reply = recursion( "_begin" )[1:]
query.update( { "rinterface" : query["qinterface"], "to": query["from"], "rtype" : "text", "rcontent" : reply } )
return query
| [
"gurulhutils.db_init",
"random.choices"
] | [((89, 126), 'gurulhutils.db_init', 'gurulhutils.db_init', (["keys['Database']"], {}), "(keys['Database'])\n", (108, 126), False, 'import gurulhutils\n'), ((550, 591), 'random.choices', 'random.choices', ([], {'population': 'pop', 'weights': 'w'}), '(population=pop, weights=w)\n', (564, 591), False, 'import random\n')] |
import os
from flask import Blueprint, request, jsonify
from math import exp
bp = Blueprint('app', __name__)
MODEL_COEFFICIENTS = {
'CarrierAA': -0.0019204985425103213,
'CarrierAS': -0.84841944514035605,
'CarrierB6': 0.12241821143901417,
'CarrierDL': -0.13261989508615579,
'CarrierEV': -0.010973177444743456,
'CarrierF9': 0.0,
'CarrierHA': 0.0,
'CarrierNK': 0.0,
'CarrierOO': -0.023123225427465505,
'CarrierUA': 0.16964701242785432,
'CarrierVX': -0.053647076481738616,
'CarrierWN': -0.10363757129666461,
'DayOfWeek1': 0.063727159827779406,
'DayOfWeek2': -0.1853191855446592,
'DayOfWeek3': -0.30014428231028273,
'DayOfWeek4': -0.094916176670324648,
'DayOfWeek5': -0.062572400878981818,
'DayOfWeek6': -0.084710367475524101,
'DayOfWeek7': -0.21834041248771782,
'DepTimeBucket0': 1.3627774299145421,
'DepTimeBucket1': -1.9928496577654837,
'DepTimeBucket2': -0.82127521506520362,
'DepTimeBucket3': -0.27652436329447116,
'DepTimeBucket4': 0.17856868964922734,
'DepTimeBucket5': 0.66702745101136107,
'OriginGroup0': -0.67761214167229855,
'OriginGroup1': -0.25382448266698804,
'OriginGroup2': -0.11041084237121421,
'OriginGroup3': -0.067072611283875219,
'OriginGroup4': 0.22664441244453568}
MODEL_INTERCEPT = -0.88227567
ORIGIN_GROUPS = [{'ABR', 'ADQ', 'AMA', 'ANC', 'ATW', 'AZO', 'BET', 'BFL', 'BIL', 'BIS', 'BRW', 'BTM', 'BZN', 'CDC', 'CIU', 'CPR', 'DLG', 'DVL', 'EFD', 'EKO', 'EWN', 'FCA', 'FNT', 'FSD', 'GCC', 'GEG', 'GFK', 'GJT', 'GTF', 'HDN', 'HLN', 'HNL', 'HRL', 'HYS', 'IDA', 'INL', 'ISN', 'ITH', 'ITO', 'JMS', 'JNU', 'KOA', 'KTN', 'LCH', 'LIH', 'LSE', 'LWS', 'MOT', 'MQT', 'MSO', 'OGG', 'PIH', 'PLN', 'PPG', 'RKS', 'SCC', 'SGU', 'SIT', 'TWF', 'WYS', 'YUM'},
{'ABY', 'ASE', 'BDL', 'BGM', 'BMI', 'BOI', 'BQK', 'BRD', 'BRO', 'BUF', 'CHA', 'COD', 'CRP', 'CWA', 'DAY', 'DSM', 'ECP', 'EGE', 'ERI', 'EUG', 'EYW', 'FAR', 'FWA', 'GNV', 'GRB', 'GRK', 'GRR', 'GST', 'GUC', 'ILM', 'JAC', 'LAN', 'LAR', 'LBB', 'LNK', 'MAF', 'MCI', 'MHT', 'MKE', 'MLB', 'MRY', 'MTJ', 'OKC', 'OMA', 'OTZ', 'PDX', 'PHF', 'PIT', 'PSC', 'PSE', 'RHI', 'ROW', 'RST', 'SBP', 'SDF', 'SGF', 'SJC', 'SLC', 'TLH', 'TUL', 'XNA'},
{'ABQ', 'AKN', 'ALB', 'APN', 'AVP', 'BHM', 'BJI', 'BUR', 'CAK', 'CMH', 'DCA', 'DRO', 'ELP', 'FAI', 'FAT', 'FLG', 'FSM', 'GPT', 'GSP', 'HIB', 'HSV', 'IAD', 'ICT', 'IMT', 'IND', 'ISP', 'JAN', 'JAX', 'LGB', 'LIT', 'MBS', 'MEM', 'MLU', 'MSN', 'MSY', 'OAK', 'OME', 'ONT', 'PAH', 'PIA', 'PNS', 'PSG', 'PVD', 'PWM', 'RAP', 'RNO', 'ROC', 'RSW', 'SAN', 'SAT', 'SEA', 'SMF', 'SNA', 'SPS', 'STT', 'SYR', 'TRI', 'TUS', 'TXK', 'UST', 'VLD'},
{'ACK', 'ATL', 'AUS', 'BNA', 'BOS', 'BTR', 'BWI', 'CAE', 'CHS', 'CID', 'CLE', 'CLL', 'CMX', 'COS', 'CRW', 'CSG', 'CVG', 'DAB', 'DTW', 'EVV', 'FAY', 'GCK', 'GSO', 'GTR', 'HOB', 'HOU', 'HYA', 'IAH', 'LEX', 'LFT', 'LRD', 'MDW', 'MFE', 'MFR', 'MGM', 'MLI', 'MOB', 'MSP', 'OAJ', 'ORF', 'PBI', 'PHL', 'PHX', 'PIB', 'PSP', 'RDU', 'RIC', 'ROA', 'SBA', 'SBN', 'SHV', 'STL', 'STX', 'SUN', 'TPA', 'TTN', 'TVC', 'TYS', 'VPS', 'WRG', 'YAK'},
{'ABE', 'ABI', 'ACT', 'ACV', 'ACY', 'ADK', 'AEX', 'AGS', 'AVL', 'BGR', 'BPT', 'BQN', 'BTV', 'CDV', 'CHO', 'CLT', 'DAL', 'DEN', 'DFW', 'DHN', 'DLH', 'EAU', 'ELM', 'ESC', 'EWR', 'FLL', 'GGG', 'GRI', 'GUM', 'HPN', 'IAG', 'JFK', 'JLN', 'LAS', 'LAW', 'LAX', 'LBE', 'LGA', 'MCO', 'MDT', 'MEI', 'MIA', 'MKG', 'MVY', 'MYR', 'ORD', 'ORH', 'OTH', 'PBG', 'RDD', 'RDM', 'SAF', 'SAV', 'SCE', 'SFO', 'SJT', 'SJU', 'SMX', 'SPI', 'SRQ', 'SWF'}]
DEP_TIME_THRESHOLDS = [400, 800, 1200, 1600, 2000, 2400]
@bp.route('/')
def hello():
return 'hello world'
@bp.route('/flight_delay_prediction', methods=['POST', 'OPTIONS'])
def flight_delay_prediction():
payload = request.get_json(force=True)
return jsonify({'prob_delay': prediction(payload['data'])})
def prediction(data):
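# Logistic regression: accumulate the intercept plus the coefficients of the one-hot features
# (carrier, day of week, departure-time bucket, origin group), then apply the sigmoid.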
operand = MODEL_INTERCEPT
operand += MODEL_COEFFICIENTS['Carrier%s' % data['carrier']]
operand += MODEL_COEFFICIENTS['DayOfWeek%s' % data['day_of_week']]
for i, threshold in enumerate(DEP_TIME_THRESHOLDS):
if int(data['departure_time']) < threshold:
time_bucket = i
operand += MODEL_COEFFICIENTS['DepTimeBucket%s' %i]
break
for i, origin_group in enumerate(ORIGIN_GROUPS):
if data['origin'] in origin_group:
group_number = i
operand += MODEL_COEFFICIENTS['OriginGroup%s' %group_number]
break
return 1 / (1 + exp(-1 * operand))
| [
"flask.request.get_json",
"flask.Blueprint",
"math.exp"
] | [((84, 110), 'flask.Blueprint', 'Blueprint', (['"""app"""', '__name__'], {}), "('app', __name__)\n", (93, 110), False, 'from flask import Blueprint, request, jsonify\n'), ((3935, 3963), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (3951, 3963), False, 'from flask import Blueprint, request, jsonify\n'), ((4675, 4692), 'math.exp', 'exp', (['(-1 * operand)'], {}), '(-1 * operand)\n', (4678, 4692), False, 'from math import exp\n')] |
from app import app
if __name__ == '__main__':
app = app.Session() | [
"app.app.Session"
] | [((58, 71), 'app.app.Session', 'app.Session', ([], {}), '()\n', (69, 71), False, 'from app import app\n')] |
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
#################Create EvalCallBack ########################
"""
import numpy as np
from mindspore.train.callback import Callback
from mindspore.train.serialization import load_param_into_net, load_checkpoint
from mindspore.communication.management import get_rank
from mindspore import Tensor, save_checkpoint
from src.c3d_model import C3D
from src.model_utils.config import config
from src.dataset import classification_dataset
class EvalCallBack(Callback):
"""EvalCallBack"""
def __init__(self, model, eval_per_epoch, epoch_per_eval, save_ckpt_path, train_batch_num):
config.load_type = 'test'
self.model = model
self.rank = get_rank() if config.is_distributed else 0
self.eval_per_epoch = eval_per_epoch
self.epoch_per_eval = epoch_per_eval
self.save_ckpt_path = save_ckpt_path
self.eval_dataset, self.eval_dataset_len = classification_dataset(config.batch_size, 1, shuffle=True,
repeat_num=1, drop_remainder=True)
self.best_ckpt = 0
self.best_acc = 0
self.train_batch_num = train_batch_num
def epoch_end(self, run_context):
"""culculate acc"""
network = C3D(config.num_classes)
cb_param = run_context.original_args()
cur_epoch = cb_param.cur_epoch_num
save_ckpt_path = self.save_ckpt_path + str(self.rank) + '-' + str(cur_epoch) + '_' \
+ str(self.train_batch_num) + '.ckpt'
# pre_trained
param_dict = load_checkpoint(save_ckpt_path)
param_not_load = load_param_into_net(network, param_dict)
batch_num = self.eval_dataset.get_dataset_size()
print('ckpt:', save_ckpt_path)
print('param_not_load', param_not_load)
if cur_epoch % self.eval_per_epoch == 0:
network.set_train(mode=False)
acc_sum, sample_num = 0, 0
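# Top-1 accuracy: count predictions whose argmax matches the last label column, accumulated
# over the whole evaluation dataset.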
for idnum, (input_data, label) in enumerate(self.eval_dataset):
predictions = network(Tensor(input_data))
predictions, label = predictions.asnumpy(), label.asnumpy()
acc = np.sum(np.argmax(predictions, 1) == label[:, -1])
batch_size = label.shape[0]
acc_sum += acc
sample_num += batch_size
if idnum % 20 == 0:
print("setep: {}/{}, acc: {}".format(idnum + 1, batch_num, acc / batch_size))
top_1 = acc_sum / sample_num
print('eval result: top_1 {:.3f}%'.format(top_1 * 100))
if self.best_acc < top_1:
self.best_acc = top_1
self.best_ckpt = cur_epoch
best_ckpt_file = 'best_acc.ckpt'
best_ckpt_file = self.save_ckpt_path + str(self.rank) + best_ckpt_file
save_checkpoint(network, best_ckpt_file)
print('best result: top_1 {:.3f}%'.format(self.best_acc * 100))
print('best ckpt:{}'.format(self.best_ckpt))
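# A minimal usage sketch with illustrative names (not part of the original
# script): the callback expects checkpoints named "<rank>-<epoch>_<batches>.ckpt"
# to already exist under save_ckpt_path, e.g. written by a ModelCheckpoint
# callback, before each epoch_end call.
#
#     eval_cb = EvalCallBack(model, eval_per_epoch=1, epoch_per_eval={},
#                            save_ckpt_path='./ckpt/', train_batch_num=batch_num)
#     model.train(num_epochs, train_dataset, callbacks=[ckpt_cb, eval_cb])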
| [
"mindspore.train.serialization.load_checkpoint",
"numpy.argmax",
"src.dataset.classification_dataset",
"mindspore.save_checkpoint",
"mindspore.train.serialization.load_param_into_net",
"src.c3d_model.C3D",
"mindspore.Tensor",
"mindspore.communication.management.get_rank"
] | [((1565, 1662), 'src.dataset.classification_dataset', 'classification_dataset', (['config.batch_size', '(1)'], {'shuffle': '(True)', 'repeat_num': '(1)', 'drop_remainder': '(True)'}), '(config.batch_size, 1, shuffle=True, repeat_num=1,\n drop_remainder=True)\n', (1587, 1662), False, 'from src.dataset import classification_dataset\n'), ((1918, 1941), 'src.c3d_model.C3D', 'C3D', (['config.num_classes'], {}), '(config.num_classes)\n', (1921, 1941), False, 'from src.c3d_model import C3D\n'), ((2231, 2262), 'mindspore.train.serialization.load_checkpoint', 'load_checkpoint', (['save_ckpt_path'], {}), '(save_ckpt_path)\n', (2246, 2262), False, 'from mindspore.train.serialization import load_param_into_net, load_checkpoint\n'), ((2288, 2328), 'mindspore.train.serialization.load_param_into_net', 'load_param_into_net', (['network', 'param_dict'], {}), '(network, param_dict)\n', (2307, 2328), False, 'from mindspore.train.serialization import load_param_into_net, load_checkpoint\n'), ((1336, 1346), 'mindspore.communication.management.get_rank', 'get_rank', ([], {}), '()\n', (1344, 1346), False, 'from mindspore.communication.management import get_rank\n'), ((3516, 3556), 'mindspore.save_checkpoint', 'save_checkpoint', (['network', 'best_ckpt_file'], {}), '(network, best_ckpt_file)\n', (3531, 3556), False, 'from mindspore import Tensor, save_checkpoint\n'), ((2717, 2735), 'mindspore.Tensor', 'Tensor', (['input_data'], {}), '(input_data)\n', (2723, 2735), False, 'from mindspore import Tensor, save_checkpoint\n'), ((2842, 2867), 'numpy.argmax', 'np.argmax', (['predictions', '(1)'], {}), '(predictions, 1)\n', (2851, 2867), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
======
Slider
======
A slideshow component, similar to Album, with the difference that a slide item
can have HTML content.
Slide items are ordered by their ``order`` field value. Items whose order is
zero end up in an almost arbitrary order (mostly depending on the item object
id).
"""
from django.conf import settings
from django.core.validators import FileExtensionValidator
from django.db import models
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import gettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_blocks.choices_helpers import (get_slider_default_template,
get_slider_template_choices)
from cmsplugin_blocks.utils import SmartFormatMixin
class Slider(CMSPlugin):
"""
Slide container for items.
"""
title = models.CharField(
_("Title"),
blank=False,
max_length=150,
default="",
)
"""
A required title string.
"""
template = models.CharField(
_("Template"),
blank=False,
max_length=150,
choices=get_slider_template_choices(),
default=get_slider_default_template(),
help_text=_("Used template for content look."),
)
"""
Template choice from available plugin templates in setting
``BLOCKS_SLIDER_TEMPLATES``. Default to the first choice item.
"""
def __str__(self):
return Truncator(strip_tags(self.title)).words(
settings.BLOCKS_MODEL_TRUNCATION_LENGTH,
truncate=settings.BLOCKS_MODEL_TRUNCATION_CHR
)
def copy_relations(self, oldinstance):
"""
Copy FK relations when plugin object is copied as another object
See:
http://docs.django-cms.org/en/latest/how_to/custom_plugins.html#for-foreign-key-relations-from-other-objects
:meta private:
"""
self.slide_item.all().delete()
for slide_item in oldinstance.slide_item.all():
slide_item.pk = None
slide_item.slider = self
slide_item.save()
class Meta:
verbose_name = _("Slider")
verbose_name_plural = _("Sliders")
class SlideItem(SmartFormatMixin, models.Model):
"""
Slide item to include in container.
"""
slider = models.ForeignKey(
Slider,
related_name="slide_item",
on_delete=models.CASCADE
)
title = models.CharField(
_("Title"),
blank=False,
max_length=150,
default="",
)
"""
Required title string.
"""
image = models.FileField(
_("Image"),
upload_to="blocks/slider/%y/%m",
max_length=255,
null=True,
blank=False,
default=None,
validators=[
FileExtensionValidator(
allowed_extensions=settings.BLOCKS_ALLOWED_IMAGE_EXTENSIONS
),
]
)
"""
Required image file, limited to enabled image formats from settings
``BLOCKS_ALLOWED_IMAGE_EXTENSIONS``.
"""
content = models.TextField(
_(u"Content"),
blank=True,
default="",
)
"""
Optional long text, it will be editable through CKeditor on plugin form.
"""
order = models.IntegerField(
_("Order"),
blank=False,
default=0
)
"""
Number for order position in item list.
"""
link_name = models.CharField(
_("link name"),
blank=True,
max_length=45,
)
"""
Optional string for link name.
"""
link_url = models.CharField(
_("link url"),
blank=True,
max_length=255,
)
"""
Optional string for link URL.
"""
link_open_blank = models.BooleanField(
_("open new window"),
default=False,
help_text=_("If checked the link will be open in a new window"),
)
"""
Checkbox to enable opening link URL in a new window/tab.
"""
def __str__(self):
return Truncator(strip_tags(self.title)).words(
settings.BLOCKS_MODEL_TRUNCATION_LENGTH,
truncate=settings.BLOCKS_MODEL_TRUNCATION_CHR
)
def get_image_format(self):
return self.media_format(self.image)
class Meta:
verbose_name = _("Slide item")
verbose_name_plural = _("Slide items")
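# A minimal usage sketch, assuming a Slider instance named "slider_instance"
# (the name is illustrative): rendering code can fetch the slides in their
# configured order through the "slide_item" related name defined above.
#
#     slides = slider_instance.slide_item.all().order_by("order")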
| [
"django.core.validators.FileExtensionValidator",
"django.utils.html.strip_tags",
"cmsplugin_blocks.choices_helpers.get_slider_template_choices",
"django.db.models.ForeignKey",
"django.utils.translation.gettext_lazy",
"cmsplugin_blocks.choices_helpers.get_slider_default_template"
] | [((2410, 2488), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Slider'], {'related_name': '"""slide_item"""', 'on_delete': 'models.CASCADE'}), "(Slider, related_name='slide_item', on_delete=models.CASCADE)\n", (2427, 2488), False, 'from django.db import models\n'), ((969, 979), 'django.utils.translation.gettext_lazy', '_', (['"""Title"""'], {}), "('Title')\n", (970, 979), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1139, 1152), 'django.utils.translation.gettext_lazy', '_', (['"""Template"""'], {}), "('Template')\n", (1140, 1152), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2235, 2246), 'django.utils.translation.gettext_lazy', '_', (['"""Slider"""'], {}), "('Slider')\n", (2236, 2246), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2277, 2289), 'django.utils.translation.gettext_lazy', '_', (['"""Sliders"""'], {}), "('Sliders')\n", (2278, 2289), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2558, 2568), 'django.utils.translation.gettext_lazy', '_', (['"""Title"""'], {}), "('Title')\n", (2559, 2568), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2723, 2733), 'django.utils.translation.gettext_lazy', '_', (['"""Image"""'], {}), "('Image')\n", (2724, 2733), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3196, 3209), 'django.utils.translation.gettext_lazy', '_', (['u"""Content"""'], {}), "(u'Content')\n", (3197, 3209), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3392, 3402), 'django.utils.translation.gettext_lazy', '_', (['"""Order"""'], {}), "('Order')\n", (3393, 3402), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3552, 3566), 'django.utils.translation.gettext_lazy', '_', (['"""link name"""'], {}), "('link name')\n", (3553, 3566), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3710, 3723), 'django.utils.translation.gettext_lazy', '_', (['"""link url"""'], {}), "('link url')\n", (3711, 3723), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3877, 3897), 'django.utils.translation.gettext_lazy', '_', (['"""open new window"""'], {}), "('open new window')\n", (3878, 3897), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4397, 4412), 'django.utils.translation.gettext_lazy', '_', (['"""Slide item"""'], {}), "('Slide item')\n", (4398, 4412), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4443, 4459), 'django.utils.translation.gettext_lazy', '_', (['"""Slide items"""'], {}), "('Slide items')\n", (4444, 4459), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1215, 1244), 'cmsplugin_blocks.choices_helpers.get_slider_template_choices', 'get_slider_template_choices', ([], {}), '()\n', (1242, 1244), False, 'from cmsplugin_blocks.choices_helpers import get_slider_default_template, get_slider_template_choices\n'), ((1262, 1291), 'cmsplugin_blocks.choices_helpers.get_slider_default_template', 'get_slider_default_template', ([], {}), '()\n', (1289, 1291), False, 'from cmsplugin_blocks.choices_helpers import get_slider_default_template, get_slider_template_choices\n'), ((1311, 1347), 'django.utils.translation.gettext_lazy', '_', (['"""Used template for content look."""'], {}), "('Used template for content look.')\n", (1312, 1347), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3940, 3993), 'django.utils.translation.gettext_lazy', '_', (['"""If checked the link will be open in a new window"""'], {}), "('If checked the link will be open in a new window')\n", (3941, 3993), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2895, 2983), 'django.core.validators.FileExtensionValidator', 'FileExtensionValidator', ([], {'allowed_extensions': 'settings.BLOCKS_ALLOWED_IMAGE_EXTENSIONS'}), '(allowed_extensions=settings.\n BLOCKS_ALLOWED_IMAGE_EXTENSIONS)\n', (2917, 2983), False, 'from django.core.validators import FileExtensionValidator\n'), ((1550, 1572), 'django.utils.html.strip_tags', 'strip_tags', (['self.title'], {}), '(self.title)\n', (1560, 1572), False, 'from django.utils.html import strip_tags\n'), ((4127, 4149), 'django.utils.html.strip_tags', 'strip_tags', (['self.title'], {}), '(self.title)\n', (4137, 4149), False, 'from django.utils.html import strip_tags\n')]