code
stringlengths
22
1.05M
apis
listlengths
1
3.31k
extract_api
stringlengths
75
3.25M
# Generated by Django 3.1.7 on 2021-03-24 17:18
"""Schema migration for the ``xmpt`` app.

Creates the core entity tables (Community, User, TIN, tax forms /
authorities / certificates, memberships) plus US-geography and country
lookup tables, then wires their foreign-key relationships.  This file
was produced by ``makemigrations``; once applied, field definitions
should not be edited by hand.
"""
import datetime
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('xmpt', '0001_initial'),
    ]

    operations = [
        # Lookup table of business-category codes (integer PK supplied by data load).
        migrations.CreateModel(
            name='BusinessCategory',
            fields=[
                ('business_category_id', models.IntegerField(primary_key=True, serialize=False)),
                ('business_category_code', models.CharField(max_length=254)),
                ('business_category_title', models.CharField(max_length=254)),
                ('version_year', models.CharField(default='2017', max_length=10)),
            ],
            options={
                'verbose_name_plural': 'Business Categories',
            },
        ),
        # A community (organization) record; UUID primary key.
        # NOTE(review): the '9999-12-31 00:00' string defaults below act as a
        # "never deactivated/disabled" sentinel but are naive datetimes stored
        # as strings — confirm they parse as intended under USE_TZ.
        migrations.CreateModel(
            name='Community',
            fields=[
                ('community_guid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('community_code', models.CharField(default='empty', max_length=254)),
                ('community_name', models.CharField(default='empty', max_length=254)),
                ('email', models.EmailField(blank=True, max_length=254)),
                ('phone_number', models.CharField(blank=True, max_length=20)),
                ('phone_type', models.CharField(blank=True, choices=[('M', 'Mobile'), ('O', 'Office')], max_length=1)),
                ('street_address_1', models.CharField(default='empty', max_length=254)),
                ('street_address_2', models.CharField(blank=True, max_length=254)),
                ('apt_suit', models.CharField(blank=True, max_length=50)),
                ('postal_code', models.CharField(default='00000', max_length=20)),
                ('postal_code_9', models.CharField(blank=True, max_length=20)),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('is_community_owner', models.BooleanField(default=False)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('disabled_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('business_category', models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.businesscategory')),
            ],
            options={
                'verbose_name_plural': 'Communities',
            },
        ),
        # Parent/child (or sibling) links between communities with per-link
        # permission flags.  GUIDs are stored as plain 36-char strings here,
        # not as foreign keys.
        migrations.CreateModel(
            name='CommunityHierarchy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('parentCommunityGUID', models.CharField(max_length=36)),
                ('childCommunityGUID', models.CharField(max_length=36)),
                ('relationshipType', models.IntegerField(blank=True, choices=[(1, 'Parent_Child'), (2, 'Sibling_Sibling')], default=1)),
                ('can_share', models.BooleanField(default=True)),
                ('can_view', models.BooleanField(default=True)),
                ('can_manage', models.BooleanField(default=False)),
                ('can_report', models.BooleanField(default=True)),
                ('can_analyze', models.BooleanField(default=True)),
                ('can_audit', models.BooleanField(default=True)),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('deleted_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
            ],
            options={
                'verbose_name_plural': 'Community Hierarchy',
            },
        ),
        # ISO-3166 country lookup (alpha-2, alpha-3, numeric id as PK).
        migrations.CreateModel(
            name='Country',
            fields=[
                ('country_id', models.IntegerField(primary_key=True, serialize=False)),
                ('country_alpha2', models.CharField(max_length=2)),
                ('country_alpha3', models.CharField(max_length=3)),
                ('country_name', models.CharField(max_length=254)),
            ],
            options={
                'verbose_name_plural': 'Countries',
            },
        ),
        # Tax-form lookup table.
        migrations.CreateModel(
            name='TaxForm',
            fields=[
                ('tax_form_id', models.IntegerField(primary_key=True, serialize=False)),
                ('tax_form_name', models.CharField(blank=True, max_length=254)),
                ('jurisdiction_name', models.CharField(blank=True, max_length=254)),
            ],
            options={
                'verbose_name': 'Tax Form',
                'verbose_name_plural': 'Tax Forms',
            },
        ),
        # Tax identification numbers.
        # NOTE(review): default=840 presumably points at the United States row
        # of the Country table (ISO numeric 840) — confirm that row exists
        # before rows with the default are inserted.
        migrations.CreateModel(
            name='TIN',
            fields=[
                ('tax_code_id', models.AutoField(primary_key=True, serialize=False)),
                ('tin_type', models.CharField(choices=[('EIN', 'EIN'), ('TIN', 'TIN'), ('SSN', 'SSN'), ('STN', 'STID')], max_length=3)),
                ('tin_code', models.CharField(max_length=254)),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('disabled_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('country_id', models.ForeignKey(default=840, on_delete=django.db.models.deletion.CASCADE, to='xmpt.country')),
            ],
            options={
                'verbose_name_plural': 'Tax Identifiers',
            },
        ),
        # US county lookup keyed by FIPS-style codes; denormalizes country
        # columns alongside the country_id FK.
        migrations.CreateModel(
            name='USCounty',
            fields=[
                ('county_id', models.IntegerField(primary_key=True, serialize=False)),
                ('state_alpha2', models.CharField(default='ZZ', max_length=2)),
                ('state_fips_code', models.CharField(blank=True, max_length=10)),
                ('county_fips_code', models.CharField(blank=True, max_length=10)),
                ('county_subdivision_fips_code', models.CharField(blank=True, max_length=10)),
                ('county_name', models.CharField(max_length=254)),
                ('jurisdiction_level_code', models.CharField(blank=True, max_length=10)),
                ('country_alpha2', models.CharField(default='US', max_length=2)),
                ('country_alpha3', models.CharField(default='USA', max_length=3)),
                ('country_name', models.CharField(default='United States of America', max_length=254)),
                ('country_id', models.ForeignKey(default=840, on_delete=django.db.models.deletion.CASCADE, to='xmpt.country')),
            ],
            options={
                'verbose_name_plural': 'US Counties',
            },
        ),
        # Application user with address, signature and driver's-license data.
        # NOTE(review): unlike the other tables, deactivated/disabled here have
        # no default — rows will require explicit values (blank=True does not
        # allow NULL at the database level) — confirm intended.
        migrations.CreateModel(
            name='User',
            fields=[
                ('user_guid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('user_name', models.CharField(default='empty', max_length=254)),
                ('first_name', models.CharField(blank=True, max_length=254)),
                ('middle_name', models.CharField(blank=True, max_length=100)),
                ('last_name', models.CharField(blank=True, max_length=254)),
                ('email', models.EmailField(blank=True, max_length=254)),
                ('phone_number', models.CharField(blank=True, max_length=20)),
                ('phone_type', models.CharField(blank=True, choices=[('M', 'Mobile'), ('O', 'Office')], max_length=1)),
                ('street_address_1', models.CharField(default='empty', max_length=254)),
                ('street_address_2', models.CharField(blank=True, max_length=254)),
                ('apt_suit', models.CharField(blank=True, max_length=50)),
                ('postal_code', models.CharField(default='00000', max_length=20)),
                ('postal_code_9', models.CharField(blank=True, max_length=20)),
                ('signature_name', models.CharField(blank=True, max_length=254)),
                ('signature_title', models.CharField(blank=True, max_length=254)),
                # Callable default: evaluated at save time, not migration time.
                ('signature_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('drivers_license_number', models.CharField(default='empty', max_length=20)),
                ('drivers_license_issue_date', models.DateField(default='9999-12-31')),
                ('drivers_license_exp_date', models.DateField(default='9999-12-31')),
                ('drivers_license_front', models.FileField(blank=True, upload_to='', verbose_name='Picture DL front')),
                ('drivers_license_back', models.FileField(blank=True, upload_to='', verbose_name='Picture DL back')),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('is_community_owner', models.BooleanField(default=False)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True)),
                ('disabled_timestamp', models.DateTimeField(blank=True)),
                ('business_category', models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.businesscategory')),
                ('country_name', models.ForeignKey(default=840, on_delete=django.db.models.deletion.CASCADE, to='xmpt.country')),
                ('county_name', models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.uscounty')),
            ],
        ),
        # US state lookup with denormalized country columns.
        migrations.CreateModel(
            name='USState',
            fields=[
                ('state_id', models.IntegerField(default=0, primary_key=True, serialize=False)),
                ('state_alpha2', models.CharField(default='ZZ', max_length=2)),
                ('state_name', models.CharField(default='ZZ', max_length=254)),
                ('jurisdiction_level_code', models.CharField(blank=True, max_length=10)),
                ('state_fips_code', models.CharField(blank=True, max_length=10)),
                ('country_alpha2', models.CharField(default='US', max_length=2)),
                ('country_alpha3', models.CharField(default='USA', max_length=3)),
                ('country_name', models.CharField(default='United States of America', max_length=254)),
                ('country_id', models.ForeignKey(default=840, on_delete=django.db.models.deletion.CASCADE, to='xmpt.country')),
            ],
            options={
                'verbose_name': 'US State',
                'verbose_name_plural': 'US States',
            },
        ),
        # Exemption certificate linking buyer and seller users (stored as raw
        # GUID strings) to a tax form.
        # NOTE(review): ImageField's height_field/width_field are supposed to
        # name *other model fields* that receive the image dimensions; passing
        # the integers 200 here looks like a mistake for size constraints —
        # confirm against the model definition.
        migrations.CreateModel(
            name='XmptCertificate',
            fields=[
                ('certificate_guid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('buyer_user_guid', models.CharField(max_length=36)),
                ('seller_user_guid', models.CharField(max_length=36)),
                ('certificate_qr_code', models.ImageField(height_field=200, upload_to='', width_field=200)),
                ('certificate_description', models.TextField()),
                ('certificate_type', models.CharField(blank=True, max_length=254)),
                ('is_active', models.BooleanField(default=True)),
                ('is_valid', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('expired_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('tax_form_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.taxform')),
            ],
        ),
        # US town/place lookup with the full FIPS code hierarchy.
        migrations.CreateModel(
            name='USTown',
            fields=[
                ('town_id', models.IntegerField(primary_key=True, serialize=False)),
                ('state_fips_code', models.CharField(blank=True, max_length=10)),
                ('county_fips_code', models.CharField(blank=True, max_length=10)),
                ('county_subdivision_fips_code', models.CharField(blank=True, max_length=10)),
                ('place_fips_code', models.CharField(blank=True, max_length=10)),
                ('city_fips_code', models.CharField(blank=True, max_length=10)),
                ('town_name', models.CharField(max_length=254)),
                ('jurisdiction_level_code', models.CharField(blank=True, max_length=10)),
                ('state_alpha2', models.CharField(default='ZZ', max_length=2)),
                ('country_alpha2', models.CharField(default='US', max_length=2)),
                ('country_alpha3', models.CharField(default='USA', max_length=3)),
                ('country_name', models.CharField(default='United States of America', max_length=254)),
                ('country_id', models.ForeignKey(default=840, on_delete=django.db.models.deletion.CASCADE, to='xmpt.country')),
                ('state_id', models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.usstate')),
            ],
            options={
                'verbose_name_plural': 'US Towns',
            },
        ),
        # Join table: which TINs belong to which user.
        migrations.CreateModel(
            name='UserTINs',
            fields=[
                ('user_tin_id', models.AutoField(primary_key=True, serialize=False)),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('disabled_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('tin_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.tin')),
                ('user_guid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.user')),
            ],
            options={
                'verbose_name_plural': 'User TINs',
            },
        ),
        # Geography FKs added after the referenced models exist (ordering of
        # the CreateModel operations above requires these to come late).
        migrations.AddField(
            model_name='user',
            name='state_name',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.usstate'),
        ),
        migrations.AddField(
            model_name='user',
            name='town_name',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.ustown'),
        ),
        migrations.AddField(
            model_name='uscounty',
            name='state_id',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.usstate'),
        ),
        migrations.AddField(
            model_name='tin',
            name='state_id',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.usstate'),
        ),
        # A tax authority is a community tied to a US state.
        migrations.CreateModel(
            name='TaxAuthority',
            fields=[
                ('tax_authority_guid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('state_name', models.CharField(max_length=50)),
                ('community_guid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.community')),
                ('state_id', models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.usstate')),
            ],
            options={
                'verbose_name_plural': 'Tax Authorities',
            },
        ),
        # User's role within a community (join table with a role code).
        migrations.CreateModel(
            name='Membership',
            fields=[
                ('membership_id', models.IntegerField(primary_key=True, serialize=False)),
                ('roleCode', models.SmallIntegerField(choices=[(0, 'Member'), (1, 'Owner'), (2, 'Tax Authority'), (3, 'XMPT')], default=0)),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('disabled_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('community_guid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.community')),
                ('user_guid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.user')),
            ],
        ),
        # Join table: which TINs belong to which community.
        migrations.CreateModel(
            name='CommunityTINs',
            fields=[
                ('community_tin_id', models.AutoField(primary_key=True, serialize=False)),
                ('is_active', models.BooleanField(default=True)),
                ('is_enabled', models.BooleanField(default=True)),
                ('created_timestamp', models.DateTimeField(auto_now=True)),
                ('deactivated_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('disabled_timestamp', models.DateTimeField(blank=True, default='9999-12-31 00:00')),
                ('community_guid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.community')),
                ('tin_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xmpt.tin')),
            ],
            options={
                'verbose_name_plural': 'Community TINs',
            },
        ),
        # Community geography FKs, added late for the same ordering reason as
        # the User ones above.
        migrations.AddField(
            model_name='community',
            name='country_name',
            field=models.ForeignKey(default=840, on_delete=django.db.models.deletion.CASCADE, to='xmpt.country'),
        ),
        migrations.AddField(
            model_name='community',
            name='county_name',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.uscounty'),
        ),
        migrations.AddField(
            model_name='community',
            name='state_name',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.usstate'),
        ),
        migrations.AddField(
            model_name='community',
            name='town_name',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='xmpt.ustown'),
        ),
    ]
[ "django.db.models.EmailField", "django.db.models.DateField", "django.db.models.TextField", "django.db.models.IntegerField", "django.db.models.ForeignKey", "django.db.models.FileField", "django.db.models.BooleanField", "django.db.models.ImageField", "django.db.models.AutoField", "django.db.models.S...
[((14781, 14877), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.usstate"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.usstate')\n", (14798, 14877), False, 'from django.db import migrations, models\n'), ((14994, 15089), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.ustown"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.ustown')\n", (15011, 15089), False, 'from django.db import migrations, models\n'), ((15209, 15305), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.usstate"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.usstate')\n", (15226, 15305), False, 'from django.db import migrations, models\n'), ((15420, 15516), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.usstate"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.usstate')\n", (15437, 15516), False, 'from django.db import migrations, models\n'), ((18219, 18317), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(840)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.country"""'}), "(default=840, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.country')\n", (18236, 18317), False, 'from django.db import migrations, models\n'), ((18441, 18538), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.uscounty"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.uscounty')\n", (18458, 18538), False, 'from django.db import migrations, models\n'), ((18661, 18757), 
'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.usstate"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.usstate')\n", (18678, 18757), False, 'from django.db import migrations, models\n'), ((18879, 18974), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.ustown"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.ustown')\n", (18896, 18974), False, 'from django.db import migrations, models\n'), ((405, 459), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (424, 459), False, 'from django.db import migrations, models\n'), ((505, 537), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (521, 537), False, 'from django.db import migrations, models\n'), ((584, 616), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (600, 616), False, 'from django.db import migrations, models\n'), ((652, 699), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""2017"""', 'max_length': '(10)'}), "(default='2017', max_length=10)\n", (668, 699), False, 'from django.db import migrations, models\n'), ((945, 1036), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (961, 1036), False, 'from django.db import migrations, models\n'), ((1070, 1119), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""empty"""', 'max_length': '(254)'}), "(default='empty', max_length=254)\n", (1086, 1119), False, 'from django.db import migrations, models\n'), 
((1157, 1206), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""empty"""', 'max_length': '(254)'}), "(default='empty', max_length=254)\n", (1173, 1206), False, 'from django.db import migrations, models\n'), ((1235, 1280), 'django.db.models.EmailField', 'models.EmailField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (1252, 1280), False, 'from django.db import migrations, models\n'), ((1316, 1359), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(20)'}), '(blank=True, max_length=20)\n', (1332, 1359), False, 'from django.db import migrations, models\n'), ((1393, 1483), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('M', 'Mobile'), ('O', 'Office')]", 'max_length': '(1)'}), "(blank=True, choices=[('M', 'Mobile'), ('O', 'Office')],\n max_length=1)\n", (1409, 1483), False, 'from django.db import migrations, models\n'), ((1519, 1568), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""empty"""', 'max_length': '(254)'}), "(default='empty', max_length=254)\n", (1535, 1568), False, 'from django.db import migrations, models\n'), ((1608, 1652), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (1624, 1652), False, 'from django.db import migrations, models\n'), ((1684, 1727), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(50)'}), '(blank=True, max_length=50)\n', (1700, 1727), False, 'from django.db import migrations, models\n'), ((1762, 1810), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""00000"""', 'max_length': '(20)'}), "(default='00000', max_length=20)\n", (1778, 1810), False, 'from django.db import migrations, models\n'), ((1847, 1890), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(20)'}), '(blank=True, 
max_length=20)\n', (1863, 1890), False, 'from django.db import migrations, models\n'), ((1923, 1956), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1942, 1956), False, 'from django.db import migrations, models\n'), ((1990, 2023), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2009, 2023), False, 'from django.db import migrations, models\n'), ((2065, 2099), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2084, 2099), False, 'from django.db import migrations, models\n'), ((2140, 2175), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2160, 2175), False, 'from django.db import migrations, models\n'), ((2220, 2280), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (2240, 2280), False, 'from django.db import migrations, models\n'), ((2322, 2382), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (2342, 2382), False, 'from django.db import migrations, models\n'), ((2423, 2528), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.businesscategory"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.businesscategory')\n", (2440, 2528), False, 'from django.db import migrations, models\n'), ((2759, 2852), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2775, 2852), False, 'from django.db import migrations, 
models\n'), ((2891, 2922), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)'}), '(max_length=36)\n', (2907, 2922), False, 'from django.db import migrations, models\n'), ((2964, 2995), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)'}), '(max_length=36)\n', (2980, 2995), False, 'from django.db import migrations, models\n'), ((3035, 3136), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'choices': "[(1, 'Parent_Child'), (2, 'Sibling_Sibling')]", 'default': '(1)'}), "(blank=True, choices=[(1, 'Parent_Child'), (2,\n 'Sibling_Sibling')], default=1)\n", (3054, 3136), False, 'from django.db import migrations, models\n'), ((3165, 3198), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3184, 3198), False, 'from django.db import migrations, models\n'), ((3230, 3263), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3249, 3263), False, 'from django.db import migrations, models\n'), ((3297, 3331), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (3316, 3331), False, 'from django.db import migrations, models\n'), ((3365, 3398), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3384, 3398), False, 'from django.db import migrations, models\n'), ((3433, 3466), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3452, 3466), False, 'from django.db import migrations, models\n'), ((3499, 3532), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3518, 3532), False, 'from django.db import migrations, models\n'), ((3565, 3598), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3584, 3598), False, 'from django.db 
import migrations, models\n'), ((3632, 3665), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3651, 3665), False, 'from django.db import migrations, models\n'), ((3706, 3741), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (3726, 3741), False, 'from django.db import migrations, models\n'), ((3786, 3846), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (3806, 3846), False, 'from django.db import migrations, models\n'), ((3887, 3947), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (3907, 3947), False, 'from django.db import migrations, models\n'), ((4187, 4241), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (4206, 4241), False, 'from django.db import migrations, models\n'), ((4279, 4309), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)'}), '(max_length=2)\n', (4295, 4309), False, 'from django.db import migrations, models\n'), ((4347, 4377), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)'}), '(max_length=3)\n', (4363, 4377), False, 'from django.db import migrations, models\n'), ((4413, 4445), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (4429, 4445), False, 'from django.db import migrations, models\n'), ((4676, 4730), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (4695, 4730), False, 'from django.db import migrations, models\n'), ((4767, 4811), 'django.db.models.CharField', 
'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (4783, 4811), False, 'from django.db import migrations, models\n'), ((4852, 4896), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (4868, 4896), False, 'from django.db import migrations, models\n'), ((5167, 5218), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (5183, 5218), False, 'from django.db import migrations, models\n'), ((5250, 5360), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('EIN', 'EIN'), ('TIN', 'TIN'), ('SSN', 'SSN'), ('STN', 'STID')]", 'max_length': '(3)'}), "(choices=[('EIN', 'EIN'), ('TIN', 'TIN'), ('SSN', 'SSN'), (\n 'STN', 'STID')], max_length=3)\n", (5266, 5360), False, 'from django.db import migrations, models\n'), ((5387, 5419), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (5403, 5419), False, 'from django.db import migrations, models\n'), ((5452, 5485), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5471, 5485), False, 'from django.db import migrations, models\n'), ((5519, 5552), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5538, 5552), False, 'from django.db import migrations, models\n'), ((5593, 5628), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (5613, 5628), False, 'from django.db import migrations, models\n'), ((5673, 5733), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (5693, 5733), False, 'from django.db import migrations, models\n'), ((5775, 5835), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (5795, 5835), False, 'from django.db import migrations, models\n'), ((5869, 5967), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(840)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.country"""'}), "(default=840, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.country')\n", (5886, 5967), False, 'from django.db import migrations, models\n'), ((6199, 6253), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (6218, 6253), False, 'from django.db import migrations, models\n'), ((6289, 6333), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ZZ"""', 'max_length': '(2)'}), "(default='ZZ', max_length=2)\n", (6305, 6333), False, 'from django.db import migrations, models\n'), ((6372, 6415), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (6388, 6415), False, 'from django.db import migrations, models\n'), ((6455, 6498), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (6471, 6498), False, 'from django.db import migrations, models\n'), ((6550, 6593), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (6566, 6593), False, 'from django.db import migrations, models\n'), ((6628, 6660), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (6644, 6660), False, 'from django.db import migrations, models\n'), ((6707, 6750), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', 
(6723, 6750), False, 'from django.db import migrations, models\n'), ((6788, 6832), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""US"""', 'max_length': '(2)'}), "(default='US', max_length=2)\n", (6804, 6832), False, 'from django.db import migrations, models\n'), ((6870, 6915), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""USA"""', 'max_length': '(3)'}), "(default='USA', max_length=3)\n", (6886, 6915), False, 'from django.db import migrations, models\n'), ((6951, 7019), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""United States of America"""', 'max_length': '(254)'}), "(default='United States of America', max_length=254)\n", (6967, 7019), False, 'from django.db import migrations, models\n'), ((7053, 7151), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(840)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.country"""'}), "(default=840, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.country')\n", (7070, 7151), False, 'from django.db import migrations, models\n'), ((7375, 7466), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (7391, 7466), False, 'from django.db import migrations, models\n'), ((7495, 7544), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""empty"""', 'max_length': '(254)'}), "(default='empty', max_length=254)\n", (7511, 7544), False, 'from django.db import migrations, models\n'), ((7578, 7622), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (7594, 7622), False, 'from django.db import migrations, models\n'), ((7657, 7701), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)'}), '(blank=True, 
max_length=100)\n', (7673, 7701), False, 'from django.db import migrations, models\n'), ((7734, 7778), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (7750, 7778), False, 'from django.db import migrations, models\n'), ((7807, 7852), 'django.db.models.EmailField', 'models.EmailField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (7824, 7852), False, 'from django.db import migrations, models\n'), ((7888, 7931), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(20)'}), '(blank=True, max_length=20)\n', (7904, 7931), False, 'from django.db import migrations, models\n'), ((7965, 8055), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('M', 'Mobile'), ('O', 'Office')]", 'max_length': '(1)'}), "(blank=True, choices=[('M', 'Mobile'), ('O', 'Office')],\n max_length=1)\n", (7981, 8055), False, 'from django.db import migrations, models\n'), ((8091, 8140), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""empty"""', 'max_length': '(254)'}), "(default='empty', max_length=254)\n", (8107, 8140), False, 'from django.db import migrations, models\n'), ((8180, 8224), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (8196, 8224), False, 'from django.db import migrations, models\n'), ((8256, 8299), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(50)'}), '(blank=True, max_length=50)\n', (8272, 8299), False, 'from django.db import migrations, models\n'), ((8334, 8382), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""00000"""', 'max_length': '(20)'}), "(default='00000', max_length=20)\n", (8350, 8382), False, 'from django.db import migrations, models\n'), ((8419, 8462), 'django.db.models.CharField', 'models.CharField', 
([], {'blank': '(True)', 'max_length': '(20)'}), '(blank=True, max_length=20)\n', (8435, 8462), False, 'from django.db import migrations, models\n'), ((8500, 8544), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (8516, 8544), False, 'from django.db import migrations, models\n'), ((8583, 8627), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (8599, 8627), False, 'from django.db import migrations, models\n'), ((8665, 8728), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': 'datetime.datetime.now'}), '(blank=True, default=datetime.datetime.now)\n', (8685, 8728), False, 'from django.db import migrations, models\n'), ((8774, 8822), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""empty"""', 'max_length': '(20)'}), "(default='empty', max_length=20)\n", (8790, 8822), False, 'from django.db import migrations, models\n'), ((8872, 8910), 'django.db.models.DateField', 'models.DateField', ([], {'default': '"""9999-12-31"""'}), "(default='9999-12-31')\n", (8888, 8910), False, 'from django.db import migrations, models\n'), ((8958, 8996), 'django.db.models.DateField', 'models.DateField', ([], {'default': '"""9999-12-31"""'}), "(default='9999-12-31')\n", (8974, 8996), False, 'from django.db import migrations, models\n'), ((9041, 9116), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'upload_to': '""""""', 'verbose_name': '"""Picture DL front"""'}), "(blank=True, upload_to='', verbose_name='Picture DL front')\n", (9057, 9116), False, 'from django.db import migrations, models\n'), ((9160, 9234), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'upload_to': '""""""', 'verbose_name': '"""Picture DL back"""'}), "(blank=True, upload_to='', verbose_name='Picture DL back')\n", (9176, 9234), False, 
'from django.db import migrations, models\n'), ((9267, 9300), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9286, 9300), False, 'from django.db import migrations, models\n'), ((9334, 9367), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9353, 9367), False, 'from django.db import migrations, models\n'), ((9409, 9443), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (9428, 9443), False, 'from django.db import migrations, models\n'), ((9484, 9519), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (9504, 9519), False, 'from django.db import migrations, models\n'), ((9564, 9596), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)'}), '(blank=True)\n', (9584, 9596), False, 'from django.db import migrations, models\n'), ((9638, 9670), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)'}), '(blank=True)\n', (9658, 9670), False, 'from django.db import migrations, models\n'), ((9711, 9816), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.businesscategory"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.businesscategory')\n", (9728, 9816), False, 'from django.db import migrations, models\n'), ((9848, 9946), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(840)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.country"""'}), "(default=840, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.country')\n", (9865, 9946), False, 'from django.db import migrations, models\n'), ((9977, 10074), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 
'"""xmpt.uscounty"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.uscounty')\n", (9994, 10074), False, 'from django.db import migrations, models\n'), ((10209, 10274), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=0, primary_key=True, serialize=False)\n', (10228, 10274), False, 'from django.db import migrations, models\n'), ((10310, 10354), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ZZ"""', 'max_length': '(2)'}), "(default='ZZ', max_length=2)\n", (10326, 10354), False, 'from django.db import migrations, models\n'), ((10388, 10434), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ZZ"""', 'max_length': '(254)'}), "(default='ZZ', max_length=254)\n", (10404, 10434), False, 'from django.db import migrations, models\n'), ((10481, 10524), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (10497, 10524), False, 'from django.db import migrations, models\n'), ((10563, 10606), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (10579, 10606), False, 'from django.db import migrations, models\n'), ((10644, 10688), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""US"""', 'max_length': '(2)'}), "(default='US', max_length=2)\n", (10660, 10688), False, 'from django.db import migrations, models\n'), ((10726, 10771), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""USA"""', 'max_length': '(3)'}), "(default='USA', max_length=3)\n", (10742, 10771), False, 'from django.db import migrations, models\n'), ((10807, 10875), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""United States of America"""', 'max_length': '(254)'}), "(default='United States of America', max_length=254)\n", (10823, 
10875), False, 'from django.db import migrations, models\n'), ((10909, 11007), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(840)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.country"""'}), "(default=840, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.country')\n", (10926, 11007), False, 'from django.db import migrations, models\n'), ((11291, 11382), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (11307, 11382), False, 'from django.db import migrations, models\n'), ((11417, 11448), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)'}), '(max_length=36)\n', (11433, 11448), False, 'from django.db import migrations, models\n'), ((11488, 11519), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)'}), '(max_length=36)\n', (11504, 11519), False, 'from django.db import migrations, models\n'), ((11562, 11628), 'django.db.models.ImageField', 'models.ImageField', ([], {'height_field': '(200)', 'upload_to': '""""""', 'width_field': '(200)'}), "(height_field=200, upload_to='', width_field=200)\n", (11579, 11628), False, 'from django.db import migrations, models\n'), ((11675, 11693), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (11691, 11693), False, 'from django.db import migrations, models\n'), ((11733, 11777), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(254)'}), '(blank=True, max_length=254)\n', (11749, 11777), False, 'from django.db import migrations, models\n'), ((11810, 11843), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11829, 11843), False, 'from django.db import migrations, models\n'), ((11875, 11908), 'django.db.models.BooleanField', 
'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11894, 11908), False, 'from django.db import migrations, models\n'), ((11942, 11975), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11961, 11975), False, 'from django.db import migrations, models\n'), ((12016, 12051), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (12036, 12051), False, 'from django.db import migrations, models\n'), ((12092, 12152), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (12112, 12152), False, 'from django.db import migrations, models\n'), ((12187, 12273), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.taxform"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'xmpt.taxform')\n", (12204, 12273), False, 'from django.db import migrations, models\n'), ((12405, 12459), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (12424, 12459), False, 'from django.db import migrations, models\n'), ((12498, 12541), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (12514, 12541), False, 'from django.db import migrations, models\n'), ((12581, 12624), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (12597, 12624), False, 'from django.db import migrations, models\n'), ((12676, 12719), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (12692, 12719), False, 'from django.db import migrations, models\n'), ((12758, 12801), 
'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (12774, 12801), False, 'from django.db import migrations, models\n'), ((12839, 12882), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (12855, 12882), False, 'from django.db import migrations, models\n'), ((12915, 12947), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (12931, 12947), False, 'from django.db import migrations, models\n'), ((12994, 13037), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)'}), '(blank=True, max_length=10)\n', (13010, 13037), False, 'from django.db import migrations, models\n'), ((13073, 13117), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ZZ"""', 'max_length': '(2)'}), "(default='ZZ', max_length=2)\n", (13089, 13117), False, 'from django.db import migrations, models\n'), ((13155, 13199), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""US"""', 'max_length': '(2)'}), "(default='US', max_length=2)\n", (13171, 13199), False, 'from django.db import migrations, models\n'), ((13237, 13282), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""USA"""', 'max_length': '(3)'}), "(default='USA', max_length=3)\n", (13253, 13282), False, 'from django.db import migrations, models\n'), ((13318, 13386), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""United States of America"""', 'max_length': '(254)'}), "(default='United States of America', max_length=254)\n", (13334, 13386), False, 'from django.db import migrations, models\n'), ((13420, 13518), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(840)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.country"""'}), "(default=840, 
on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.country')\n", (13437, 13518), False, 'from django.db import migrations, models\n'), ((13546, 13642), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.usstate"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.usstate')\n", (13563, 13642), False, 'from django.db import migrations, models\n'), ((13869, 13920), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (13885, 13920), False, 'from django.db import migrations, models\n'), ((13953, 13986), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (13972, 13986), False, 'from django.db import migrations, models\n'), ((14020, 14053), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (14039, 14053), False, 'from django.db import migrations, models\n'), ((14094, 14129), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (14114, 14129), False, 'from django.db import migrations, models\n'), ((14174, 14234), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (14194, 14234), False, 'from django.db import migrations, models\n'), ((14276, 14336), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (14296, 14336), False, 'from django.db import migrations, models\n'), ((14366, 14443), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.tin"""'}), "(on_delete=django.db.models.deletion.CASCADE, 
to='xmpt.tin')\n", (14383, 14443), False, 'from django.db import migrations, models\n'), ((14476, 14554), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.user"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='xmpt.user')\n", (14493, 14554), False, 'from django.db import migrations, models\n'), ((15650, 15741), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (15666, 15741), False, 'from django.db import migrations, models\n'), ((15771, 15802), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (15787, 15802), False, 'from django.db import migrations, models\n'), ((15840, 15928), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.community"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'xmpt.community')\n", (15857, 15928), False, 'from django.db import migrations, models\n'), ((15955, 16051), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(0)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.usstate"""'}), "(default=0, on_delete=django.db.models.deletion.CASCADE,\n to='xmpt.usstate')\n", (15972, 16051), False, 'from django.db import migrations, models\n'), ((16289, 16343), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (16308, 16343), False, 'from django.db import migrations, models\n'), ((16375, 16488), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'choices': "[(0, 'Member'), (1, 'Owner'), (2, 'Tax Authority'), (3, 'XMPT')]", 'default': '(0)'}), "(choices=[(0, 'Member'), (1, 'Owner'), (2,\n 'Tax 
Authority'), (3, 'XMPT')], default=0)\n", (16399, 16488), False, 'from django.db import migrations, models\n'), ((16517, 16550), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (16536, 16550), False, 'from django.db import migrations, models\n'), ((16584, 16617), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (16603, 16617), False, 'from django.db import migrations, models\n'), ((16658, 16693), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (16678, 16693), False, 'from django.db import migrations, models\n'), ((16738, 16798), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (16758, 16798), False, 'from django.db import migrations, models\n'), ((16840, 16900), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (16860, 16900), False, 'from django.db import migrations, models\n'), ((16938, 17026), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.community"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'xmpt.community')\n", (16955, 17026), False, 'from django.db import migrations, models\n'), ((17054, 17132), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.user"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='xmpt.user')\n", (17071, 17132), False, 'from django.db import migrations, models\n'), ((17285, 17336), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (17301, 17336), False, 'from django.db 
import migrations, models\n'), ((17369, 17402), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (17388, 17402), False, 'from django.db import migrations, models\n'), ((17436, 17469), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (17455, 17469), False, 'from django.db import migrations, models\n'), ((17510, 17545), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (17530, 17545), False, 'from django.db import migrations, models\n'), ((17590, 17650), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (17610, 17650), False, 'from django.db import migrations, models\n'), ((17692, 17752), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'default': '"""9999-12-31 00:00"""'}), "(blank=True, default='9999-12-31 00:00')\n", (17712, 17752), False, 'from django.db import migrations, models\n'), ((17790, 17878), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.community"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'xmpt.community')\n", (17807, 17878), False, 'from django.db import migrations, models\n'), ((17903, 17980), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""xmpt.tin"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='xmpt.tin')\n", (17920, 17980), False, 'from django.db import migrations, models\n')]
# INFO
__author__ = "<NAME>"
__date__ = "26 Mar 2022"
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Definitive version"
__copyright__ = "© 2022"

# SCRIPT
import numpy as np
from logistic_regression import *
from other_functions import *

print('\033[92m\033[1m\n\n------------------ 1. TRAINING RESULTS --------------------\033[0m')

# Titanic training set: feature matrix and binary survival labels.
train_X, train_Y = load_file("../titanic-data/titanic-train.txt")

# --- Step 1: sweep candidate learning rates, recording training accuracy ----
candidate_rates = (1e-4, 0.5e-4, 1e-3, 0.5e-3, 1e-2, 0.05e-1)
rate_to_accuracy = {}
for rate in candidate_rates:
    weights, bias, losses = logreg_train(train_X, train_Y, rate)
    posteriors = logreg_inference(train_X, weights, bias)
    predictions = (posteriors >= 0.5)
    rate_to_accuracy[rate] = (train_Y == predictions).mean() * 100

# Plot accuracy versus learning rate, then keep the rate that maximizes it.
display_lr_vs_accuracy(rate_to_accuracy)
best_lr_value, max_accuracy = find_best_lr_value(rate_to_accuracy)
print('\nBest learning rate value:', best_lr_value, ' Accuracy:', round(max_accuracy, 2))

# --- Step 2: with the chosen rate, study how the loss evolves per iteration -
weights, bias, losses = logreg_train(train_X, train_Y, best_lr_value)
display_iteration_vs_loss(losses)
# Pick the iteration count that minimizes the loss (i.e. where it converges).
best_iterations_value, min_loss = find_best_iterations_value(losses)
print('\nBest iterations value:', best_iterations_value, ' Loss:', round(min_loss, 2))

# --- Extra: retrain the "best" model and persist its parameters to CSV ------
# 60000 iterations is a time/performance trade-off: beyond that the loss
# barely decreases and the extra training time is not worth it.
weights, bias, losses = logreg_train(train_X, train_Y, best_lr_value, 60000)
# Store w and b together as a single flat vector in ../parameters.csv.
np.savetxt('../parameters.csv', np.append(weights, bias))

# END
print('\n')
[ "numpy.append" ]
[((2421, 2436), 'numpy.append', 'np.append', (['w', 'b'], {}), '(w, b)\n', (2430, 2436), True, 'import numpy as np\n')]
import requests import urllib class ResolveCoreference: def __init__(self): self.resolved_sentece = [] self.input_sentence = None def process(self, corenlp_output): self.input_sentence = corenlp_output for coref in self.input_sentence['corefs']: mentions = self.input_sentence['corefs'][coref] antecedent = mentions[0] # first mention in the coreference chain for j in range(1, len(mentions)): mention = mentions[j] if mention['type'] == 'PRONOMINAL': # get the attributes of the target mention in the corresponding sentence target_sentence = mention['sentNum'] target_token = mention['startIndex'] - 1 # transfer the antecedent's word form to the appropriate token in the sentence self.input_sentence['sentences'][target_sentence - 1]['tokens'][target_token]['word'] = antecedent['text'] def get_resolved(self): possessives = {'hers', 'his', 'their', 'theirs'} for sentence in self.input_sentence['sentences']: for token in sentence['tokens']: output_word = token['word'] # check lemmas as well as tags for possessive pronouns in case of tagging errors if token['lemma'] in possessives or token['pos'] == 'PRP$': output_word += "'s" # add the possessive morpheme output_word += token['after'] self.resolved_sentece.append(output_word) resolvedStr = ''.join(self.resolved_sentece) return resolvedStr def ResolveCoreferenceDriver(data): text = "Tom and Jane are good friends. They are cool. He knows a lot of things and so does she. His car is red, but " \ "hers is blue. It is older than hers. The big cat ate its dinner." url = 'http://localhost:9000?properties=' params = '\"annotators": \"dcoref\", \"outputFormat\": \"json\"' #print(params) queryString = urllib.parse.quote(params) #data = 'John is teacher. He loves coding' #data = "You pick up the fork and plate. Then move them to another table." #data = 'You move the green ball on red block. Now pick it up again and put it on yellow base' #data = 'push the box to left of table and put fork on it.' 
#print(url+queryString) response = requests.post(url+queryString,data) resolveObj = ResolveCoreference() resolveObj.process(response.json()) return resolveObj.get_resolved() if __name__ == "__main__": print(ResolveCoreferenceDriver(input()))
[ "requests.post", "urllib.parse.quote" ]
[((2063, 2089), 'urllib.parse.quote', 'urllib.parse.quote', (['params'], {}), '(params)\n', (2081, 2089), False, 'import urllib\n'), ((2422, 2460), 'requests.post', 'requests.post', (['(url + queryString)', 'data'], {}), '(url + queryString, data)\n', (2435, 2460), False, 'import requests\n')]
#!/usr/bin/python3.8 """ Genetic Algorithm to maximize surveillance over a population for AI Assignment. Author: Sam (eremus-dev) Repo: https://github.com/eremus-dev """ import math from collections import Counter from typing import List, Dict import numpy as np import matplotlib.pyplot as plt from test_pop import test_pop """ GENETIC ALGORITHM CONFIGURATION VARIABLES """ # Genetic Algorithm and Camera Config genetic_pop = 100 # number different genetic strains generation = 100 # number of generations to maximize coverage view_radius = 15 # how far the cameras see citizens = 200 # how many people we need to surveil cam_count = 4 # how many cams we have to surveil them with mutation_chance = 10 # percentage chance mutation occurs threshold = 100 # stop at this result or generation test_number = 10 # number of tests to run, set to zero if no tests Coord = List[int] # Type of co-ordinates def gen_randpop(size: int) -> List[Coord]: """ Function to generate randomly distributed population to surveil """ obs = [] # [x,y] of size number of people for _ in range(1, size + 1): xy = [] # x, y co-ords of each person x = np.random.randint(1, 100) xy.append(x) y = np.random.randint(1, 100) xy.append(y) obs.append(xy) return np.array(obs, copy=True) def rate_gen(cams: Dict[int, List[Coord]], pop: List[Coord]) -> Dict[int, int]: """ Function to get the best of the population to breed, mutate and to survive """ scores = {} for n in cams: scores[n] = fitness_function(cams[n], pop) return scores def fitness_function(cams: List[Coord], pop: List[Coord]) -> int: """ Function to calculate number of surveilled citizens. 
Check if all the cameras can see them, if any can score increases """ score = [] for cit in pop: test = False for cam in cams: if ( math.sqrt(((cam[0] - cit[0]) ** 2) + ((cam[1] - cit[1]) ** 2)) <= view_radius ): test = True score.append(test) return score.count(True) def select_from_pop( cams: Dict[int, List[Coord]], total_scores ) -> Dict[int, List[Coord]]: """ Function that takes a dict of camera positions and a dict of scores and breeds the strongest returns new population of cameras """ top_scores = {} new_pop = {} selection = int(len(total_scores) / 2) scores = sorted(total_scores, key=total_scores.get, reverse=True)[:selection] assert len(scores) == selection for i in scores: top_scores[i] = total_scores[i] new_pop[i] = cams[i] assert len(new_pop) == selection return breed_strongest(top_scores, new_pop) def breed_strongest( top_scores: Dict[int, int], new_pop: Dict[int, List[Coord]] ) -> Dict[int, List[Coord]]: """ Function to breed 25 best positions. Strongest always remains unchanged. 
""" count = 0 full_pop = {} keys = list(new_pop.keys()) for i in keys: dad = [] child = [] mum = [] mum = np.copy(new_pop[i]) child = dad = np.copy( new_pop[np.random.choice(keys)] ) # randomly select breeding mate child[0] = mum[np.random.randint(0, 3)] child[1] = mum[np.random.randint(0, 3)] full_pop[count] = mum # save mum count += 1 full_pop[count] = child # add random child count += 1 full_pop = mutate(full_pop, top_scores) assert len(full_pop) == genetic_pop return full_pop def mutate( full_pop: Dict[int, List[Coord]], top_scores: Dict[int, int] ) -> Dict[int, List[Coord]]: """ Function to mutate population, 10% chance they will mutate """ for i in full_pop: if np.random.randint(0, 100) > (100 - mutation_chance): temp = full_pop[i] xmod, ymod = [ np.random.randint(-20, 20), np.random.randint(-20, 20), ] # pick random mutation camera_num = np.random.randint(0, 3) camera = temp[camera_num] # cameras to mod camera[0] = (camera[0] + xmod) % 100 camera[1] = (camera[1] + ymod) % 100 temp[camera_num] = camera full_pop[i] = temp return full_pop def plot_pop(pop: List[Coord], cams: List[Coord], top_score: int, gen: int, run: int) -> None: """ Function to plot placement of cams and population on graph """ plt.cla() # clears graph plt.gcf().canvas.mpl_connect( # allows exit key to quit qraph "key_release_event", lambda event: [exit(0) if event.key == "escape" else None] ) plt.axis("equal") plt.grid(True) plt.plot(pop[:, 0], pop[:, 1], "ok") plt.plot(cams[:, 0], cams[:, 1], "*") for i in range(len(cams)): # plots camera view range circle = plt.Circle( (cams[i][0], cams[i][1]), view_radius, color="r", fill=False ) ax = plt.gca() ax.add_artist(circle) ax = plt.gca() ax.set_xlabel("City Terrain X") # sets up all labels ax.set_ylabel("City Terrain Y") ax.set_title(f"Visualisation of Cameras and Population\nSurveilled Population {max_seen} in Generation {gen}") plt.pause(0.01) plt.draw() # draws graph if gen == 199: plt.savefig(f'./results/last_gen_test{run}.png') def 
plot_final_results(generational_record: Dict[int, int], run: int, max_seen: int) -> None: ''' Produces final plot of the progression of the GA across a single generational run ''' plt.cla() plt.grid(True) lists = sorted(generational_record.items()) x, y = zip(*lists) plt.xlim(-2, generation+2) plt.ylim(50, 120) plt.plot(x, y, label="Pop Surveilled", linestyle="--", marker='o') ax = plt.gca() ax.set_xlabel("Generations") ax.set_ylabel("Number of Population Surveilled") ax.set_title(f"Population Surveilled Over Generations\nMax Population Surveilled {max_seen}") plt.savefig(f'./results/final_results_test{run}.png') if test_number > 0: plt.pause(0.5) plt.draw() else: plt.show() def plot_aggregate_results(aggregate_results: Dict[int, int], ) -> None: ''' Produces plot of aggregate results for test runs of GA ''' # Graph aggregate results and average of test runs plt.cla() plt.grid(True) lists = sorted(aggregate_results.items()) x,y = zip(*lists) avg = [sum(y) / len(y)] * len(x) mean = np.mean(y) std_dev = format(np.std(y), '.3f') maximum = max(y) plt.scatter(x, y, label="Pop Surveilled", color="r") ax = plt.gca() ax.plot(x, avg, label='Mean', linestyle='--') ax.set_title(f"Population Surveilled Over Tests using Genetic Algorithm\nPopulation Surveilled Mean: {mean}, Max {maximum}, Stdev {std_dev}") ax.legend(loc='upper left') ax.set_xlabel("Test Number") ax.set_ylabel("Number of Population Surveilled") plt.savefig(f'./results/aggregate_result_GA_test_run.png') plt.show() if __name__ == "__main__": aggregate_results = {} # collect each tests results # run the GA for test_number times and graph results for run in range(0, test_number): generational_record = {} # record to graph at end cams = {} # dictionary of genetic population #citpop = gen_randpop(citizens) # a numpy array of citizens randomly distributed citpop = np.array(test_pop) for i in range(genetic_pop): # generate genetic population cams[i] = gen_randpop(cam_count) # a numpy array of cams randomly distributed # 
Main Genetic Algorithm Loop gen = 0 max_seen = 0 while (gen < generation) & ( max_seen < threshold ): # evolve for number of generations if gen != 0: # do nothing first time through loop cams = select_from_pop(cams, total_scores) total_scores = rate_gen(cams, citpop) best_cam = max(total_scores, key=total_scores.get) max_seen = total_scores[best_cam] print(f"We surveilled {max_seen} in generation {gen}, best is {best_cam}") plot_pop(citpop, cams[best_cam], max_seen, gen, run) # print best fit for each generation generational_record[gen] = max_seen # to graph at end of process gen += 1 # Graph Results of Genetic Algorithm over generations plot_final_results(generational_record, run, max_seen) aggregate_results[run] = max_seen # Graph aggregate results and average of test runs plot_aggregate_results(aggregate_results)
[ "matplotlib.pyplot.grid", "math.sqrt", "numpy.array", "numpy.mean", "matplotlib.pyplot.plot", "matplotlib.pyplot.scatter", "matplotlib.pyplot.axis", "matplotlib.pyplot.ylim", "matplotlib.pyplot.cla", "matplotlib.pyplot.savefig", "matplotlib.pyplot.Circle", "numpy.random.choice", "matplotlib....
[((1327, 1351), 'numpy.array', 'np.array', (['obs'], {'copy': '(True)'}), '(obs, copy=True)\n', (1335, 1351), True, 'import numpy as np\n'), ((4549, 4558), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (4556, 4558), True, 'import matplotlib.pyplot as plt\n'), ((4776, 4793), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (4784, 4793), True, 'import matplotlib.pyplot as plt\n'), ((4821, 4835), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4829, 4835), True, 'import matplotlib.pyplot as plt\n'), ((4840, 4876), 'matplotlib.pyplot.plot', 'plt.plot', (['pop[:, 0]', 'pop[:, 1]', '"""ok"""'], {}), "(pop[:, 0], pop[:, 1], 'ok')\n", (4848, 4876), True, 'import matplotlib.pyplot as plt\n'), ((4881, 4918), 'matplotlib.pyplot.plot', 'plt.plot', (['cams[:, 0]', 'cams[:, 1]', '"""*"""'], {}), "(cams[:, 0], cams[:, 1], '*')\n", (4889, 4918), True, 'import matplotlib.pyplot as plt\n'), ((5156, 5165), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5163, 5165), True, 'import matplotlib.pyplot as plt\n'), ((5382, 5397), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.01)'], {}), '(0.01)\n', (5391, 5397), True, 'import matplotlib.pyplot as plt\n'), ((5402, 5412), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (5410, 5412), True, 'import matplotlib.pyplot as plt\n'), ((5731, 5740), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (5738, 5740), True, 'import matplotlib.pyplot as plt\n'), ((5745, 5759), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (5753, 5759), True, 'import matplotlib.pyplot as plt\n'), ((5835, 5863), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-2)', '(generation + 2)'], {}), '(-2, generation + 2)\n', (5843, 5863), True, 'import matplotlib.pyplot as plt\n'), ((5866, 5883), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(50)', '(120)'], {}), '(50, 120)\n', (5874, 5883), True, 'import matplotlib.pyplot as plt\n'), ((5888, 5954), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], 
{'label': '"""Pop Surveilled"""', 'linestyle': '"""--"""', 'marker': '"""o"""'}), "(x, y, label='Pop Surveilled', linestyle='--', marker='o')\n", (5896, 5954), True, 'import matplotlib.pyplot as plt\n'), ((5964, 5973), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5971, 5973), True, 'import matplotlib.pyplot as plt\n'), ((6162, 6215), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""./results/final_results_test{run}.png"""'], {}), "(f'./results/final_results_test{run}.png')\n", (6173, 6215), True, 'import matplotlib.pyplot as plt\n'), ((6529, 6538), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (6536, 6538), True, 'import matplotlib.pyplot as plt\n'), ((6543, 6557), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (6551, 6557), True, 'import matplotlib.pyplot as plt\n'), ((6674, 6684), 'numpy.mean', 'np.mean', (['y'], {}), '(y)\n', (6681, 6684), True, 'import numpy as np\n'), ((6749, 6801), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y'], {'label': '"""Pop Surveilled"""', 'color': '"""r"""'}), "(x, y, label='Pop Surveilled', color='r')\n", (6760, 6801), True, 'import matplotlib.pyplot as plt\n'), ((6811, 6820), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6818, 6820), True, 'import matplotlib.pyplot as plt\n'), ((7139, 7197), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""./results/aggregate_result_GA_test_run.png"""'], {}), "(f'./results/aggregate_result_GA_test_run.png')\n", (7150, 7197), True, 'import matplotlib.pyplot as plt\n'), ((7202, 7212), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7210, 7212), True, 'import matplotlib.pyplot as plt\n'), ((1185, 1210), 'numpy.random.randint', 'np.random.randint', (['(1)', '(100)'], {}), '(1, 100)\n', (1202, 1210), True, 'import numpy as np\n'), ((1245, 1270), 'numpy.random.randint', 'np.random.randint', (['(1)', '(100)'], {}), '(1, 100)\n', (1262, 1270), True, 'import numpy as np\n'), ((3145, 3164), 'numpy.copy', 'np.copy', (['new_pop[i]'], {}), 
'(new_pop[i])\n', (3152, 3164), True, 'import numpy as np\n'), ((4999, 5071), 'matplotlib.pyplot.Circle', 'plt.Circle', (['(cams[i][0], cams[i][1])', 'view_radius'], {'color': '"""r"""', 'fill': '(False)'}), "((cams[i][0], cams[i][1]), view_radius, color='r', fill=False)\n", (5009, 5071), True, 'import matplotlib.pyplot as plt\n'), ((5107, 5116), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5114, 5116), True, 'import matplotlib.pyplot as plt\n'), ((5480, 5528), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""./results/last_gen_test{run}.png"""'], {}), "(f'./results/last_gen_test{run}.png')\n", (5491, 5528), True, 'import matplotlib.pyplot as plt\n'), ((6253, 6267), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.5)'], {}), '(0.5)\n', (6262, 6267), True, 'import matplotlib.pyplot as plt\n'), ((6276, 6286), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (6284, 6286), True, 'import matplotlib.pyplot as plt\n'), ((6305, 6315), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6313, 6315), True, 'import matplotlib.pyplot as plt\n'), ((6706, 6715), 'numpy.std', 'np.std', (['y'], {}), '(y)\n', (6712, 6715), True, 'import numpy as np\n'), ((7620, 7638), 'numpy.array', 'np.array', (['test_pop'], {}), '(test_pop)\n', (7628, 7638), True, 'import numpy as np\n'), ((3307, 3330), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (3324, 3330), True, 'import numpy as np\n'), ((3355, 3378), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (3372, 3378), True, 'import numpy as np\n'), ((3839, 3864), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (3856, 3864), True, 'import numpy as np\n'), ((4101, 4124), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (4118, 4124), True, 'import numpy as np\n'), ((1960, 2018), 'math.sqrt', 'math.sqrt', (['((cam[0] - cit[0]) ** 2 + (cam[1] - cit[1]) ** 2)'], {}), '((cam[0] - cit[0]) ** 2 + (cam[1] - 
cit[1]) ** 2)\n', (1969, 2018), False, 'import math\n'), ((3216, 3238), 'numpy.random.choice', 'np.random.choice', (['keys'], {}), '(keys)\n', (3232, 3238), True, 'import numpy as np\n'), ((3966, 3992), 'numpy.random.randint', 'np.random.randint', (['(-20)', '(20)'], {}), '(-20, 20)\n', (3983, 3992), True, 'import numpy as np\n'), ((4010, 4036), 'numpy.random.randint', 'np.random.randint', (['(-20)', '(20)'], {}), '(-20, 20)\n', (4027, 4036), True, 'import numpy as np\n'), ((4607, 4616), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (4614, 4616), True, 'import matplotlib.pyplot as plt\n')]
#################################################################### # Animus AI Developed by <NAME>ated 13th March 2018 # #################################################################### ''' THIS PROGRAM IS INTENDED FOR USE UNDER SUPERVISION OF HUMAN INTERVENTION. THE SKILL ADDS CAPABILITY OF SCANNING AND ACCESSING DEVICES ON THE NETWORK. NEITHER THE CREATOR OR THE DEVELOPERS OF ANIMUS AI ARE RESPONSIBLE FOR MISUSE OF THIS CODE FOR MALITIOUS PURPOSES. ANIMUS AI HAS THE CAPABILITY TO SCAN FOR DEVICES ON THE NETWORK TO ENABLE IT TO INTERACT WITH OTHER DEVICES. PLEASE USE WISELY AND AT YOUR OWN DISCRETION. ''' #GoogleSkill.py #from Summarize import search_shorten from .EchoSkill import echo def google(search): echo("Sir, I am Searching Google for " + search) from googlesearch.googlesearch import GoogleSearch response = GoogleSearch().search(search, num_results = 2) for result in response.results: echo("Title: " + result.title) # search_shorten(result.getText()) # echo("Content: " + result.getText()) return result.getText() ###################################################################### #System wide google search implementation through the google skill in# #Animus AI # #Copyright <NAME> # ######################################################################
[ "googlesearch.googlesearch.GoogleSearch" ]
[((874, 888), 'googlesearch.googlesearch.GoogleSearch', 'GoogleSearch', ([], {}), '()\n', (886, 888), False, 'from googlesearch.googlesearch import GoogleSearch\n')]
''' Copyright (c) The Dojo Foundation 2011. All Rights Reserved. Copyright (c) IBM Corporation 2008, 2011. All Rights Reserved. ''' # tornado import tornado.ioloop # std lib import logging import time # coweb from .session import BayeuxSession from .connection import BayeuxConnection from .channel import BayeuxChannel log = logging.getLogger('bayeux.server') class BayeuxManager(object): '''Base class for a Bayeux manager tracking known clients.''' def __init__(self, purgeInterval=15, deadAfter=60, exts=[], client_cls=BayeuxSession, connection_cls=BayeuxConnection): self.deadAfter = deadAfter self.timeout = int(self.deadAfter/2.0) self._clientCls = client_cls self._connectionCls = connection_cls self._root = BayeuxChannel('/') self._clients = {} self._flushable = set() self._ioloop = tornado.ioloop.IOLoop.instance() self._purgeGen = None if purgeInterval is None: self._timer = None else: self._timer = tornado.ioloop.PeriodicCallback(self.purge_clients, purgeInterval*1000) self._timer.start() self._exts = exts self._willFlush = False def destroy(self): '''Destroyes the Bayeux manager and deletes its known clients''' # stop the purge timer if self._timer is not None: # do after in case we're in the callback to prevent re-registration # due to tornado bug self._ioloop.add_callback(self._timer.stop) # force the callback reference to None, because stop doesn't # remove the reference from the ioloop, else gc impeded until # next callback occurs, even though stopped self._timer.callback = None self._timer = None self._purgeGen = None # purge all clients cids = list(self._clients.keys()) for cid in cids: cl = self.delete_client(cid) cl.destroy() self._clients = {} def flush(self): '''Flushes all outgoing client messages immediately.''' self._willFlush = False leftover = set() while self._flushable: cl = self._flushable.pop() if not cl.on_flush(): leftover.add(cl) # don't reschedule flush now, next connection will do it self._flushable = leftover def should_flush(self, cl): 
''' Marks a client as having outgoing messages that should be flushed. ''' self._flushable.add(cl) # schedule the flush if not one already pending if not self._willFlush: self._ioloop.add_callback(self.flush) self._willFlush = True def publish(self, msg): '''Publishes a message to all clients subscribed to its channel.''' # strip off client ID before sending out to other clients try: senderId = msg['clientId'] del msg['clientId'] except KeyError: senderId = None try: del msg['id'] except KeyError: pass ch = msg.get('channel', None) # find all subscribed clients clients = set() self._root.collect_clients(clients, BayeuxChannel.split(ch)) for cl in clients: cl.add_message(msg, senderId) def get_root_channel(self): '''Gets the root / BayeuxChannel instance.''' return self._root def build_connection(self, handler): ''' Builds a BayeuxConnection instance to represent a single connection over a negotiated transport from a client. ''' return self._connectionCls(handler, self) def new_client(self): ''' Builds a new BayeuxSession instance to represent a continuing session with a client.''' c = self._clientCls(self, exts=self._exts) self._clients[c.clientId] = c return c def is_client(self, cid): '''Gets if the given client ID is one for a known client.''' return cid in self._clients def get_client(self, cid): '''Gets the client associated with the given client ID.''' return self._clients[cid] def delete_client(self, cid): '''Deletes the client assocaited with the given client ID.''' cl = self._clients[cid] del self._clients[cid] try: self.on_purging_client(cid, cl) except Exception: log.exception('purge delegate') return cl def purge_clients(self): ''' Purges clients that have not performed any action within the configured self.deadAfter interval in seconds. 
''' if not self._purgeGen: # build a new purge generator self._purgeGen = self._purge_clients() try: # iterate the generator next(self._purgeGen) except StopIteration: # purge complete, toss the generator self._purgeGen = None else: # purge incomplete, schedule for immediately continuation self._ioloop.add_callback(self.purge_clients) def _purge_clients(self, timeout=2, blockSize=100): # snapshot the clients in the dict cids = list(self._clients.keys()) # get the current time now = time.time() for i, cid in enumerate(cids): cl = self._clients[cid] if cl.lastSeen is None: self.delete_client(cid) cl.destroy() continue dt = now - cl.lastSeen if dt > self.deadAfter: cl = self.delete_client(cid) cl.destroy() if i % blockSize == 0 and time.time() - now > timeout: # we're taking too long, yield yield def on_purging_client(self, cid, cl): ''' Called after this manager stops tracking a client but before the BayeuxSession instace for the client is destroyed (i.e., it's final messages flushed, sockets closed, etc.) No expected return value. ''' pass
[ "logging.getLogger", "time.time" ]
[((327, 361), 'logging.getLogger', 'logging.getLogger', (['"""bayeux.server"""'], {}), "('bayeux.server')\n", (344, 361), False, 'import logging\n'), ((5462, 5473), 'time.time', 'time.time', ([], {}), '()\n', (5471, 5473), False, 'import time\n'), ((5862, 5873), 'time.time', 'time.time', ([], {}), '()\n', (5871, 5873), False, 'import time\n')]
"""Consumer responsible for writing to stackdriver and associated helpers.""" import logging from datetime import tzinfo, timedelta, datetime class _FixedOffsetTimeZone(tzinfo): """Hack for dealing w/ lack of %z in 2.7 strptime. See https://docs.python.org/2/library/datetime.html#datetime.tzinfo.fromutc """ def __init__(self, offset): self.__offset = timedelta(seconds=offset) self.__name = 'UTC%+is' % (offset) def utcoffset(self, dt): _ = dt return self.__offset def tzname(self, dt): _ = dt return self.__name def dst(self, dt): _ = dt return timedelta(0) class NginxAccessLogConsumer(object): """Consumes nginx log lines and exports to custom stackdriver metrics. Currently only supports exporting request counts by status code. """ NGINX_BASE_TIMESTAMP_FORMAT = '%d/%b/%Y:%H:%M:%S' def __init__(self, client, resource, http_response_metric_name): """Initialize NginxAccessLogConsumer. Args: client: cloud monitoring client. resource: resource object identifying the monitored instance. """ self._client = client self._resource = resource self._reset_time_utc = datetime.utcnow() self._reset_time_utc_offset = self._reset_time_utc.replace( tzinfo=_FixedOffsetTimeZone(0)) self._response_codes = {} self._response_code_metrics = {} self._has_delta = False self._http_response_metric_name = http_response_metric_name def _parse_nginx_timestamp(self, ts_str): """Parse the provided timestamp string. Args: ts_str: nginx format timestamp string '02/Jul/2017:00:00:00 +0000' Returns: datetime object or None if the timestamp could not be parsed. """ try: base, offset = ts_str.split(' ') base_dt = datetime.strptime(base, self.NGINX_BASE_TIMESTAMP_FORMAT) except ValueError: return None return base_dt.replace(tzinfo=_FixedOffsetTimeZone(int(offset))) def reset_time_utc(self): """Returns the time relative to which metric counters are registered. Returns: datetime object relative to UTC (no tzinfo). 
""" return self._reset_time_utc def record(self, parsed_groups): """Record supported metrics from the parsed log line. Args: parsed_groups: dict of str => str elements from an nginx access log line; only relevant fields are datetime and statuscode. """ log_time = self._parse_nginx_timestamp(parsed_groups['datetime']) if log_time is None: logging.warn('Could not parse datetime: "%s"', parsed_groups['datetime']) return if log_time < self._reset_time_utc_offset: return try: code = int(parsed_groups['statuscode']) except ValueError: logging.warn('Could not parse statuscode: "%s"', parsed_groups['statuscode']) return if code not in self._response_codes: self._response_codes[code] = 0 self._response_codes[code] += 1 self._has_delta = True def commit(self): """Write the supported metrics to cloud monitoring.""" if self._has_delta: logging.info('Writing updated counters to %s: %s', self._http_response_metric_name, str(self._response_codes)) for code, count in self._response_codes.iteritems(): if code not in self._response_code_metrics: self._response_code_metrics[code] = self._client.metric( type_=self._http_response_metric_name, labels={'response_code': str(code)}) self._client.write_point( self._response_code_metrics[code], self._resource, count, start_time=self._reset_time_utc) self._has_delta = False
[ "datetime.datetime.strptime", "datetime.timedelta", "logging.warn", "datetime.datetime.utcnow" ]
[((382, 407), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'offset'}), '(seconds=offset)\n', (391, 407), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((648, 660), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (657, 660), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((1262, 1279), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1277, 1279), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((1936, 1993), 'datetime.datetime.strptime', 'datetime.strptime', (['base', 'self.NGINX_BASE_TIMESTAMP_FORMAT'], {}), '(base, self.NGINX_BASE_TIMESTAMP_FORMAT)\n', (1953, 1993), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((2736, 2809), 'logging.warn', 'logging.warn', (['"""Could not parse datetime: "%s\\""""', "parsed_groups['datetime']"], {}), '(\'Could not parse datetime: "%s"\', parsed_groups[\'datetime\'])\n', (2748, 2809), False, 'import logging\n'), ((3028, 3105), 'logging.warn', 'logging.warn', (['"""Could not parse statuscode: "%s\\""""', "parsed_groups['statuscode']"], {}), '(\'Could not parse statuscode: "%s"\', parsed_groups[\'statuscode\'])\n', (3040, 3105), False, 'import logging\n')]
#! /usr/bin/env python3 """ %(prog)s takes a fault injection executable and executes it Usage: %(prog)s --CLI/--GUI <fault injection executable> <the same options that you use to run the excutable before> %(prog)s --help(-h): show help information Prerequisite: 0. You need to specify --CLI or --GUI depending on whether you're invoking it from the command line or from the GUI. 1. You need to be at the parent directory of the <fault injection executable> to invoke %(prog)s. This is to make it easier for LLFI to track the outputs generated by <fault injection executable> 2. (prog)s only checks recursively at the current directory for possible outputs, if your output is not under current directory, you need to store that output by yourself 3. You need to put your input files (if any) under current directory 4. You need to have 'input.yaml' under your current directory, which contains appropriate options for LLFI. """ # This script injects faults the program and produces output # This script should be run after the profiling step import sys, os, subprocess import yaml import time import random import shutil from subprocess import TimeoutExpired runOverride = False optionlist = [] defaultTimeout = 500 fi_max_multiple_default = 100 # basedir is assigned in parseArgs(args) basedir = "" prog = os.path.basename(sys.argv[0]) fi_exe = "" options = { "verbose": False, } def usage(msg = None): retval = 0 if msg is not None: retval = 1 msg = "ERROR: " + msg print(msg, file=sys.stderr) print(__doc__ % globals(), file=sys.stderr) sys.exit(retval) def parseArgs(args): global optionlist, fi_exe if args[0] == "--help" or args[0] == "-h": usage() fi_exe = os.path.realpath(args[0]) basedir = basedir = os.path.abspath(os.path.dirname(os.path.dirname(fi_exe))) optionlist = args[1:] # remove the directory prefix for input files, this is to make it easier for the program # to take a snapshot for index, opt in enumerate(optionlist): if os.path.isfile(opt): if 
os.path.realpath(os.path.dirname(opt)) != basedir: usage("File %s passed through option is not under current directory" % opt) else: optionlist[index] = os.path.basename(opt) if basedir != os.getcwd(): print("Change directory to:", basedir) os.chdir(basedir) def checkInputYaml(): global doc global defaultTimeout #Check for input.yaml's presence yamldir = os.path.dirname(os.path.dirname(fi_exe)) try: f = open(os.path.join(basedir, 'input.yaml'),'r') except: usage("No input.yaml file in the parent directory of fault injection executable") exit(1) #Check for input.yaml's correct formmating try: doc = yaml.load(f) except: f.close() usage("input.yaml is not formatted in proper YAML (reminder: use spaces, not tabs)") exit(1) finally: f.close() if "kernelOption" in doc: for opt in doc["kernelOption"]: if opt=="forceRun": runOverride = True print("Kernel: Forcing run") if "defaultTimeout" in doc: defaultTimeout = int(doc["defaultTimeout"]) assert defaultTimeout > 0, "The timeOut option must be greater than 0" else: print("Default timeout is set to " + str(defaultTimeout) + " by default.") def print_progressbar(idx, nruns): pct = (float(idx) / float(nruns)) WIDTH = 50 bar = "=" * int(pct * WIDTH) bar += ">" bar += "-" * (WIDTH - int(pct * WIDTH)) print(("\r[%s] %.1f%% (%d / %d)" % (bar, pct * 100, idx, nruns)), end='\n') sys.stdout.flush() ################################################################################ def config(): global inputdir, outputdir, errordir, stddir, llfi_stat_dir # config llfi_dir = os.path.dirname(fi_exe) inputdir = os.path.join(llfi_dir, "prog_input") outputdir = os.path.join(llfi_dir, "prog_output") errordir = os.path.join(llfi_dir, "error_output") stddir = os.path.join(llfi_dir, "std_output") llfi_stat_dir = os.path.join(llfi_dir, "llfi_stat_output") if not os.path.isdir(outputdir): os.mkdir(outputdir) if not os.path.isdir(errordir): os.mkdir(errordir) if not os.path.isdir(inputdir): os.mkdir(inputdir) if not os.path.isdir(stddir): 
os.mkdir(stddir) if not os.path.isdir(llfi_stat_dir): os.mkdir(llfi_stat_dir) ################################################################################ def execute( execlist, timeout): global outputfile global return_codes print(' '.join(execlist)) #get state of directory dirSnapshot() print(execlist) p = subprocess.Popen(execlist, stdout = subprocess.PIPE) outputFile = open(outputfile, "wb") program_timed_out = False start_time = 0 elapsetime = 0 #communicate() will block until program exits or until timeout is reached try: start_time = time.time() (p_stdout,p_stderr) = p.communicate(timeout=timeout) elapsetime = int(time.time() - start_time + 1) except TimeoutExpired: #Child process timed out p.kill() #Need to kill the process and then clean up commmunication (p_stdout,p_stderr) = p.communicate(timeout=timeout) program_timed_out = True moveOutput() if program_timed_out: print("\tParent : Child timed out. Cleaning up ... ") else: print("\t program finish", p.returncode) print("\t time taken", elapsetime,"\n") outputFile = open(outputfile, "wb") if program_timed_out: outputFile.write( bytes("\n\n ### Process killed by LLFI for timing out ###\n","UTF-8")) outputFile.write(p_stdout) if program_timed_out: outputFile.write( bytes("\n\n ### Process killed by LLFI for timing out ###\n","UTF-8")) outputFile.close() replenishInput() #for cases where program deletes input or alters them each run # Keep a dict of all return codes received. 
if program_timed_out: if "TO" in return_codes: return_codes["TO"] += 1 else: return_codes["TO"] = 1 else: if p.returncode in return_codes: return_codes[p.returncode] += 1 else: return_codes[p.returncode] = 1 if program_timed_out: return "timed-out" else: return str(p.returncode) ################################################################################ def storeInputFiles(): global inputList inputList=[] ##========Consider comma as separator of arguments ================================== temp_optionlist = [] for item in optionlist: if item.count(',') == 0: temp_optionlist.append(item) else: temp_optionlist.extend(item.split(',')) ##=================================================================================== for opt in temp_optionlist: if os.path.isfile(opt):#stores all files in inputList and copy over to inputdir shutil.copy2(opt, os.path.join(inputdir, opt)) inputList.append(opt) ################################################################################ def replenishInput():#TODO make condition to skip this if input is present for each in inputList: if not os.path.isfile(each):#copy deleted inputfiles back to basedir shutil.copy2(os.path.join(inputdir, each), each) ################################################################################ def moveOutput(): #move all newly created files newfiles = [_file for _file in os.listdir(".")] for each in newfiles: if each not in dirBefore: fileSize = os.stat(each).st_size if fileSize == 0 and each.startswith("llfi"): #empty library output, can delete print(each+ " is going to be deleted for having size of " + str(fileSize)) os.remove(each) else: flds = each.split(".") newName = '.'.join(flds[0:-1]) newName+='.'+run_id+'.'+flds[-1] if newName.startswith("llfi"): os.rename(each, os.path.join(llfi_stat_dir, newName)) else: os.rename(each, os.path.join(outputdir, newName)) ################################################################################ def dirSnapshot(): #snapshot of directory before 
each execute() is performed global dirBefore dirBefore = [_file for _file in os.listdir(".")] ################################################################################ def readCycles(): global totalcycles profinput= open("llfi.stat.prof.txt","r") while 1: line = profinput.readline() if line.strip(): if line[0] == 't': label, totalcycles = line.split("=") break profinput.close() ################################################################################ def checkValues(key, val, var1 = None,var2 = None,var3 = None,var4 = None): #preliminary input checking for fi options #also checks for fi_bit usage by non-kernel users #optional var# are used for fi_bit's case only if key =='run_number': assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val)>0, key+" must be greater than 0 in input.yaml" elif key == 'fi_type': pass ##======== Add number of corrupted bits QINING @MAR 13th======== elif key == 'fi_num_bits': assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >=1, key+" must be greater than or equal to 1 in input.yaml" ##============================================================== ##======== Add second corrupted regs QINING @MAR 27th=========== elif key == "window_len": assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >=0, key+" must be greater than or equal to zero in input.yaml" ##================================================================== ##BEHROOZ: Add max number of target locations elif key == "fi_max_multiple": assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >1, key+" must be greater than one in input.yaml" assert int(val) <=int(fi_max_multiple_default), key+" must be smaller than or equal to "+str(fi_max_multiple_default)+ " in input.yaml" ##============================================================== ##BEHROOZ: Add multiple corrupted regs elif key == "window_len_multiple": assert 
isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >0, key+" must be greater than zero in input.yaml" elif key == "window_len_multiple_startindex": assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >0, key+" must be greater than zero in input.yaml" elif key == "window_len_multiple_endindex": assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >0, key+" must be greater than zero in input.yaml" ##============================================================== elif key == 'fi_cycle': assert isinstance(val, int)==True, key+" must be an integer in input.yaml" ##BEHROOZ: I changed the below line to the current one to fix the fi_cycle assert int(val) > 0, key+" must be greater than 0 in input.yaml" #assert int(val) >= 0, key+" must be greater than or equal to 0 in input.yaml" assert int(val) <= int(totalcycles), key +" must be less than or equal to "+totalcycles.strip()+" in input.yaml" elif key == 'fi_reg_index': assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >= 0, key+" must be greater than or equal to 0 in input.yaml" elif key == 'fi_bit': assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >= 0, key+" must be greater than or equal to 0 in input.yaml" if runOverride: pass elif var1 != None and var1 > 1 and (var2 or var3) and var4: user_input = input("\nWARNING: Injecting into the same cycle(index), bit multiple times "+ "is redundant as it would yield the same result."+ "\nTo turn off this warning, please see Readme "+ "for kernel mode.\nDo you wish to continue anyway? 
(Y/N)\n ") if user_input.upper() =="Y": pass else: exit(1) elif key == 'fi_random_seed': assert isinstance(val, int)==True, key+" must be an integer in input.yaml" assert int(val) >= 0, key+" must be greater than or equal to 0 in input.yaml" ################################################################################ def main(args): global optionlist, outputfile, totalcycles,run_id, return_codes global defaultTimeout fi_indexs=[] parseArgs(args) checkInputYaml() config() # get total num of cycles readCycles() storeInputFiles() #Set up each config file and its corresponding run_number try: rOpt = doc["runOption"] except: print("ERROR: Please include runOption in input.yaml.") exit(1) if not os.path.isfile(fi_exe): print("ERROR: The executable "+ fi_exe+" does not exist.") print("Please build the executables with create-executables.\n") exit(1) else: print("======Fault Injection======") for ii, run in enumerate(rOpt): # Maintain a dict of all return codes received and print summary at end return_codes = {} # Put an empty line between configs if ii > 0: print("") print("---FI Config #"+str(ii)+"---") if "numOfRuns" not in run["run"]: print("ERROR: Must include a run number per fi config in input.yaml.") exit(1) if "timeOut" in run["run"]: timeout = int(run["run"]["timeOut"]) assert timeout > 0, "The timeOut option must be greater than 0" else: timeout = defaultTimeout print("Run with default timeout " + str(timeout)) run_number=run["run"]["numOfRuns"] checkValues("run_number", run_number) # check for verbosity option, set at the FI run level if "verbose" in run["run"]: options["verbose"] = run["run"]["verbose"] # reset all configurations if 'fi_type' in locals(): del fi_type if 'loop_number' in locals(): del loop_number if 'loop_start' in locals(): del loop_start if 'fi_cycle' in locals(): del fi_cycle if 'fi_index' in locals(): del fi_index if 'fi_reg_index' in locals(): del fi_reg_index if 'fi_bit' in locals(): del fi_bit ##======== Add number of corrupted bits 
QINING @MAR 13th======== if 'fi_num_bits' in locals(): del fi_num_bits ##============================================================== ##======== Add second corrupted regs QINING @MAR 27th=========== if 'window_len' in locals(): del window_len ##============================================================== if 'fi_random_seed' in locals(): del fi_random_seed ##============================================================== ##BEHROOZ: Add max number of target locations if 'fi_max_multiple' in locals(): del fi_max_multiple ##============================================================== ##BEHROOZ: Add multiple corrupted regs if 'window_len_multiple' in locals(): del window_len_multiple if 'window_len_multiple_startindex' in locals(): del window_len_multiple_startindex if 'window_len_multiple_endindex' in locals(): del window_len_multiple_endindex ##============================================================== #write new fi config file according to input.yaml num_of_data=1 if "num_of_data" in run["run"]: num_of_data=run["run"]["num_of_data"] if "fi_type" in run["run"]: fi_type=run["run"]["fi_type"] if fi_type == "SoftwareFault" or fi_type == "AutoInjection" or fi_type == "Automated": try: cOpt = doc["compileOption"] injectorname = cOpt["instSelMethod"][0]["customInstselector"]["include"][0] except: print("\n\nERROR: Cannot extract fi_type from instSelMethod. 
Please check the customInstselector field in input.yaml\n") else: fi_type = injectorname checkValues("fi_type",fi_type) ##======== Add number of corrupted bits QINING @MAR 13th======== if "fi_num_bits" in run["run"]: fi_num_bits=run["run"]["fi_num_bits"] checkValues("fi_num_bits", fi_num_bits) ##============================================================== ##======== Add second corrupted regs QINING @MAR 27th=========== if 'window_len' in run["run"]: window_len=run["run"]["window_len"] checkValues("window_len", window_len) ##============================================================== ##BEHROOZ: Add max number of target locations if 'fi_max_multiple' in run["run"]: fi_max_multiple=run["run"]["fi_max_multiple"] checkValues("fi_max_multiple", fi_max_multiple) if ('fi_max_multiple' in locals()) and 'window_len' in locals(): print(("\nERROR: window_len and fi_max_multiple cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) ##============================================================== ##BEHROOZ: Add multiple corrupted regs if 'window_len_multiple' in run["run"]: window_len_multiple=run["run"]["window_len_multiple"] checkValues("window_len_multiple", window_len_multiple) if ('window_len_multiple' in locals()): if ('window_len' in run["run"]): print(("\nERROR: window_len and window_len_multiple cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) elif ('window_len_multiple_startindex' in run["run"]): print(("\nERROR: window_len_multiple_startindex and window_len_multiple cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) elif ('window_len_multiple_endindex' in run["run"]): print(("\nERROR: window_len_multiple_endindex and window_len_multiple cannot be specified" " at the same time in the input.yaml file. 
Please choose one.")) exit(1) if 'window_len_multiple_startindex' in run["run"]: window_len_multiple_startindex=run["run"]["window_len_multiple_startindex"] checkValues("window_len_multiple_startindex", window_len_multiple_startindex) if ('window_len_multiple_startindex' in locals()): if ('window_len' in run["run"]): print(("\nERROR: window_len and window_len_multiple_startindex cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) elif ('window_len_multiple' in run["run"]): print(("\nERROR: window_len_multiple_startindex and window_len_multiple cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) elif ('window_len_multiple_endindex' not in run["run"]): print(("\nERROR: window_len_multiple_startindex should come with window_len_multiple_endindex." " Please specify both.")) exit(1) if 'window_len_multiple_endindex' in run["run"]: window_len_multiple_endindex=run["run"]["window_len_multiple_endindex"] checkValues("window_len_multiple_endindex", window_len_multiple_endindex) if ('window_len_multiple_endindex' in locals()): if('window_len' in run["run"]): print(("\nERROR: window_len and window_len_multiple_endindex cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) elif('window_len_multiple' in run["run"]): print(("\nERROR: window_len_multiple_endindex and window_len_multiple cannot be specified" " at the same time in the input.yaml file. Please choose one.")) exit(1) elif('window_len_multiple_startindex' not in run["run"]): print(("\nERROR: window_len_multiple_endindex should come with window_len_multiple_startindex." 
" Please specify both.")) exit(1) ##============================================================== if "fi_cycle" in run["run"]: fi_cycle=run["run"]["fi_cycle"] checkValues("fi_cycle",fi_cycle) ##================UPDATE of LCFI================================ if "fi_index" in run["run"] and num_of_data == 1: fi_index=run["run"]["fi_index"][0] checkValues("fi_index",fi_index) if "loop_start" in run["run"]: loop_start=run["run"]["loop_start"] checkValues("loop_start",loop_start) if "loop_number" in run["run"]: loop_number=run["run"]["loop_number"] checkValues("loop_start",loop_number) if num_of_data >= 1: fi_index=run["run"]["fi_index"][0] for i in range(num_of_data): fi_indexs.append(run["run"]["fi_index"][i]) ##================UPDATE of LCFI================================ if "fi_reg_index" in run["run"]: fi_reg_index=run["run"]["fi_reg_index"] checkValues("fi_reg_index",fi_reg_index) if "fi_bit" in run["run"]: fi_bit=run["run"]["fi_bit"] checkValues("fi_bit",fi_bit,run_number,fi_cycle,fi_index,fi_reg_index) if "fi_random_seed" in run["run"]: fi_random_seed=run["run"]["fi_random_seed"] checkValues("fi_random_seed",fi_random_seed) if ('fi_cycle' not in locals()) and 'fi_index' in locals(): print(("\nINFO: You choose to inject faults based on LLFI index, " "this will inject into every runtime instruction whose LLFI " "index is %d\n" % fi_index)) ##BEHROOZ: if ('window_len_multiple' in locals() or 'window_len_multiple_startindex' in locals() or 'window_len_multiple_endindex' in locals()): if('fi_max_multiple' not in locals()): print(("\nINFO: You choose a window length for multiple bit-flip injection, " "however you have not specified the maximum number of locations." 
" Thus, the maximum number of locations will be chosen as " +str(fi_max_multiple_default)+ ".\n")) fi_max_multiple = int(fi_max_multiple_default) if ('window_len_multiple' not in locals() and 'window_len_multiple_startindex' not in locals()) and 'fi_max_multiple' in locals(): print(("\nINFO: You choose the maximum number of multiple bit injection, " "however you have not specified the window length for multiple bit-flip injection." " Thus, the window size will be chosen equal to the total number of cycles-1= " + str(int(totalcycles)-1)+ ".\n")) window_len_multiple = int(totalcycles) - 1 ##====================================================== need_to_calc_fi_cycle = True if ('fi_cycle' in locals()) or 'fi_index' in locals(): need_to_calc_fi_cycle = False # fault injection for index in range(0, run_number): run_id = str(ii)+"-"+str(index) outputfile = stddir + "/std_outputfile-" + "run-"+run_id errorfile = errordir + "/errorfile-" + "run-"+run_id execlist = [fi_exe] if('fi_cycle' not in locals() and 'fi_random_seed' in locals()): random.seed(fi_random_seed) if need_to_calc_fi_cycle: ##BEHROOZ: I changed the below line to the current one to fix the fi_cycle fi_cycle = random.randint(1, int(totalcycles)) ##fi_cycle = random.randint(0, int(totalcycles) - 1) ficonfig_File = open("llfi.config.runtime.txt", 'w') if 'fi_cycle' in locals(): ficonfig_File.write("fi_cycle="+str(fi_cycle)+'\n') ##================UPDATE of LCFI================================ #elif 'fi_index' in locals() and num_of_data == 1: # ficonfig_File.write("fi_index="+str(fi_index)+'\n') elif 'fi_index' in locals(): #print(num_of_data) for i in range(num_of_data): ficonfig_File.write("fi_index="+str(fi_indexs[i])+'\n') if 'loop_start' in locals(): ficonfig_File.write("loop_start="+str(loop_start)+'\n') if 'loop_number' in locals(): ficonfig_File.write("loop_number="+str(loop_number)+'\n') ##================UPDATE of LCFI================================ if 'fi_type' in locals(): 
ficonfig_File.write("fi_type="+fi_type+'\n') if 'fi_reg_index' in locals(): ficonfig_File.write("fi_reg_index="+str(fi_reg_index)+'\n') if 'fi_bit' in locals(): ficonfig_File.write("fi_bit="+str(fi_bit)+'\n') ##======== Add number of corrupted bits QINING @MAR 13th======== if 'fi_num_bits' in locals(): ficonfig_File.write("fi_num_bits="+str(fi_num_bits)+'\n') ##============================================================== ##======== Add second corrupted regs QINING @MAR 27th=========== if 'window_len' in locals(): ##BEHROOZ: I changed the below line to the current one to fix the fi_cycle fi_second_cycle = min(fi_cycle + random.randint(1, int(window_len)), int(totalcycles)) #fi_second_cycle = min(fi_cycle + random.randint(1, int(window_len)), int(totalcycles) - 1) ficonfig_File.write("fi_second_cycle="+str(fi_second_cycle)+'\n') ##================================================================== ##BEHROOZ: Add max number of target locations if ('fi_max_multiple' in locals()): win_start_index = 1 win_end_index = 1 if('window_len_multiple' in locals()): win_end_index = int(window_len_multiple) elif('window_len_multiple_startindex' in locals() and 'window_len_multiple_endindex' in locals()): win_start_index = window_len_multiple_startindex win_end_index = window_len_multiple_endindex if(win_start_index > win_end_index): print(("\nERROR: In the yaml file, the window_len_multiple_startindex cannot be bigger than window_len_multiple_endindex!")) exit(1) #The line below has been substituted with the one below it. 
This way the maximum number injection is not selected randomly and is #equal to the value specified by the user ##selected_num_of_injection = random.randint(1, int(fi_max_multiple)) ficonfig_File.write("fi_max_multiple="+str(fi_max_multiple)+'\n') selected_num_of_injection = fi_max_multiple ##======The -1 here is because we have already selected the first location by choosing the fi-cycle ##===== and here we are looking for the remaining cycles.================= fi_next_cycle = fi_cycle for index_multiple in range(1, int(selected_num_of_injection)): fi_next_cycle = min(fi_next_cycle + random.randint(win_start_index, win_end_index), int(totalcycles)) ficonfig_File.write("fi_next_cycle="+str(fi_next_cycle)+'\n') if fi_next_cycle == int(totalcycles): break ##================================================================== ficonfig_File.close() # print run index before executing. Comma removes newline for prettier # formatting execlist.extend(optionlist) ret = execute(execlist, timeout) if ret == "timed-out": error_File = open(errorfile, 'w') error_File.write("Program hang\n") error_File.close() elif int(ret) < 0: error_File = open(errorfile, 'w') error_File.write("Program crashed, terminated by the system, return code " + ret + '\n') error_File.close() elif int(ret) > 0: error_File = open(errorfile, 'w') error_File.write("Program crashed, terminated by itself, return code " + ret + '\n') error_File.close() # Print updates, print the number of injections finished print_progressbar(index+1, run_number) #print_progressbar(run_number, run_number) print("") # progress bar needs a newline after 100% reached # Print summary if options["verbose"]: print("========== SUMMARY ==========") print("Return codes: (code:\toccurance)") for r in list(return_codes.keys()): print((" %3s: %5d" % (str(r), return_codes[r]))) ################################################################################ if __name__=="__main__": if len(sys.argv) == 1: usage('Must provide the fault 
injection executable and its options') exit(1) main(sys.argv[1:])
[ "yaml.load", "sys.exit", "os.remove", "os.listdir", "subprocess.Popen", "os.path.isdir", "os.mkdir", "sys.stdout.flush", "random.randint", "os.path.isfile", "os.path.dirname", "time.time", "os.path.join", "random.seed", "os.getcwd", "os.path.realpath", "os.chdir", "os.path.basename...
[((1322, 1351), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1338, 1351), False, 'import sys, os, subprocess\n'), ((1579, 1595), 'sys.exit', 'sys.exit', (['retval'], {}), '(retval)\n', (1587, 1595), False, 'import sys, os, subprocess\n'), ((1715, 1740), 'os.path.realpath', 'os.path.realpath', (['args[0]'], {}), '(args[0])\n', (1731, 1740), False, 'import sys, os, subprocess\n'), ((3524, 3542), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3540, 3542), False, 'import sys, os, subprocess\n'), ((3726, 3749), 'os.path.dirname', 'os.path.dirname', (['fi_exe'], {}), '(fi_exe)\n', (3741, 3749), False, 'import sys, os, subprocess\n'), ((3763, 3799), 'os.path.join', 'os.path.join', (['llfi_dir', '"""prog_input"""'], {}), "(llfi_dir, 'prog_input')\n", (3775, 3799), False, 'import sys, os, subprocess\n'), ((3814, 3851), 'os.path.join', 'os.path.join', (['llfi_dir', '"""prog_output"""'], {}), "(llfi_dir, 'prog_output')\n", (3826, 3851), False, 'import sys, os, subprocess\n'), ((3865, 3903), 'os.path.join', 'os.path.join', (['llfi_dir', '"""error_output"""'], {}), "(llfi_dir, 'error_output')\n", (3877, 3903), False, 'import sys, os, subprocess\n'), ((3915, 3951), 'os.path.join', 'os.path.join', (['llfi_dir', '"""std_output"""'], {}), "(llfi_dir, 'std_output')\n", (3927, 3951), False, 'import sys, os, subprocess\n'), ((3970, 4012), 'os.path.join', 'os.path.join', (['llfi_dir', '"""llfi_stat_output"""'], {}), "(llfi_dir, 'llfi_stat_output')\n", (3982, 4012), False, 'import sys, os, subprocess\n'), ((4559, 4609), 'subprocess.Popen', 'subprocess.Popen', (['execlist'], {'stdout': 'subprocess.PIPE'}), '(execlist, stdout=subprocess.PIPE)\n', (4575, 4609), False, 'import sys, os, subprocess\n'), ((2010, 2029), 'os.path.isfile', 'os.path.isfile', (['opt'], {}), '(opt)\n', (2024, 2029), False, 'import sys, os, subprocess\n'), ((2254, 2265), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2263, 2265), False, 'import sys, os, subprocess\n'), 
((2314, 2331), 'os.chdir', 'os.chdir', (['basedir'], {}), '(basedir)\n', (2322, 2331), False, 'import sys, os, subprocess\n'), ((2456, 2479), 'os.path.dirname', 'os.path.dirname', (['fi_exe'], {}), '(fi_exe)\n', (2471, 2479), False, 'import sys, os, subprocess\n'), ((2713, 2725), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (2722, 2725), False, 'import yaml\n'), ((4023, 4047), 'os.path.isdir', 'os.path.isdir', (['outputdir'], {}), '(outputdir)\n', (4036, 4047), False, 'import sys, os, subprocess\n'), ((4053, 4072), 'os.mkdir', 'os.mkdir', (['outputdir'], {}), '(outputdir)\n', (4061, 4072), False, 'import sys, os, subprocess\n'), ((4082, 4105), 'os.path.isdir', 'os.path.isdir', (['errordir'], {}), '(errordir)\n', (4095, 4105), False, 'import sys, os, subprocess\n'), ((4111, 4129), 'os.mkdir', 'os.mkdir', (['errordir'], {}), '(errordir)\n', (4119, 4129), False, 'import sys, os, subprocess\n'), ((4139, 4162), 'os.path.isdir', 'os.path.isdir', (['inputdir'], {}), '(inputdir)\n', (4152, 4162), False, 'import sys, os, subprocess\n'), ((4168, 4186), 'os.mkdir', 'os.mkdir', (['inputdir'], {}), '(inputdir)\n', (4176, 4186), False, 'import sys, os, subprocess\n'), ((4196, 4217), 'os.path.isdir', 'os.path.isdir', (['stddir'], {}), '(stddir)\n', (4209, 4217), False, 'import sys, os, subprocess\n'), ((4223, 4239), 'os.mkdir', 'os.mkdir', (['stddir'], {}), '(stddir)\n', (4231, 4239), False, 'import sys, os, subprocess\n'), ((4249, 4277), 'os.path.isdir', 'os.path.isdir', (['llfi_stat_dir'], {}), '(llfi_stat_dir)\n', (4262, 4277), False, 'import sys, os, subprocess\n'), ((4283, 4306), 'os.mkdir', 'os.mkdir', (['llfi_stat_dir'], {}), '(llfi_stat_dir)\n', (4291, 4306), False, 'import sys, os, subprocess\n'), ((4813, 4824), 'time.time', 'time.time', ([], {}), '()\n', (4822, 4824), False, 'import time\n'), ((6659, 6678), 'os.path.isfile', 'os.path.isfile', (['opt'], {}), '(opt)\n', (6673, 6678), False, 'import sys, os, subprocess\n'), ((12778, 12800), 'os.path.isfile', 
'os.path.isfile', (['fi_exe'], {}), '(fi_exe)\n', (12792, 12800), False, 'import sys, os, subprocess\n'), ((1795, 1818), 'os.path.dirname', 'os.path.dirname', (['fi_exe'], {}), '(fi_exe)\n', (1810, 1818), False, 'import sys, os, subprocess\n'), ((2501, 2536), 'os.path.join', 'os.path.join', (['basedir', '"""input.yaml"""'], {}), "(basedir, 'input.yaml')\n", (2513, 2536), False, 'import sys, os, subprocess\n'), ((7010, 7030), 'os.path.isfile', 'os.path.isfile', (['each'], {}), '(each)\n', (7024, 7030), False, 'import sys, os, subprocess\n'), ((7292, 7307), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (7302, 7307), False, 'import sys, os, subprocess\n'), ((8117, 8132), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (8127, 8132), False, 'import sys, os, subprocess\n'), ((2215, 2236), 'os.path.basename', 'os.path.basename', (['opt'], {}), '(opt)\n', (2231, 2236), False, 'import sys, os, subprocess\n'), ((6760, 6787), 'os.path.join', 'os.path.join', (['inputdir', 'opt'], {}), '(inputdir, opt)\n', (6772, 6787), False, 'import sys, os, subprocess\n'), ((7091, 7119), 'os.path.join', 'os.path.join', (['inputdir', 'each'], {}), '(inputdir, each)\n', (7103, 7119), False, 'import sys, os, subprocess\n'), ((7380, 7393), 'os.stat', 'os.stat', (['each'], {}), '(each)\n', (7387, 7393), False, 'import sys, os, subprocess\n'), ((7587, 7602), 'os.remove', 'os.remove', (['each'], {}), '(each)\n', (7596, 7602), False, 'import sys, os, subprocess\n'), ((2057, 2077), 'os.path.dirname', 'os.path.dirname', (['opt'], {}), '(opt)\n', (2072, 2077), False, 'import sys, os, subprocess\n'), ((4903, 4914), 'time.time', 'time.time', ([], {}), '()\n', (4912, 4914), False, 'import time\n'), ((23443, 23470), 'random.seed', 'random.seed', (['fi_random_seed'], {}), '(fi_random_seed)\n', (23454, 23470), False, 'import random\n'), ((7791, 7827), 'os.path.join', 'os.path.join', (['llfi_stat_dir', 'newName'], {}), '(llfi_stat_dir, newName)\n', (7803, 7827), False, 'import sys, os, 
subprocess\n'), ((7869, 7901), 'os.path.join', 'os.path.join', (['outputdir', 'newName'], {}), '(outputdir, newName)\n', (7881, 7901), False, 'import sys, os, subprocess\n'), ((27100, 27146), 'random.randint', 'random.randint', (['win_start_index', 'win_end_index'], {}), '(win_start_index, win_end_index)\n', (27114, 27146), False, 'import random\n')]
import os from pathlib import Path import pytest from jubox import JupyterNotebook, CodeCell from nbformat.notebooknode import NotebookNode def test_creation_from_file(notebook_file_simple): #file = f"{notebook_folder}/nb_simple.ipynb" file = notebook_file_simple nb = JupyterNotebook(file) assert nb.file == file assert isinstance(nb.node, NotebookNode) assert nb.node.cells[0].source == "# This is simple Jupyter Notebook" def test_creation_from_pathlib_file(notebook_file_simple): file = Path(notebook_file_simple) nb = JupyterNotebook(file) assert nb.file == file assert isinstance(nb.node, NotebookNode) assert nb.node.cells[0].source == "# This is simple Jupyter Notebook" def test_creation_empty(): nb = JupyterNotebook() assert isinstance(nb.node, NotebookNode) def test_creation_list_of_cells(): nb = JupyterNotebook([ CodeCell("1 cell"), CodeCell("2 cell"), CodeCell("3 cell"), ]) assert isinstance(nb.node, NotebookNode) assert len(nb.node.cells) == 3 assert isinstance(nb.node.cells[0], NotebookNode) assert "1 cell" == nb.node.cells[0]["source"] assert "2 cell" == nb.node.cells[1]["source"] assert "3 cell" == nb.node.cells[2]["source"] def test_creation_list_of_empty_cells(): nb = JupyterNotebook([ CodeCell(), CodeCell(), CodeCell(), ]) assert isinstance(nb.node, NotebookNode) assert len(nb.node.cells) == 3 assert isinstance(nb.node.cells[0], NotebookNode) def test_creation_another_notebook(): nb_orig = JupyterNotebook([ CodeCell("1 cell"), CodeCell("2 cell"), CodeCell("3 cell"), ]) nb = JupyterNotebook(nb_orig) assert isinstance(nb.node, NotebookNode) assert len(nb.node.cells) == 3 assert isinstance(nb.node.cells[0], NotebookNode) assert nb.node is nb_orig.node
[ "jubox.JupyterNotebook", "jubox.CodeCell", "pathlib.Path" ]
[((284, 305), 'jubox.JupyterNotebook', 'JupyterNotebook', (['file'], {}), '(file)\n', (299, 305), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((523, 549), 'pathlib.Path', 'Path', (['notebook_file_simple'], {}), '(notebook_file_simple)\n', (527, 549), False, 'from pathlib import Path\n'), ((559, 580), 'jubox.JupyterNotebook', 'JupyterNotebook', (['file'], {}), '(file)\n', (574, 580), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((764, 781), 'jubox.JupyterNotebook', 'JupyterNotebook', ([], {}), '()\n', (779, 781), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1708, 1732), 'jubox.JupyterNotebook', 'JupyterNotebook', (['nb_orig'], {}), '(nb_orig)\n', (1723, 1732), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((898, 916), 'jubox.CodeCell', 'CodeCell', (['"""1 cell"""'], {}), "('1 cell')\n", (906, 916), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((926, 944), 'jubox.CodeCell', 'CodeCell', (['"""2 cell"""'], {}), "('2 cell')\n", (934, 944), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((954, 972), 'jubox.CodeCell', 'CodeCell', (['"""3 cell"""'], {}), "('3 cell')\n", (962, 972), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1343, 1353), 'jubox.CodeCell', 'CodeCell', ([], {}), '()\n', (1351, 1353), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1363, 1373), 'jubox.CodeCell', 'CodeCell', ([], {}), '()\n', (1371, 1373), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1383, 1393), 'jubox.CodeCell', 'CodeCell', ([], {}), '()\n', (1391, 1393), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1615, 1633), 'jubox.CodeCell', 'CodeCell', (['"""1 cell"""'], {}), "('1 cell')\n", (1623, 1633), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1643, 1661), 'jubox.CodeCell', 'CodeCell', (['"""2 cell"""'], {}), "('2 cell')\n", (1651, 1661), False, 'from jubox import JupyterNotebook, CodeCell\n'), ((1671, 1689), 'jubox.CodeCell', 'CodeCell', (['"""3 cell"""'], 
{}), "('3 cell')\n", (1679, 1689), False, 'from jubox import JupyterNotebook, CodeCell\n')]
import spot
from crome_logic.specification.temporal import LTL
from crome_logic.tools.crome_io import save_to_file
from crome_logic.typelement.robotic import BooleanAction, BooleanSensor
from crome_logic.typeset import Typeset
from crome_synthesis.controller import Controller
from crome_synthesis.controller.synthesis import generate_controller


def example() -> None:
    """Synthesize a controller from LTL assumption/guarantee objects and save it.

    Writes the controller in both "dot" and "lbtt" formats.
    """
    assumption = LTL(
        _init_formula="G(F(sens))",
        _typeset=Typeset({BooleanSensor(name="sens")}),
    )
    guarantee = LTL(
        _init_formula="G(sens -> act)",
        _typeset=Typeset({BooleanAction(name="act")}),
    )
    synthesized = Controller(assumptions=assumption, guarantees=guarantee)
    # Persist the controller in both supported textual formats.
    for output_format in ("dot", "lbtt"):
        synthesized.save(output_format)


def example_1() -> None:
    """Same flow as `example`, but driven by plain formula strings.

    Also round-trips the first controller through spot to print its
    "dot" and "lbtt" renderings.
    """
    assumptions = "G(F(a1))"
    guarantees = "G(a1 <-> (b1 | c1))"
    inputs = "a1"
    outputs = "b1, c1"
    _realizable, controller1, _elapsed = generate_controller(
        assumptions, guarantees, inputs, outputs
    )
    print(f"\n\n{controller1}")

    file_path = save_to_file(controller1, "controller_1")
    automaton = spot.automaton(file_path)
    # Render the same automaton in two textual formats.
    dotfile = automaton.to_str(format="dot")
    print(f"\n\n{dotfile}")
    lbtt = automaton.to_str(format="lbtt")
    print(f"\n\n{lbtt}")

    _realizable2, controller2, _elapsed2 = generate_controller(
        "G(F(sens))", "G(sens -> act)", "sens", "act"
    )
    print(f"\n\n{controller2}")


if __name__ == "__main__":
    example()
[ "crome_synthesis.controller.synthesis.generate_controller", "crome_logic.typelement.robotic.BooleanSensor", "crome_synthesis.controller.Controller", "crome_logic.tools.crome_io.save_to_file", "crome_logic.typelement.robotic.BooleanAction", "spot.automaton" ]
[((614, 653), 'crome_synthesis.controller.Controller', 'Controller', ([], {'assumptions': 'a', 'guarantees': 'g'}), '(assumptions=a, guarantees=g)\n', (624, 653), False, 'from crome_synthesis.controller import Controller\n'), ((878, 913), 'crome_synthesis.controller.synthesis.generate_controller', 'generate_controller', (['a1', 'g1', 'i1', 'o1'], {}), '(a1, g1, i1, o1)\n', (897, 913), False, 'from crome_synthesis.controller.synthesis import generate_controller\n'), ((963, 1004), 'crome_logic.tools.crome_io.save_to_file', 'save_to_file', (['controller1', '"""controller_1"""'], {}), "(controller1, 'controller_1')\n", (975, 1004), False, 'from crome_logic.tools.crome_io import save_to_file\n'), ((1022, 1047), 'spot.automaton', 'spot.automaton', (['file_path'], {}), '(file_path)\n', (1036, 1047), False, 'import spot\n'), ((1331, 1368), 'crome_synthesis.controller.synthesis.generate_controller', 'generate_controller', (['sens', 'g2', 'i2', 'o2'], {}), '(sens, g2, i2, o2)\n', (1350, 1368), False, 'from crome_synthesis.controller.synthesis import generate_controller\n'), ((447, 473), 'crome_logic.typelement.robotic.BooleanSensor', 'BooleanSensor', ([], {'name': '"""sens"""'}), "(name='sens')\n", (460, 473), False, 'from crome_logic.typelement.robotic import BooleanAction, BooleanSensor\n'), ((562, 587), 'crome_logic.typelement.robotic.BooleanAction', 'BooleanAction', ([], {'name': '"""act"""'}), "(name='act')\n", (575, 587), False, 'from crome_logic.typelement.robotic import BooleanAction, BooleanSensor\n')]
from . import db
from werkzeug.security import generate_password_hash, check_password_hash
from datetime import datetime
import time
from flask_login import UserMixin
from . import login_manager


@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a User by primary key for session handling."""
    return User.query.get(int(user_id))


class User(db.Model, UserMixin):
    """A buyer account that can place orders and keep a shopping cart."""

    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String)
    email = db.Column(db.String)
    pass_secure = db.Column(db.String(255))  # salted hash only; never plain text
    phone = db.Column(db.String)
    profile_picture_path = db.Column(db.String)
    orders = db.relationship("Orders", backref="users", lazy="dynamic")
    cart = db.relationship("Cart", backref="users", lazy="dynamic")

    @property
    def password(self):
        """Write-only attribute: reading the password is deliberately forbidden."""
        raise AttributeError('You cannot read the password attribute')

    @password.setter
    def password(self, password):
        # Store only the salted hash of the supplied password.
        self.pass_secure = generate_password_hash(password)

    def verify_password(self, password):
        """Return True if ``password`` matches the stored hash."""
        return check_password_hash(self.pass_secure, password)

    def __repr__(self):
        return f'User{self.username}'


class Seller(db.Model, UserMixin):
    """A seller account that lists products and receives orders."""

    __tablename__ = 'sellers'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String)
    bio = db.Column(db.String)
    profile_pic_path = db.Column(db.String)
    email = db.Column(db.String)
    pass_secure = db.Column(db.String)
    phone = db.Column(db.String)
    products = db.relationship("Product", backref="sellers", lazy="dynamic")
    orders = db.relationship("Orders", backref="sellers", lazy="dynamic")

    @property
    def password(self):
        """Write-only attribute: reading the password is deliberately forbidden."""
        raise AttributeError('You cannot read the password attribute')

    @password.setter
    def password(self, password):
        # Store only the salted hash of the supplied password.
        self.pass_secure = generate_password_hash(password)

    def verify_password(self, password):
        """Return True if ``password`` matches the stored hash."""
        return check_password_hash(self.pass_secure, password)

    def __repr__(self):
        return f'Seller{self.username}'


class Product(db.Model):
    """An item listed for sale by a seller."""

    __tablename__ = "products"
    id = db.Column(db.Integer, primary_key=True)
    product_name = db.Column(db.String)
    product_picture = db.Column(db.String)
    description = db.Column(db.String)
    seller_id = db.Column(db.Integer, db.ForeignKey('sellers.id', ondelete='SET NULL'), nullable=True)
    orders = db.relationship("Orders", backref="products", lazy="dynamic")
    cart = db.relationship("Cart", backref="products", lazy="dynamic")

    def save_new_product(self):
        """Persist this product to the database."""
        db.session.add(self)
        db.session.commit()

    def __repr__(self):
        # BUG FIX: the model defines `product_name`, not `product`; the old
        # f'Product{self.product}' raised AttributeError whenever repr'd.
        return f'Product{self.product_name}'


class Orders(db.Model):
    """A single pizza order, linked to a product, a buyer, and a seller."""

    __tablename__ = "orders"
    id = db.Column(db.Integer, primary_key=True)
    pizza_name = db.Column(db.String)
    pizza_size = db.Column(db.String)
    price = db.Column(db.Integer)
    isAccepted = db.Column(db.Boolean, default=False, server_default="false")
    product_id = db.Column(db.Integer, db.ForeignKey('products.id', ondelete='SET NULL'), nullable=True)
    time = db.Column(db.DateTime(), default=datetime.utcnow)  # order creation timestamp (UTC)
    user_id = db.Column(db.Integer, db.ForeignKey("users.id", ondelete='SET NULL'), nullable=True)
    seller_id = db.Column(db.Integer, db.ForeignKey('sellers.id', ondelete='SET NULL'), nullable=True)

    def add_order(self):
        """Persist this order to the database."""
        db.session.add(self)
        db.session.commit()

    def __repr__(self):
        return f'Orders{self.pizza_name}'


class Cart(db.Model):
    """A line item in a user's shopping cart."""

    __tablename__ = "cart"
    id = db.Column(db.Integer, primary_key=True)
    product = db.Column(db.String)
    product_picture = db.Column(db.String)
    product_price = db.Column(db.Integer)
    amount = db.Column(db.Integer, default=0)
    size = db.Column(db.String)
    product_cost = db.Column(db.Integer, default=0)  # amount * product_price, computed by callers
    product_id = db.Column(db.Integer, db.ForeignKey('products.id', ondelete='SET NULL'), nullable=True)
    user_id = db.Column(db.Integer, db.ForeignKey("users.id", ondelete='SET NULL'), nullable=True)

    def add_item_to_cart(self):
        """Persist this cart item to the database."""
        db.session.add(self)
        db.session.commit()

    def __repr__(self):
        return f'Cart{self.product}'
[ "werkzeug.security.generate_password_hash", "werkzeug.security.check_password_hash" ]
[((1023, 1055), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (1045, 1055), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((1120, 1167), 'werkzeug.security.check_password_hash', 'check_password_hash', (['self.pass_secure', 'password'], {}), '(self.pass_secure, password)\n', (1139, 1167), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((1893, 1925), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (1915, 1925), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((1990, 2037), 'werkzeug.security.check_password_hash', 'check_password_hash', (['self.pass_secure', 'password'], {}), '(self.pass_secure, password)\n', (2009, 2037), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n')]
import logging

from django.db import connection
from prometheus_client import Gauge

from zentral.utils.prometheus import BasePrometheusMetricsView

from .models import Status

logger = logging.getLogger("zentral.core.compliance_checks.metrics_views")


class MetricsView(BasePrometheusMetricsView):
    """Expose compliance-check counts and per-status age buckets as Prometheus metrics."""

    def add_compliance_checks_gauge(self):
        """Register a gauge counting compliance checks grouped by model."""
        gauge = Gauge('zentral_compliance_checks', 'Zentral compliance checks',
                      ['model'], registry=self.registry)
        query = (
            "select model, count(*) "
            "from compliance_checks_compliancecheck "
            "group by model"
        )
        with connection.cursor() as cursor:
            cursor.execute(query)
            rows = cursor.fetchall()
        for model_name, model_count in rows:
            gauge.labels(model=model_name).set(model_count)

    def add_machine_statuses_gauge(self):
        """Register cumulative age-bucket counts of machine statuses per check and status."""
        gauge = Gauge('zentral_compliance_checks_statuses_bucket', 'Zentral compliance checks statuses',
                      ['model', 'name', 'status', 'le'],
                      registry=self.registry)
        # Cumulative day buckets; column aliases in the query match these labels.
        query = (
            "with machine_statuses as ("
            "  select compliance_check_id, compliance_check_version, status,"
            "  date_part('days', now() - status_time) as age"
            "  from compliance_checks_machinestatus"
            ") select "
            "cc.model, cc.name, ms.status,"
            'count(*) filter (where ms.age < 1) as "1",'
            'count(*) filter (where ms.age < 7) as "7",'
            'count(*) filter (where ms.age < 14) as "14",'
            'count(*) filter (where ms.age < 30) as "30",'
            'count(*) filter (where ms.age < 45) as "45",'
            'count(*) filter (where ms.age < 90) as "90",'
            'count(*) as "+Inf" '
            "from compliance_checks_compliancecheck as cc "
            "join machine_statuses as ms on "
            "(ms.compliance_check_id = cc.id and ms.compliance_check_version = cc.version) "
            "group by cc.model, cc.name, ms.status"
        )
        buckets = ("1", "7", "14", "30", "45", "90", "+Inf")
        with connection.cursor() as cursor:
            cursor.execute(query)
            column_names = [col.name for col in cursor.description]
            records = [dict(zip(column_names, values)) for values in cursor.fetchall()]
        for record in records:
            status_name = Status(record["status"]).name
            for bucket in buckets:
                gauge.labels(
                    model=record["model"],
                    name=record["name"],
                    status=status_name,
                    le=bucket,
                ).set(record[bucket])

    def populate_registry(self):
        """Entry point used by the base view to fill the metrics registry."""
        self.add_compliance_checks_gauge()
        self.add_machine_statuses_gauge()
[ "logging.getLogger", "django.db.connection.cursor", "prometheus_client.Gauge" ]
[((185, 250), 'logging.getLogger', 'logging.getLogger', (['"""zentral.core.compliance_checks.metrics_views"""'], {}), "('zentral.core.compliance_checks.metrics_views')\n", (202, 250), False, 'import logging\n'), ((354, 456), 'prometheus_client.Gauge', 'Gauge', (['"""zentral_compliance_checks"""', '"""Zentral compliance checks"""', "['model']"], {'registry': 'self.registry'}), "('zentral_compliance_checks', 'Zentral compliance checks', ['model'],\n registry=self.registry)\n", (359, 456), False, 'from prometheus_client import Gauge\n'), ((853, 1008), 'prometheus_client.Gauge', 'Gauge', (['"""zentral_compliance_checks_statuses_bucket"""', '"""Zentral compliance checks statuses"""', "['model', 'name', 'status', 'le']"], {'registry': 'self.registry'}), "('zentral_compliance_checks_statuses_bucket',\n 'Zentral compliance checks statuses', ['model', 'name', 'status', 'le'],\n registry=self.registry)\n", (858, 1008), False, 'from prometheus_client import Gauge\n'), ((633, 652), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (650, 652), False, 'from django.db import connection\n'), ((2015, 2034), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (2032, 2034), False, 'from django.db import connection\n')]
"""Command-line interface for RLPipes: build, check, and run R-loop-mapping workflows."""
import click
import ast
import requests
import pandas as pd
import numpy as np
from pysradb.sraweb import SRAweb
import requests  # NOTE(review): duplicate import — requests is already imported above
import pysam
import re
import pyfastx
import pkg_resources  # part of setuptools
import json
import warnings
import sys
import os

from .run_workflow import make_snakes

# Allows passing strings to CLI and eval as python objects
# From https://stackoverflow.com/questions/47631914/how-to-pass-several-list-of-arguments-to-click-option
# Had to add the "str(value)" part or default would throw ValueErrors.
class PythonLiteralOption(click.Option):
    def type_cast_value(self, ctx, value):
        # Parse the CLI string as a Python literal (e.g., a dict for --smargs).
        try:
            return ast.literal_eval(str(value))
        except ValueError:
            raise click.BadParameter(value)


# Constants
# Recognized R-loop-mapping / expression modes.
AVAILABLE_MODES = [
    "DRIP",
    "DRIPc",
    "qDRIP",
    "sDRIP",
    "ssDRIP",
    "R-ChIP",
    "RR-ChIP",
    "RDIP",
    "S1-DRIP",
    "DRIVE",
    "RNH-CnR",
    "MapR",
    "RNA-Seq"
]
SRA_URL = "https://www.ncbi.nlm.nih.gov/sra/"
# Columns kept from pysradb metadata. NOTE: the trailing space in
# "organism_taxid " matches the column name pysradb returns — do not strip it.
SRA_COLS = [
    "experiment",
    "study_accession",
    "experiment_title",
    "experiment_accession",
    "organism_taxid ",
    "run_accession",
    "library_layout",
    "run_total_bases",
    "run_total_spots",
]
# Regexes used to classify the "experiment" column: local fastq, local bam,
# or public accession (GSM/SRX).
redict = {
    "fastq": "^.+\\.[fastq]+[\\.gz]*$|^.+\\.[fastq]+[\\.gz]*\\~.+\\.[fastq]+[\\.gz]*$",
    "bam": "^.+\\.bam$",
    "public": "^GSM[0-9]+$|^SRX[0-9]+$",
}
# __file__=os.path.abspath("../RLPipes/rlpipes/cli.py")
this_dir = os.path.dirname(__file__)
DATA_PATH = os.path.abspath(
    os.path.join(this_dir, "src", "data", "available_genomes.tsv.xz")
)
GENSIZE_PATH = os.path.abspath(
    os.path.join(this_dir, "src", "data", "eff_gen_size.tsv.xz")
)
SRC_DIR = os.path.abspath(os.path.join(this_dir, "src"))
# Number of reads sampled from a BAM to infer paired-endness / read length.
N_BAM_READS_CHECK = 1000

# Set version from the installed package metadata
__version__ = pkg_resources.require("rlpipes")[0].version

# Help text
snakeHelp = """
Dict of arguments passed to the snakemake python API. Default: "{'use_conda': True}".
Read the snakemake API reference for the full list of options.
"""
modeHelp = """
The type of sequencing (e.g., "DRIP"). The available options are currently:
DRIP, DRIPc, qDRIP, sDRIP, ssDRIP, R-ChIP, RR-ChIP, RDIP, S1-DRIP, DRIVE,
RNH-CnR, and MapR
"""
groupHelp = """
Column(s) which identify biologically-meaningful grouping(s) of samples (i.e., conditions).
Can be any column name from the `samples` CSV file. If using public data accessions,
it may also include "study".
NOTE: If --groupby is set and there R-loop-mapping and expression samples within groups,
expression-matched analysis will be run. This can be disabled with the --noexp flag.\n
Example #1: "RSeqCLI build outdir/ samples.csv --groupcols tissue"\n
samples.csv:\n
experiment, mode, tissue\n
\tGSM1720615, DRIP, NT2\n
\tGSM1720616, DRIP, NT2\n
\tGSM1720619, DRIP, K562\n
\n
Example #2: "RSeqCLI build outdir/ samples.csv --groupby tissue"\n
samples.csv:\n
experiment, mode, tissue\n
\tGSM1720615, DRIP, NT2\n
\tGSM1720616, DRIP, NT2\n
\tGSM1720613, DRIPc, NT2\n
\tGSM1720614, DRIPc, NT2\n
\tGSM1720622, RNA-seq, NT2\n
\tGSM1720623, RNA-seq, NT2\n
\n
"""
expHelp = """
If set, no expression-matched analysis will be performed.
"""

# Get the shared options
# From https://stackoverflow.com/questions/40182157/shared-options-and-flags-between-commands
# These options are shared by the `check` and `run` subcommands.
verify_run_options = [
    click.option(
        "--smargs",
        "-s",
        cls=PythonLiteralOption,
        help=snakeHelp,
        default="{'use_conda': True}",
    ),
    click.option(
        "--threads", "-t", help="Number of threads to use. Default: 1", default=1
    ),
    click.option(
        "--bwamem2",
        is_flag=True,
        help="Align with BWA-MEM2 instead of BWA. BWA MEM2 Needs > 70GB RAM avaialble to build index, but shows > 3x speed increase. Default: False.",
        default=False,
    ),
    click.option(
        "--macs3",
        help="Call peaks using macs3 instead of macs2",
        is_flag=True,
        default=False,
    ),
    click.option(
        "--groupby", "-G", help=groupHelp
    ),
    click.option(
        "--noexp", help=expHelp, is_flag=True, default=False
    ),
    click.option(
        "--noreport", help="If set, RSeq reports will not be generated.",
        is_flag=True, default=False
    ),
    click.option(
        "--debug",
        is_flag=True,
        help="Run pipeline on subsampled number of reads (for testing).",
        default=False,
    ),
    click.option(
        "--tsv",
        is_flag=True,
        help="Obtain config from config.tsv file instead of config.json.",
        default=False,
    ),
    click.option(
        "--useaws",
        is_flag=True,
        help="If set, prefetch from SRA tools will be used to download any public SRA data instead of AWS S3.",
        default=False,
    )
]

# Function for adding these options to the click command
def add_options(options):
    """Decorator factory: apply every option in `options` to a click command."""
    def _add_options(func):
        for option in reversed(options):
            func = option(func)
        return func

    return _add_options


def validate_genome(ctx, param, value):
    """Validate genome input"""
    # `value` must be a UCSC organism ID listed in the bundled genomes table.
    if value is not None:
        available_genomes = pd.read_table(DATA_PATH)
        try:
            assert value in available_genomes.UCSC_orgID.to_list()
        except AssertionError:
            raise click.BadParameter(
                "'" + value + "' is not a valid UCSC genome ID (e.g., 'hg38' is valid)."
            )
    return value


def validate_mode(ctx, param, value):
    """Validate mode input"""
    # `value` must be one of the recognized sequencing modes.
    if value is not None:
        try:
            assert value in AVAILABLE_MODES
        except AssertionError:
            raise click.BadParameter(
                "'" + value + "' is not a valid mode. (RSeqCLI build --help for more info)"
            )
    return value


def validate_run_dir(ctx, param, value):
    """Create RUN_DIR if needed and return its absolute path."""
    try:
        os.makedirs(value, exist_ok=True)
    except FileNotFoundError:
        raise click.BadParameter(
            "'"
            + value
            + "' could not be created using `os.makedirs("
            + value
            + ", exist_ok=True)` please re-check this path."
        )
    except FileExistsError:
        raise click.BadParameter(
            "RUN_DIR must be a directory. User supplied '" + value + "'"
        )
    return os.path.abspath(value)


def validate_run_dir_prepped(ctx, param, value):
    """Require RUN_DIR to already contain a config.json (i.e., `build` was run)."""
    try:
        assert os.path.exists(value) and os.path.exists(
            os.path.join(value, "config.json")
        )
    except AssertionError:
        raise click.BadParameter(
            "Configuration file '"
            + os.path.join(value, "config.json")
            + "' is not found. Have you run 'RSeqCLI build' yet?"
        )
    return os.path.abspath(value)


def bam_info(bamfile, n_bam_reads_check=1000):
    """Tests whether bam file is paired end and checks read length. Requires pysam.

    Samples the first `n_bam_reads_check` reads; a majority of paired reads
    marks the file as paired-end. Returns {"paired_end": bool, "read_len": int}.
    """
    save = pysam.set_verbosity(0)  # silence pysam warnings while opening
    samfile = pysam.AlignmentFile(bamfile, "rb")
    pysam.set_verbosity(save)
    numPair = sum([x.is_paired for x in samfile.head(n=n_bam_reads_check)])
    read_len = (
        sum([x.infer_read_length() for x in samfile.head(n=n_bam_reads_check)])
        // n_bam_reads_check
    )
    return {"paired_end": numPair > n_bam_reads_check / 2, "read_len": read_len}


def validate_samples(ctx, param, value):
    """Validate and wrangle samples input.

    Reads the samples CSV, classifies inputs as fastq/bam/public, expands
    controls into their own rows (condition "Input"), and fills in
    paired_end / read_length / name / run columns per input type.
    """
    # value = "../RLPipes/tests/test_data/fq_test_samples_1.csv"
    samps = pd.read_csv(value)

    # First, check for matching pattern (classify by the first experiment entry)
    exp = samps.experiment[0]
    try:
        samptype = [key for key, val in redict.items() if re.match(val, exp)][0]
    except IndexError:
        raise click.BadParameter(
            message="Unable to detect data format for file " + exp
        )
    samps["file_type"] = samptype

    # Wrangle controls if provided: duplicate control entries as their own
    # "Input"-condition sample rows so they are processed like experiments.
    if "control" in samps.columns:
        controls = True
        samps = pd.concat(
            [
                samps,
                samps.assign(experiment=samps.control).assign(condition="Input").assign(control=pd.NA).dropna(subset=["experiment"]),
            ]
        )
        samps = samps.assign(
            control=samps.control.apply(lambda x: pd.NA if pd.isna(x) else x)
        ).drop_duplicates()
    else:
        controls = False
        samps["control"] = ""

    if samptype == "public":
        # Init SRAdb client (API key optional via NCBI_API_KEY env var)
        db = SRAweb(os.environ.get("NCBI_API_KEY", None))

        def getsra(x):
            """Except for unreachable accessions in SRA"""
            # pysradb exits on unreachable accessions; substitute an NA row
            # so downstream dropna() can discard it.
            try:
                data=db.sra_metadata(x)
                data['experiment'] = x
            except SystemExit:
                data=pd.DataFrame({
                    'experiment': x,
                    'study_accession': pd.NA,
                    'experiment_title': pd.NA,
                    'experiment_accession': pd.NA,
                    'organism_taxid ': pd.NA,
                    'run_accession': pd.NA,
                    'library_layout': pd.NA,
                    'run_total_bases': pd.NA,
                    'run_total_spots': pd.NA
                }, index=[0])
            return data

        # Query the SRAdb and wrangle with original data
        # NOTE(review): progress_apply requires tqdm.pandas() to have been
        # registered elsewhere — not visible in this module; confirm.
        newSamps = pd.concat(
            samps.experiment.progress_apply(lambda x: getsra(x)).values.tolist()
        )[SRA_COLS]

        # Drop NaNs (unreachable accessions and missing size info)
        newSamps.dropna(subset=["experiment_accession"], inplace=True)
        newSamps.dropna(subset=["run_total_bases"], inplace=True)

        # Remove samples which have been retracted
        newSamps = newSamps[newSamps['run_total_bases'] != '']

        # Get the read length (total bases / total spots)
        newSamps = newSamps.astype(
            {"run_total_bases": "int64", "run_total_spots": "int64"}
        )
        newSamps["read_length"] = newSamps.run_total_bases // newSamps.run_total_spots

        # Get the latest genomes.
        # From https://stackoverflow.com/questions/15705630/get-the-rows-which-have-the-max-value-in-groups-using-groupby
        available_genome = pd.read_table(DATA_PATH)
        latest_genomes = available_genome[
            available_genome.groupby(axis=0, by=["taxId"])["year"].transform(max)
            == available_genome["year"]
        ]
        latest_genomes = latest_genomes.rename(columns={"taxId": "organism_taxid "})
        newSamps["organism_taxid "] = newSamps["organism_taxid "].astype(np.int64)
        # Necessary to avoid taxid conflict between sacCer2/3
        newSamps.loc[newSamps["organism_taxid "] == 4932, "organism_taxid "] = 559292
        newSamps = newSamps.set_index("organism_taxid ")
        latest_genomes = latest_genomes.set_index("organism_taxid ")
        newSamps = newSamps.join(latest_genomes, how="left")
        newSamps = (
            newSamps[
                [
                    "experiment",
                    "study_accession",
                    "experiment_title",
                    "experiment_accession",
                    "run_accession",
                    "library_layout",
                    "UCSC_orgID",
                    "read_length",
                ]
            ]
            .rename(
                columns={
                    "experiment": "experiment_original",
                    "study_accession": "study",
                    "experiment_title": "name",
                    "library_layout": "paired_end",
                    "experiment_accession": "experiment",
                    "run_accession": "run",
                    "UCSC_orgID": "genome",
                }
            )
        )

        # Set paired end (SRA reports layout as "PAIRED"/"SINGLE")
        newSamps["paired_end"] = newSamps["paired_end"] == "PAIRED"

        # Get srx to orig mapping (user-supplied accession -> SRX experiment)
        srx_to_orig=newSamps[['experiment', 'experiment_original']]

        # Set index as exp orig
        newSamps=newSamps.set_index("experiment_original")
        if "genome" in samps.columns:
            # User-supplied genome column wins over the inferred one.
            newSamps = newSamps.drop("genome", axis=1)

        # Finally, join the dataframes by experiment...
        samps['experiment_original'] = samps['experiment']
        samps['control_original'] = samps['control']
        samps=samps.drop('experiment', axis=1).drop('control', axis=1)
        samps=pd.merge(
            samps, newSamps, on = "experiment_original"
        ).drop_duplicates()

        # And control (map control accessions to SRX as well)
        srx_to_origctr=srx_to_orig.rename(
            columns={
                "experiment": "control",
                "experiment_original": "control_original",
            }
        )
        samps=pd.merge(
            samps, srx_to_origctr, how="left", on = "control_original"
        ).drop_duplicates()
        samps = samps.assign(
            control=samps.control.apply(lambda x: pd.NA if pd.isna(x) else x)
        ).drop_duplicates()
    else:
        if samptype == "bam":
            # Check which are paired-end (sampled from the first reads)
            samps["paired_end"] = [
                bam_info(bam, N_BAM_READS_CHECK)["paired_end"] for bam in samps["experiment"]
            ]
            samps["read_length"] = [
                bam_info(bam, N_BAM_READS_CHECK)["read_len"] for bam in samps["experiment"]
            ]
            # Sample name = file name without extension
            samps["name"] = [
                os.path.splitext(os.path.basename(exp))[0] for exp in samps["experiment"]
            ]
            if controls:
                samps["control"] = [
                    os.path.splitext(os.path.basename(exp))[0] if not pd.isna(exp) else exp
                    for exp in samps["control"]
                ]
            else:
                samps["control"] = ""
            samps["run"] = [os.path.abspath(bam) for bam in samps["experiment"]]
        elif samptype == "fastq":
            # Check which are paired-end ("R1~R2" convention marks pairs)
            samps["paired_end"] = [bool(re.match(".+\\~.+", exp)) for exp in samps["experiment"]]

            def get_readlen(fq, lines=500):
                """
                Get Read Length from a fastq file
                params:
                    fq: Path to a FASTQ file
                    line: Number of lines to scan. Default: 500
                """
                seqlst=[]
                for name,seq,qual in pyfastx.Fastq(fq, build_index=False):
                    seqlst.append(seq)
                    if len(seqlst) > lines:
                        break
                toavg = [len(x) for x in seqlst]
                return round(sum(toavg) / len(toavg))

            # Read length inferred from the first mate only.
            samps["read_length"] = [
                get_readlen(re.sub('\\~.+', "", exp )) for exp in samps["experiment"]
            ]

            def get_samplename(fq):
                # Strip .gz, the fastq extension, and a trailing _R1/_R2 tag.
                splt = os.path.splitext(os.path.basename(fq))
                if splt[1] == ".gz":
                    nm=os.path.splitext(splt[0])[0]
                else:
                    nm=splt[0]
                return re.sub('[\\._]{1}[R1-2]+$', "", nm)

            samps["name"] = [
                get_samplename(re.sub('\\~.+', "", exp)) for exp in samps["experiment"]
            ]
            if controls:
                samps["control"] = [
                    get_samplename(re.sub('\\~.+', "", exp)) if not pd.isna(exp) else exp
                    for exp in samps["control"]
                ]
            else:
                samps["control"] = ""

            def get_fq_path(fq, pe):
                # Absolute path(s); paired mates are joined by "~".
                if pe:
                    fq1=os.path.abspath(re.sub('\\~.+', "", fq))
                    fq2=os.path.abspath(re.sub('.+\\~', "", fq))
                    return fq1 + "~" + fq2
                else:
                    return os.path.abspath(fq)

            samps["run"] = [get_fq_path(fq, pe) for idx, fq, pe in samps[["experiment", "paired_end"]].itertuples()]
        samps["experiment"] = samps["name"]
    return samps


@click.group()
@click.version_option(__version__)
@click.pass_context
def cli(ctx, **kwargs):
    """
    RLPipes: A standardized R-loop-mapping pipeline.
    """
    pass


@cli.command("build")
@click.argument("run_dir", type=click.Path(), callback=validate_run_dir)
@click.argument("samples", type=click.File("rb"), callback=validate_samples)
@click.option("--mode", "-m", callback=validate_mode, help=modeHelp)
@click.option(
    "--genome",
    "-g",
    callback=validate_genome,
    help="UCSC genome for samples (e.g., 'hg38'). Not required if providing public data accessions.",
)
@click.option(
    "--name",
    "-n",
    help="Sample names for use in output report. By default, inferred from inputs.",
)
@click.pass_context
def build(ctx, samples, mode, genome, run_dir, name):
    """
    Configure an RLPipes workflow.

    RUN_DIR: Directory for RLPipes Execution. Will be created if it does not exist.

    SAMPLES: A CSV file with at least one column "experiment" that provides the
    path to either local fastq files, bam files, or public sample accessions (SRX or GSM).
    Input controls should be in the "control" column.
    \n
    If providing paired-end fastq files, enter: "exp_1.fastq~exp_2.fastq".\n
    Columns may also include "genome" and "mode" columns. These will override the -g, -m, and -n options.\n
    "genome" (-g/--genome) is not required if providing public data accessions.\n
    \n
    Example #1: "RLPipes build -m DRIP outdir/ samples.csv"\n
    samples.csv:\n
    \texperiment\n
    \tSRX113812\n
    \tSRX113813\n
    \n
    Example #2: "RLPipes build outdir/ samples.csv"\n
    samples.csv:\n
    \texperiment, control, genome, mode\n
    \tqDRIP_siGL3_1.fq~qDRIP_siGL3_2.fq, , hg38, qDRIP\n
    \tDRIPc_3T3.fq, Input_3T3.fq, mm10, DRIPc\n
    """
    # Add in potentially, missing columns (CSV columns override CLI options)
    if not "mode" in samples.columns:
        samples["mode"] = mode
    if not "genome" in samples.columns:
        try:
            assert genome is not None
        except AssertionError:
            raise click.BadParameter(
                message="Genome cannot be missing when running local files."
            )
        samples["genome"] = genome
    if not "name" in samples.columns:
        samples["name"] = name

    # Get the effective genome sizes, snapping each sample's read length
    # to the closest length available in the table.
    eff_gen_size = pd.read_table(GENSIZE_PATH)
    sizes = eff_gen_size["read_length"].unique()
    samples["read_length"] = [
        min(sizes, key=lambda x: abs(x - sizeCheck))
        for sizeCheck in samples["read_length"]
    ]
    samples = pd.merge(
        samples, eff_gen_size.rename(columns={"UCSC_orgID": "genome"}), how="inner"
    )

    # Compile to json for snakemake (and a TSV copy for --tsv runs)
    outtsv = os.path.join(run_dir, "config.tsv")
    outdf = samples.fillna("").reset_index().drop("index", axis=1)
    outdf.to_csv(outtsv, sep="\t", index=False)
    outjson = os.path.join(run_dir, "config.json")
    outdict = samples.fillna("").reset_index().drop("index", axis=1).to_dict("list")
    with open(outjson, "w") as f:
        json.dump(outdict, f, ensure_ascii=False)

    print("\nSuccess! RLPipes has been initialized at the specified directory: " + run_dir)
    print("\nRun 'RLPipes check " + run_dir + "' to verify the configuration.\n")


@cli.command("check")
@click.argument("run_dir", type=click.Path(), callback=validate_run_dir_prepped)
@add_options(verify_run_options)
def check(run_dir, threads, debug, bwamem2, macs3, groupby, noexp, noreport, tsv, useaws, **kwargs):
    """
    Validate an RLPipes workflow.

    RUN_DIR: Directory configured with `RLPipes build` and ready for checking and execution.
    """
    smargs = kwargs["smargs"]
    # verify=True performs a snakemake dry-run and returns the DAG image path.
    dagfile = make_snakes(
        snake_args=smargs,
        run_dir=run_dir,
        src_dir=SRC_DIR,
        threads=threads,
        bwamem2=bwamem2,
        macs3=macs3,
        groupby=groupby,
        noexp=noexp,
        noreport=noreport,
        debug=debug,
        tsv=tsv,
        useaws=useaws,
        verify=True,
    )
    print(
        "\nSuccess! The DAG has been generated successfully. You can view it here: "
        + dagfile
    )
    print("\nRun 'RLPipes run " + run_dir + "' to execute the workflow.\n")


@cli.command("run")
@click.argument("run_dir", type=click.Path(), callback=validate_run_dir_prepped)
@add_options(verify_run_options)
def run(run_dir, threads, debug, bwamem2, macs3, groupby, noexp, noreport, tsv, useaws, **kwargs):
    """
    Execute an RLPipes workflow.

    RUN_DIR: Directory configured with `RLPipes build` and ready for checking and execution.
    """
    smargs = kwargs["smargs"]
    # verify=False actually executes the workflow; returns the exit code.
    exitcode = make_snakes(
        snake_args=smargs,
        run_dir=run_dir,
        src_dir=SRC_DIR,
        threads=threads,
        bwamem2=bwamem2,
        macs3=macs3,
        groupby=groupby,
        noexp=noexp,
        noreport=noreport,
        debug=debug,
        tsv=tsv,
        useaws=useaws,
        verify=False,
    )
    print(exitcode)
    print("Success! RLPipes will now close.")
[ "pandas.read_csv", "pkg_resources.require", "click.File", "pysam.AlignmentFile", "click.BadParameter", "os.path.exists", "click.group", "click.option", "pysam.set_verbosity", "pandas.DataFrame", "pandas.merge", "re.match", "os.path.splitext", "os.path.dirname", "click.version_option", ...
[((1461, 1486), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1476, 1486), False, 'import os\n'), ((15858, 15871), 'click.group', 'click.group', ([], {}), '()\n', (15869, 15871), False, 'import click\n'), ((15873, 15906), 'click.version_option', 'click.version_option', (['__version__'], {}), '(__version__)\n', (15893, 15906), False, 'import click\n'), ((16204, 16271), 'click.option', 'click.option', (['"""--mode"""', '"""-m"""'], {'callback': 'validate_mode', 'help': 'modeHelp'}), "('--mode', '-m', callback=validate_mode, help=modeHelp)\n", (16216, 16271), False, 'import click\n'), ((16273, 16437), 'click.option', 'click.option', (['"""--genome"""', '"""-g"""'], {'callback': 'validate_genome', 'help': '"""UCSC genome for samples (e.g., \'hg38\'). Not required if providing public data accessions."""'}), '(\'--genome\', \'-g\', callback=validate_genome, help=\n "UCSC genome for samples (e.g., \'hg38\'). Not required if providing public data accessions."\n )\n', (16285, 16437), False, 'import click\n'), ((16448, 16562), 'click.option', 'click.option', (['"""--name"""', '"""-n"""'], {'help': '"""Sample names for use in output report. By default, inferred from inputs."""'}), "('--name', '-n', help=\n 'Sample names for use in output report. 
By default, inferred from inputs.')\n", (16460, 16562), False, 'import click\n'), ((1520, 1585), 'os.path.join', 'os.path.join', (['this_dir', '"""src"""', '"""data"""', '"""available_genomes.tsv.xz"""'], {}), "(this_dir, 'src', 'data', 'available_genomes.tsv.xz')\n", (1532, 1585), False, 'import os\n'), ((1624, 1684), 'os.path.join', 'os.path.join', (['this_dir', '"""src"""', '"""data"""', '"""eff_gen_size.tsv.xz"""'], {}), "(this_dir, 'src', 'data', 'eff_gen_size.tsv.xz')\n", (1636, 1684), False, 'import os\n'), ((1713, 1742), 'os.path.join', 'os.path.join', (['this_dir', '"""src"""'], {}), "(this_dir, 'src')\n", (1725, 1742), False, 'import os\n'), ((3421, 3527), 'click.option', 'click.option', (['"""--smargs"""', '"""-s"""'], {'cls': 'PythonLiteralOption', 'help': 'snakeHelp', 'default': '"""{\'use_conda\': True}"""'}), '(\'--smargs\', \'-s\', cls=PythonLiteralOption, help=snakeHelp,\n default="{\'use_conda\': True}")\n', (3433, 3527), False, 'import click\n'), ((3576, 3667), 'click.option', 'click.option', (['"""--threads"""', '"""-t"""'], {'help': '"""Number of threads to use. Default: 1"""', 'default': '(1)'}), "('--threads', '-t', help='Number of threads to use. Default: 1',\n default=1)\n", (3588, 3667), False, 'import click\n'), ((3683, 3890), 'click.option', 'click.option', (['"""--bwamem2"""'], {'is_flag': '(True)', 'help': '"""Align with BWA-MEM2 instead of BWA. BWA MEM2 Needs > 70GB RAM avaialble to build index, but shows > 3x speed increase. Default: False."""', 'default': '(False)'}), "('--bwamem2', is_flag=True, help=\n 'Align with BWA-MEM2 instead of BWA. BWA MEM2 Needs > 70GB RAM avaialble to build index, but shows > 3x speed increase. 
Default: False.'\n , default=False)\n", (3695, 3890), False, 'import click\n'), ((3925, 4029), 'click.option', 'click.option', (['"""--macs3"""'], {'help': '"""Call peaks using macs3 instead of macs2"""', 'is_flag': '(True)', 'default': '(False)'}), "('--macs3', help='Call peaks using macs3 instead of macs2',\n is_flag=True, default=False)\n", (3937, 4029), False, 'import click\n'), ((4070, 4117), 'click.option', 'click.option', (['"""--groupby"""', '"""-G"""'], {'help': 'groupHelp'}), "('--groupby', '-G', help=groupHelp)\n", (4082, 4117), False, 'import click\n'), ((4147, 4213), 'click.option', 'click.option', (['"""--noexp"""'], {'help': 'expHelp', 'is_flag': '(True)', 'default': '(False)'}), "('--noexp', help=expHelp, is_flag=True, default=False)\n", (4159, 4213), False, 'import click\n'), ((4257, 4369), 'click.option', 'click.option', (['"""--noreport"""'], {'help': '"""If set, RSeq reports will not be generated."""', 'is_flag': '(True)', 'default': '(False)'}), "('--noreport', help=\n 'If set, RSeq reports will not be generated.', is_flag=True, default=False)\n", (4269, 4369), False, 'import click\n'), ((4408, 4531), 'click.option', 'click.option', (['"""--debug"""'], {'is_flag': '(True)', 'help': '"""Run pipeline on subsampled number of reads (for testing)."""', 'default': '(False)'}), "('--debug', is_flag=True, help=\n 'Run pipeline on subsampled number of reads (for testing).', default=False)\n", (4420, 4531), False, 'import click\n'), ((4571, 4698), 'click.option', 'click.option', (['"""--tsv"""'], {'is_flag': '(True)', 'help': '"""Obtain config from config.tsv file instead of config.json."""', 'default': '(False)'}), "('--tsv', is_flag=True, help=\n 'Obtain config from config.tsv file instead of config.json.', default=False\n )\n", (4583, 4698), False, 'import click\n'), ((4733, 4900), 'click.option', 'click.option', (['"""--useaws"""'], {'is_flag': '(True)', 'help': '"""If set, prefetch from SRA tools will be used to download any public SRA data instead 
of AWS S3."""', 'default': '(False)'}), "('--useaws', is_flag=True, help=\n 'If set, prefetch from SRA tools will be used to download any public SRA data instead of AWS S3.'\n , default=False)\n", (4745, 4900), False, 'import click\n'), ((6461, 6483), 'os.path.abspath', 'os.path.abspath', (['value'], {}), '(value)\n', (6476, 6483), False, 'import os\n'), ((6890, 6912), 'os.path.abspath', 'os.path.abspath', (['value'], {}), '(value)\n', (6905, 6912), False, 'import os\n'), ((7060, 7082), 'pysam.set_verbosity', 'pysam.set_verbosity', (['(0)'], {}), '(0)\n', (7079, 7082), False, 'import pysam\n'), ((7097, 7131), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['bamfile', '"""rb"""'], {}), "(bamfile, 'rb')\n", (7116, 7131), False, 'import pysam\n'), ((7136, 7161), 'pysam.set_verbosity', 'pysam.set_verbosity', (['save'], {}), '(save)\n', (7155, 7161), False, 'import pysam\n'), ((7616, 7634), 'pandas.read_csv', 'pd.read_csv', (['value'], {}), '(value)\n', (7627, 7634), True, 'import pandas as pd\n'), ((18199, 18226), 'pandas.read_table', 'pd.read_table', (['GENSIZE_PATH'], {}), '(GENSIZE_PATH)\n', (18212, 18226), True, 'import pandas as pd\n'), ((18578, 18613), 'os.path.join', 'os.path.join', (['run_dir', '"""config.tsv"""'], {}), "(run_dir, 'config.tsv')\n", (18590, 18613), False, 'import os\n'), ((18743, 18779), 'os.path.join', 'os.path.join', (['run_dir', '"""config.json"""'], {}), "(run_dir, 'config.json')\n", (18755, 18779), False, 'import os\n'), ((1797, 1829), 'pkg_resources.require', 'pkg_resources.require', (['"""rlpipes"""'], {}), "('rlpipes')\n", (1818, 1829), False, 'import pkg_resources\n'), ((5290, 5314), 'pandas.read_table', 'pd.read_table', (['DATA_PATH'], {}), '(DATA_PATH)\n', (5303, 5314), True, 'import pandas as pd\n'), ((6021, 6054), 'os.makedirs', 'os.makedirs', (['value'], {'exist_ok': '(True)'}), '(value, exist_ok=True)\n', (6032, 6054), False, 'import os\n'), ((10156, 10180), 'pandas.read_table', 'pd.read_table', (['DATA_PATH'], {}), 
'(DATA_PATH)\n', (10169, 10180), True, 'import pandas as pd\n'), ((18912, 18953), 'json.dump', 'json.dump', (['outdict', 'f'], {'ensure_ascii': '(False)'}), '(outdict, f, ensure_ascii=False)\n', (18921, 18953), False, 'import json\n'), ((16085, 16097), 'click.Path', 'click.Path', ([], {}), '()\n', (16095, 16097), False, 'import click\n'), ((16158, 16174), 'click.File', 'click.File', (['"""rb"""'], {}), "('rb')\n", (16168, 16174), False, 'import click\n'), ((19185, 19197), 'click.Path', 'click.Path', ([], {}), '()\n', (19195, 19197), False, 'import click\n'), ((20129, 20141), 'click.Path', 'click.Path', ([], {}), '()\n', (20139, 20141), False, 'import click\n'), ((6099, 6242), 'click.BadParameter', 'click.BadParameter', (['("\'" + value + "\' could not be created using `os.makedirs(" + value +\n \', exist_ok=True)` please re-check this path.\')'], {}), '("\'" + value +\n "\' could not be created using `os.makedirs(" + value +\n \', exist_ok=True)` please re-check this path.\')\n', (6117, 6242), False, 'import click\n'), ((6347, 6432), 'click.BadParameter', 'click.BadParameter', (['("RUN_DIR must be a directory. User supplied \'" + value + "\'")'], {}), '("RUN_DIR must be a directory. 
User supplied \'" + value + "\'"\n )\n', (6365, 6432), False, 'import click\n'), ((6559, 6580), 'os.path.exists', 'os.path.exists', (['value'], {}), '(value)\n', (6573, 6580), False, 'import os\n'), ((7836, 7910), 'click.BadParameter', 'click.BadParameter', ([], {'message': "('Unable to detect data format for file ' + exp)"}), "(message='Unable to detect data format for file ' + exp)\n", (7854, 7910), False, 'import click\n'), ((8565, 8601), 'os.environ.get', 'os.environ.get', (['"""NCBI_API_KEY"""', 'None'], {}), "('NCBI_API_KEY', None)\n", (8579, 8601), False, 'import os\n'), ((724, 749), 'click.BadParameter', 'click.BadParameter', (['value'], {}), '(value)\n', (742, 749), False, 'import click\n'), ((5444, 5540), 'click.BadParameter', 'click.BadParameter', (['("\'" + value + "\' is not a valid UCSC genome ID (e.g., \'hg38\' is valid).")'], {}), '("\'" + value +\n "\' is not a valid UCSC genome ID (e.g., \'hg38\' is valid).")\n', (5462, 5540), False, 'import click\n'), ((5786, 5885), 'click.BadParameter', 'click.BadParameter', (['("\'" + value + "\' is not a valid mode. (RSeqCLI build --help for more info)")'], {}), '("\'" + value +\n "\' is not a valid mode. 
(RSeqCLI build --help for more info)")\n', (5804, 5885), False, 'import click\n'), ((6613, 6647), 'os.path.join', 'os.path.join', (['value', '"""config.json"""'], {}), "(value, 'config.json')\n", (6625, 6647), False, 'import os\n'), ((12348, 12399), 'pandas.merge', 'pd.merge', (['samps', 'newSamps'], {'on': '"""experiment_original"""'}), "(samps, newSamps, on='experiment_original')\n", (12356, 12399), True, 'import pandas as pd\n'), ((12714, 12780), 'pandas.merge', 'pd.merge', (['samps', 'srx_to_origctr'], {'how': '"""left"""', 'on': '"""control_original"""'}), "(samps, srx_to_origctr, how='left', on='control_original')\n", (12722, 12780), True, 'import pandas as pd\n'), ((13778, 13798), 'os.path.abspath', 'os.path.abspath', (['bam'], {}), '(bam)\n', (13793, 13798), False, 'import os\n'), ((17927, 18012), 'click.BadParameter', 'click.BadParameter', ([], {'message': '"""Genome cannot be missing when running local files."""'}), "(message='Genome cannot be missing when running local files.'\n )\n", (17945, 18012), False, 'import click\n'), ((7778, 7796), 're.match', 're.match', (['val', 'exp'], {}), '(val, exp)\n', (7786, 7796), False, 'import re\n'), ((8824, 9090), 'pandas.DataFrame', 'pd.DataFrame', (["{'experiment': x, 'study_accession': pd.NA, 'experiment_title': pd.NA,\n 'experiment_accession': pd.NA, 'organism_taxid ': pd.NA,\n 'run_accession': pd.NA, 'library_layout': pd.NA, 'run_total_bases': pd.\n NA, 'run_total_spots': pd.NA}"], {'index': '[0]'}), "({'experiment': x, 'study_accession': pd.NA, 'experiment_title':\n pd.NA, 'experiment_accession': pd.NA, 'organism_taxid ': pd.NA,\n 'run_accession': pd.NA, 'library_layout': pd.NA, 'run_total_bases': pd.\n NA, 'run_total_spots': pd.NA}, index=[0])\n", (8836, 9090), True, 'import pandas as pd\n'), ((14329, 14365), 'pyfastx.Fastq', 'pyfastx.Fastq', (['fq'], {'build_index': '(False)'}), '(fq, build_index=False)\n', (14342, 14365), False, 'import pyfastx\n'), ((14941, 14976), 're.sub', 're.sub', 
(['"""[\\\\._]{1}[R1-2]+$"""', '""""""', 'nm'], {}), "('[\\\\._]{1}[R1-2]+$', '', nm)\n", (14947, 14976), False, 'import re\n'), ((6768, 6802), 'os.path.join', 'os.path.join', (['value', '"""config.json"""'], {}), "(value, 'config.json')\n", (6780, 6802), False, 'import os\n'), ((13423, 13444), 'os.path.basename', 'os.path.basename', (['exp'], {}), '(exp)\n', (13439, 13444), False, 'import os\n'), ((13942, 13966), 're.match', 're.match', (['""".+\\\\~.+"""', 'exp'], {}), "('.+\\\\~.+', exp)\n", (13950, 13966), False, 'import re\n'), ((14627, 14651), 're.sub', 're.sub', (['"""\\\\~.+"""', '""""""', 'exp'], {}), "('\\\\~.+', '', exp)\n", (14633, 14651), False, 'import re\n'), ((14778, 14798), 'os.path.basename', 'os.path.basename', (['fq'], {}), '(fq)\n', (14794, 14798), False, 'import os\n'), ((15043, 15067), 're.sub', 're.sub', (['"""\\\\~.+"""', '""""""', 'exp'], {}), "('\\\\~.+', '', exp)\n", (15049, 15067), False, 'import re\n'), ((15644, 15663), 'os.path.abspath', 'os.path.abspath', (['fq'], {}), '(fq)\n', (15659, 15663), False, 'import os\n'), ((13616, 13628), 'pandas.isna', 'pd.isna', (['exp'], {}), '(exp)\n', (13623, 13628), True, 'import pandas as pd\n'), ((14850, 14875), 'os.path.splitext', 'os.path.splitext', (['splt[0]'], {}), '(splt[0])\n', (14866, 14875), False, 'import os\n'), ((15484, 15507), 're.sub', 're.sub', (['"""\\\\~.+"""', '""""""', 'fq'], {}), "('\\\\~.+', '', fq)\n", (15490, 15507), False, 'import re\n'), ((15543, 15566), 're.sub', 're.sub', (['""".+\\\\~"""', '""""""', 'fq'], {}), "('.+\\\\~', '', fq)\n", (15549, 15566), False, 'import re\n'), ((13583, 13604), 'os.path.basename', 'os.path.basename', (['exp'], {}), '(exp)\n', (13599, 13604), False, 'import os\n'), ((15245, 15257), 'pandas.isna', 'pd.isna', (['exp'], {}), '(exp)\n', (15252, 15257), True, 'import pandas as pd\n'), ((15212, 15236), 're.sub', 're.sub', (['"""\\\\~.+"""', '""""""', 'exp'], {}), "('\\\\~.+', '', exp)\n", (15218, 15236), False, 'import re\n'), ((8381, 8391), 
'pandas.isna', 'pd.isna', (['x'], {}), '(x)\n', (8388, 8391), True, 'import pandas as pd\n'), ((12940, 12950), 'pandas.isna', 'pd.isna', (['x'], {}), '(x)\n', (12947, 12950), True, 'import pandas as pd\n')]
""" Copyright (c) 2015 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import print_function, unicode_literals import logging import os import sys import pytest from atomic_reactor.buildimage import BuildImageBuilder from atomic_reactor.core import DockerTasker import atomic_reactor.cli.main from tests.fixtures import is_registry_running, temp_image_name, get_uuid from tests.constants import LOCALHOST_REGISTRY, DOCKERFILE_GIT, DOCKERFILE_OK_PATH, FILES, MOCK if MOCK: from tests.docker_mock import mock_docker PRIV_BUILD_IMAGE = None DH_BUILD_IMAGE = None logger = logging.getLogger('atomic_reactor.tests') if MOCK: mock_docker() dt = DockerTasker() reactor_root = os.path.dirname(os.path.dirname(__file__)) with_all_sources = pytest.mark.parametrize('source_provider, uri', [ ('git', DOCKERFILE_GIT), ('path', DOCKERFILE_OK_PATH), ]) # TEST-SUITE SETUP def setup_module(module): global PRIV_BUILD_IMAGE, DH_BUILD_IMAGE PRIV_BUILD_IMAGE = get_uuid() DH_BUILD_IMAGE = get_uuid() if MOCK: return b = BuildImageBuilder(reactor_local_path=reactor_root) b.create_image(os.path.join(reactor_root, 'images', 'privileged-builder'), PRIV_BUILD_IMAGE, use_cache=True) b2 = BuildImageBuilder(reactor_local_path=reactor_root) b2.create_image(os.path.join(reactor_root, 'images', 'dockerhost-builder'), DH_BUILD_IMAGE, use_cache=True) def teardown_module(module): if MOCK: return dt.remove_image(PRIV_BUILD_IMAGE, force=True) dt.remove_image(DH_BUILD_IMAGE, force=True) # TESTS class TestCLISuite(object): def exec_cli(self, command): saved_args = sys.argv sys.argv = command atomic_reactor.cli.main.run() sys.argv = saved_args @with_all_sources def test_simple_privileged_build(self, is_registry_running, temp_image_name, source_provider, uri): if MOCK: mock_docker() temp_image = temp_image_name command = [ "main.py", "--verbose", "build", source_provider, "--method", "privileged", 
"--build-image", PRIV_BUILD_IMAGE, "--image", temp_image.to_str(), "--uri", uri, ] if is_registry_running: logger.info("registry is running") command += ["--source-registry", LOCALHOST_REGISTRY] else: logger.info("registry is NOT running") with pytest.raises(SystemExit) as excinfo: self.exec_cli(command) assert excinfo.value.code == 0 @with_all_sources def test_simple_dh_build(self, is_registry_running, temp_image_name, source_provider, uri): if MOCK: mock_docker() temp_image = temp_image_name command = [ "main.py", "--verbose", "build", source_provider, "--method", "hostdocker", "--build-image", DH_BUILD_IMAGE, "--image", temp_image.to_str(), "--uri", uri, ] if is_registry_running: logger.info("registry is running") command += ["--source-registry", LOCALHOST_REGISTRY] else: logger.info("registry is NOT running") with pytest.raises(SystemExit) as excinfo: self.exec_cli(command) assert excinfo.value.code == 0 dt.remove_image(temp_image, noprune=True) def test_building_from_json_source_provider(self, is_registry_running, temp_image_name): if MOCK: mock_docker() temp_image = temp_image_name command = [ "main.py", "--verbose", "build", "json", "--method", "hostdocker", "--build-image", DH_BUILD_IMAGE, os.path.join(FILES, 'example-build.json'), "--substitute", "image={0}".format(temp_image), "source.uri={0}".format(DOCKERFILE_OK_PATH) ] if is_registry_running: logger.info("registry is running") command += ["--source-registry", LOCALHOST_REGISTRY] else: logger.info("registry is NOT running") with pytest.raises(SystemExit) as excinfo: self.exec_cli(command) assert excinfo.value.code == 0 dt.remove_image(temp_image, noprune=True) def test_create_build_image(self, temp_image_name): if MOCK: mock_docker() temp_image = temp_image_name priv_builder_path = os.path.join(reactor_root, 'images', 'privileged-builder') command = [ "main.py", "--verbose", "create-build-image", "--reactor-local-path", reactor_root, priv_builder_path, temp_image.to_str(), ] with 
pytest.raises(SystemExit) as excinfo: self.exec_cli(command) assert excinfo.value.code == 0 dt.remove_image(temp_image, noprune=True)
[ "logging.getLogger", "atomic_reactor.core.DockerTasker", "os.path.join", "tests.fixtures.get_uuid", "pytest.mark.parametrize", "os.path.dirname", "pytest.raises", "tests.docker_mock.mock_docker", "atomic_reactor.buildimage.BuildImageBuilder" ]
[((705, 746), 'logging.getLogger', 'logging.getLogger', (['"""atomic_reactor.tests"""'], {}), "('atomic_reactor.tests')\n", (722, 746), False, 'import logging\n'), ((780, 794), 'atomic_reactor.core.DockerTasker', 'DockerTasker', ([], {}), '()\n', (792, 794), False, 'from atomic_reactor.core import DockerTasker\n'), ((873, 982), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""source_provider, uri"""', "[('git', DOCKERFILE_GIT), ('path', DOCKERFILE_OK_PATH)]"], {}), "('source_provider, uri', [('git', DOCKERFILE_GIT), (\n 'path', DOCKERFILE_OK_PATH)])\n", (896, 982), False, 'import pytest\n'), ((761, 774), 'tests.docker_mock.mock_docker', 'mock_docker', ([], {}), '()\n', (772, 774), False, 'from tests.docker_mock import mock_docker\n'), ((826, 851), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (841, 851), False, 'import os\n'), ((1103, 1113), 'tests.fixtures.get_uuid', 'get_uuid', ([], {}), '()\n', (1111, 1113), False, 'from tests.fixtures import is_registry_running, temp_image_name, get_uuid\n'), ((1135, 1145), 'tests.fixtures.get_uuid', 'get_uuid', ([], {}), '()\n', (1143, 1145), False, 'from tests.fixtures import is_registry_running, temp_image_name, get_uuid\n'), ((1183, 1233), 'atomic_reactor.buildimage.BuildImageBuilder', 'BuildImageBuilder', ([], {'reactor_local_path': 'reactor_root'}), '(reactor_local_path=reactor_root)\n', (1200, 1233), False, 'from atomic_reactor.buildimage import BuildImageBuilder\n'), ((1376, 1426), 'atomic_reactor.buildimage.BuildImageBuilder', 'BuildImageBuilder', ([], {'reactor_local_path': 'reactor_root'}), '(reactor_local_path=reactor_root)\n', (1393, 1426), False, 'from atomic_reactor.buildimage import BuildImageBuilder\n'), ((1253, 1311), 'os.path.join', 'os.path.join', (['reactor_root', '"""images"""', '"""privileged-builder"""'], {}), "(reactor_root, 'images', 'privileged-builder')\n", (1265, 1311), False, 'import os\n'), ((1447, 1505), 'os.path.join', 'os.path.join', (['reactor_root', 
'"""images"""', '"""dockerhost-builder"""'], {}), "(reactor_root, 'images', 'dockerhost-builder')\n", (1459, 1505), False, 'import os\n'), ((4715, 4773), 'os.path.join', 'os.path.join', (['reactor_root', '"""images"""', '"""privileged-builder"""'], {}), "(reactor_root, 'images', 'privileged-builder')\n", (4727, 4773), False, 'import os\n'), ((2082, 2095), 'tests.docker_mock.mock_docker', 'mock_docker', ([], {}), '()\n', (2093, 2095), False, 'from tests.docker_mock import mock_docker\n'), ((2639, 2664), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (2652, 2664), False, 'import pytest\n'), ((2900, 2913), 'tests.docker_mock.mock_docker', 'mock_docker', ([], {}), '()\n', (2911, 2913), False, 'from tests.docker_mock import mock_docker\n'), ((3455, 3480), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (3468, 3480), False, 'import pytest\n'), ((3740, 3753), 'tests.docker_mock.mock_docker', 'mock_docker', ([], {}), '()\n', (3751, 3753), False, 'from tests.docker_mock import mock_docker\n'), ((3996, 4037), 'os.path.join', 'os.path.join', (['FILES', '"""example-build.json"""'], {}), "(FILES, 'example-build.json')\n", (4008, 4037), False, 'import os\n'), ((4387, 4412), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (4400, 4412), False, 'import pytest\n'), ((4635, 4648), 'tests.docker_mock.mock_docker', 'mock_docker', ([], {}), '()\n', (4646, 4648), False, 'from tests.docker_mock import mock_docker\n'), ((5013, 5038), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (5026, 5038), False, 'import pytest\n')]
# Copyright (c) 2017-2018 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from django.conf.urls import url from rest_framework.urlpatterns import format_suffix_patterns from titanium_cloud.swagger.views import SwaggerJsonViewDepreciated from titanium_cloud.swagger.views import APIv1SwaggerJsonViewDepreciated from titanium_cloud.swagger.views import SwaggerJsonView from titanium_cloud.swagger.views import APIv1SwaggerJsonView URLPATTERNS = [ # API v0, depreciated url(r'^api/multicloud-titanium_cloud/v0/swagger.json$', SwaggerJsonViewDepreciated.as_view()), # API v1, depreciated url(r'^api/multicloud-titanium_cloud/v1/swagger.json$', APIv1SwaggerJsonViewDepreciated.as_view()), # API v0, new namespace: MULTICLOUD-335 url(r'^api/multicloud-titaniumcloud/v0/swagger.json$', SwaggerJsonView.as_view()), # API v1, new namespace: MULTICLOUD-335 url(r'^api/multicloud-titaniumcloud/v1/swagger.json$', APIv1SwaggerJsonView.as_view()), ] urlpatterns = format_suffix_patterns(URLPATTERNS)
[ "titanium_cloud.swagger.views.APIv1SwaggerJsonView.as_view", "titanium_cloud.swagger.views.APIv1SwaggerJsonViewDepreciated.as_view", "titanium_cloud.swagger.views.SwaggerJsonViewDepreciated.as_view", "titanium_cloud.swagger.views.SwaggerJsonView.as_view", "rest_framework.urlpatterns.format_suffix_patterns" ...
[((1518, 1553), 'rest_framework.urlpatterns.format_suffix_patterns', 'format_suffix_patterns', (['URLPATTERNS'], {}), '(URLPATTERNS)\n', (1540, 1553), False, 'from rest_framework.urlpatterns import format_suffix_patterns\n'), ((1061, 1097), 'titanium_cloud.swagger.views.SwaggerJsonViewDepreciated.as_view', 'SwaggerJsonViewDepreciated.as_view', ([], {}), '()\n', (1095, 1097), False, 'from titanium_cloud.swagger.views import SwaggerJsonViewDepreciated\n'), ((1187, 1228), 'titanium_cloud.swagger.views.APIv1SwaggerJsonViewDepreciated.as_view', 'APIv1SwaggerJsonViewDepreciated.as_view', ([], {}), '()\n', (1226, 1228), False, 'from titanium_cloud.swagger.views import APIv1SwaggerJsonViewDepreciated\n'), ((1335, 1360), 'titanium_cloud.swagger.views.SwaggerJsonView.as_view', 'SwaggerJsonView.as_view', ([], {}), '()\n', (1358, 1360), False, 'from titanium_cloud.swagger.views import SwaggerJsonView\n'), ((1467, 1497), 'titanium_cloud.swagger.views.APIv1SwaggerJsonView.as_view', 'APIv1SwaggerJsonView.as_view', ([], {}), '()\n', (1495, 1497), False, 'from titanium_cloud.swagger.views import APIv1SwaggerJsonView\n')]
from ofptHeader import ofptHeader def ofptBarrierReq(): header = ofptHeader(20) return header
[ "ofptHeader.ofptHeader" ]
[((67, 81), 'ofptHeader.ofptHeader', 'ofptHeader', (['(20)'], {}), '(20)\n', (77, 81), False, 'from ofptHeader import ofptHeader\n')]
import pytest from whylogs.core.metrics.nlp_metrics import NLPMetrics from whylogs.proto import NLPMetricsMessage def test_nlp_metrics(): nlp_metric = NLPMetrics() assert nlp_metric.mer is not None assert nlp_metric.mer.count == 0 nlp_metric.update("hello", "hello") assert nlp_metric.mer.histogram.get_min_value() == 0.0 assert nlp_metric.mer.histogram.get_max_value() == 0.0 assert nlp_metric.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0] assert nlp_metric.mer.count == 1 assert nlp_metric.wer.histogram.get_min_value() == 0.0 assert nlp_metric.wer.histogram.get_max_value() == 0.0 assert nlp_metric.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0] assert nlp_metric.wer.count == 1 assert nlp_metric.wil.histogram.get_min_value() == 0.0 assert nlp_metric.wil.histogram.get_max_value() == 0.0 assert nlp_metric.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0] assert nlp_metric.wil.count == 1 def test_nlp_metrics_message(): nlp_metric = NLPMetrics() assert nlp_metric.mer is not None assert nlp_metric.mer.count == 0 nlp_metric.update(["hello brother"], ["hello sister"]) nlp_metric.update(["bye brother"], ["bye sister"]) nlp_metric.update(["what up brother"], ["what up ii sister"]) assert nlp_metric.mer.histogram.get_min_value() == 0.5 assert nlp_metric.mer.histogram.get_max_value() == 0.5 assert nlp_metric.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000] assert nlp_metric.mer.count == 3 assert nlp_metric.wer.histogram.get_min_value() == 0.5 assert nlp_metric.wer.histogram.get_max_value() == 0.5 assert nlp_metric.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000] assert nlp_metric.wer.count == 3 assert pytest.approx(nlp_metric.wil.histogram.get_min_value(), 0.001) == 0.6666 assert nlp_metric.wil.histogram.get_max_value() == 0.75 assert pytest.approx(nlp_metric.wil.histogram.get_quantiles([0.25, 0.5, 0.75]), 0.001) == [0.666666, 0.75, 0.75] assert nlp_metric.wil.count == 3 nlp_message = 
nlp_metric.to_protobuf() nlp_metric_new = NLPMetrics.from_protobuf(nlp_message) assert nlp_metric_new.wil.histogram.get_min_value() == nlp_metric.wil.histogram.get_min_value() assert nlp_metric_new.wil.histogram.get_max_value() == nlp_metric.wil.histogram.get_max_value() assert nlp_metric_new.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) == nlp_metric.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) assert nlp_metric_new.wil.count == nlp_metric.wil.count assert nlp_metric_new.mer.histogram.get_min_value() == nlp_metric.mer.histogram.get_min_value() assert nlp_metric_new.mer.histogram.get_max_value() == nlp_metric.mer.histogram.get_max_value() assert nlp_metric_new.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == nlp_metric.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) assert nlp_metric_new.mer.count == nlp_metric.mer.count assert nlp_metric_new.wer.histogram.get_min_value() == nlp_metric.wer.histogram.get_min_value() assert nlp_metric_new.wer.histogram.get_max_value() == nlp_metric.wer.histogram.get_max_value() assert nlp_metric_new.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == nlp_metric.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) assert nlp_metric_new.wer.count == nlp_metric.wer.count def test_nlp_merge(): nlp_metric = NLPMetrics() nlp_metric.update("hello", "hello") nlp_initial_test = None nlp_merge_metrics = nlp_metric.merge(nlp_initial_test) assert nlp_merge_metrics.mer.histogram.get_min_value() == 0.0 assert nlp_merge_metrics.mer.histogram.get_max_value() == 0.0 assert nlp_merge_metrics.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0] assert nlp_merge_metrics.mer.count == 1 assert nlp_merge_metrics.wer.histogram.get_min_value() == 0.0 assert nlp_merge_metrics.wer.histogram.get_max_value() == 0.0 assert nlp_merge_metrics.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0] assert nlp_merge_metrics.wer.count == 1 assert nlp_merge_metrics.wil.histogram.get_min_value() == 0.0 assert 
nlp_merge_metrics.wil.histogram.get_max_value() == 0.0 assert nlp_merge_metrics.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0] assert nlp_merge_metrics.wil.count == 1 nlp_metric_2 = NLPMetrics() nlp_metric_2.update(["hello brother"], ["hello sister"]) nlp_metric_2.update(["bye brother"], ["bye sister"]) nlp_metric_2.update(["what up brother"], ["what up ii sister"]) nlp_merge_metrics = nlp_merge_metrics.merge(nlp_metric_2) assert nlp_merge_metrics.mer.histogram.get_min_value() == 0.0 assert nlp_merge_metrics.mer.histogram.get_max_value() == 0.5 assert nlp_merge_metrics.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000] assert nlp_merge_metrics.mer.count == 4 assert nlp_merge_metrics.wer.histogram.get_min_value() == 0.0 assert nlp_merge_metrics.wer.histogram.get_max_value() == 0.5 assert nlp_merge_metrics.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000] assert nlp_merge_metrics.wer.count == 4 assert pytest.approx(nlp_merge_metrics.wil.histogram.get_min_value(), 0.001) == 0.0 assert nlp_merge_metrics.wil.histogram.get_max_value() == 0.75 assert pytest.approx(nlp_merge_metrics.wil.histogram.get_quantiles([0.25, 0.5, 0.75]), 0.001) == [0.666666, 0.75, 0.75] assert nlp_merge_metrics.wil.count == 4
[ "whylogs.core.metrics.nlp_metrics.NLPMetrics", "whylogs.core.metrics.nlp_metrics.NLPMetrics.from_protobuf" ]
[((158, 170), 'whylogs.core.metrics.nlp_metrics.NLPMetrics', 'NLPMetrics', ([], {}), '()\n', (168, 170), False, 'from whylogs.core.metrics.nlp_metrics import NLPMetrics\n'), ((1069, 1081), 'whylogs.core.metrics.nlp_metrics.NLPMetrics', 'NLPMetrics', ([], {}), '()\n', (1079, 1081), False, 'from whylogs.core.metrics.nlp_metrics import NLPMetrics\n'), ((2195, 2232), 'whylogs.core.metrics.nlp_metrics.NLPMetrics.from_protobuf', 'NLPMetrics.from_protobuf', (['nlp_message'], {}), '(nlp_message)\n', (2219, 2232), False, 'from whylogs.core.metrics.nlp_metrics import NLPMetrics\n'), ((3459, 3471), 'whylogs.core.metrics.nlp_metrics.NLPMetrics', 'NLPMetrics', ([], {}), '()\n', (3469, 3471), False, 'from whylogs.core.metrics.nlp_metrics import NLPMetrics\n'), ((4435, 4447), 'whylogs.core.metrics.nlp_metrics.NLPMetrics', 'NLPMetrics', ([], {}), '()\n', (4445, 4447), False, 'from whylogs.core.metrics.nlp_metrics import NLPMetrics\n')]
# https://leetcode.com/problems/ransom-note/ from collections import Counter class Solution(object): def canConstruct(self, ransomNote, magazine): """ :type ransomNote: str :type magazine: str :rtype: bool """ available = Counter(magazine) for c in ransomNote: if available[c] <= 0: return False available[c] -= 1 return True
[ "collections.Counter" ]
[((278, 295), 'collections.Counter', 'Counter', (['magazine'], {}), '(magazine)\n', (285, 295), False, 'from collections import Counter\n')]
# InteractionSubmission: import tensorflow as tf import numpy as np from waymo_open_dataset.protos import motion_submission_pb2 def eval_and_generate_submission(model, eval_dataset, num_modes = 6): submission = motion_submission_pb2.MotionChallengeSubmission() submission.submission_type = 2 submission.affiliation = '' submission.description = '' submission.method_link = '' current_scenario_id = "" for i, batch in enumerate(eval_dataset): if i % 100 == 0: print(i) trajectories, confidences = model.predict_step(batch) # (B, K**2, 2, 16, 2), (B, K**2) trajectories = trajectories.numpy() confidences = confidences.numpy() for example_index in range(len(trajectories)): example_scenario_id = batch['scenario_id'][example_index, 0].numpy() if example_scenario_id != current_scenario_id: current_scenario_id = example_scenario_id current_scenario_prediction = submission.scenario_predictions.add() current_scenario_prediction.scenario_id = example_scenario_id # prediction is of type SingleObjectPrediction for mode in range(num_modes): joint_trajectory = current_scenario_prediction.joint_prediction.joint_trajectories.add() joint_trajectory.confidence = confidences[example_index, mode] for j in batch['indices'][example_index]: object_trajectory = joint_trajectory.trajectories.add() object_trajectory.object_id = batch['object_id'][example_index, j].numpy().astype(np.int32) center_x = [] center_y = [] for t in range(16): x = trajectories[example_index, mode, j, t, 0] y = trajectories[example_index, mode, j, t, 1] center_x.append(x) center_y.append(y) trajectory = object_trajectory.trajectory trajectory.center_x.extend(center_x) trajectory.center_y.extend(center_y) return submission def write_submission_to_file(submission, output_filename): f = open(output_filename, "wb") f.write(submission.SerializeToString()) f.close()
[ "waymo_open_dataset.protos.motion_submission_pb2.MotionChallengeSubmission" ]
[((214, 263), 'waymo_open_dataset.protos.motion_submission_pb2.MotionChallengeSubmission', 'motion_submission_pb2.MotionChallengeSubmission', ([], {}), '()\n', (261, 263), False, 'from waymo_open_dataset.protos import motion_submission_pb2\n')]
from django.conf import settings from rest_framework.permissions import IsAdminUser from rest_framework import status, viewsets, decorators from quser.permissions import CURDPermissionsOrReadOnly from rest_framework.response import Response from . import models, serializers from .filters import FileFilter class TagViewSet(viewsets.ModelViewSet): queryset = models.Tag.objects.all() serializer_class = serializers.TagSerializer permission_classes = (IsAdminUser,) class FileViewSet(viewsets.ModelViewSet): queryset = models.File.objects.filter(active=True).order_by("-id") serializer_class = serializers.FileSerializer permission_classes = (CURDPermissionsOrReadOnly,) filterset_class = FileFilter def perform_destroy(self, instance): instance.active = False instance.save() @decorators.action(methods=['delete'], detail=False, serializer_class=serializers.BulkDestroySerializer, permission_classes=(IsAdminUser,)) def bulk_destroy(self, request, *args, **kwargs): serializer = self.serializer_class(data=request.data, context=dict(request=request)) serializer.is_valid(raise_exception=True) serializer.save() return Response(status=status.HTTP_204_NO_CONTENT) @decorators.action(methods=['post',], detail=False, serializer_class=serializers.BulkUploadSerializer, permission_classes=(IsAdminUser,)) def bulk_upload(self, request, *args, **kwargs): serializer = self.serializer_class(data=request.data, context=dict(request=request)) serializer.is_valid(raise_exception=True) res = serializer.save() return Response(res, status=status.HTTP_201_CREATED)
[ "rest_framework.response.Response", "rest_framework.decorators.action" ]
[((836, 979), 'rest_framework.decorators.action', 'decorators.action', ([], {'methods': "['delete']", 'detail': '(False)', 'serializer_class': 'serializers.BulkDestroySerializer', 'permission_classes': '(IsAdminUser,)'}), "(methods=['delete'], detail=False, serializer_class=\n serializers.BulkDestroySerializer, permission_classes=(IsAdminUser,))\n", (853, 979), False, 'from rest_framework import status, viewsets, decorators\n'), ((1286, 1426), 'rest_framework.decorators.action', 'decorators.action', ([], {'methods': "['post']", 'detail': '(False)', 'serializer_class': 'serializers.BulkUploadSerializer', 'permission_classes': '(IsAdminUser,)'}), "(methods=['post'], detail=False, serializer_class=\n serializers.BulkUploadSerializer, permission_classes=(IsAdminUser,))\n", (1303, 1426), False, 'from rest_framework import status, viewsets, decorators\n'), ((1236, 1279), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_204_NO_CONTENT'}), '(status=status.HTTP_204_NO_CONTENT)\n', (1244, 1279), False, 'from rest_framework.response import Response\n'), ((1689, 1734), 'rest_framework.response.Response', 'Response', (['res'], {'status': 'status.HTTP_201_CREATED'}), '(res, status=status.HTTP_201_CREATED)\n', (1697, 1734), False, 'from rest_framework.response import Response\n')]
""" GitFeederController tests """ from django.test import TestCase from django.contrib.auth.models import User from app.logic.gitfeeder.models.FeedModel import FeedEntry from app.logic.gitrepo.models.GitProjectModel import GitProjectEntry from app.logic.gitrepo.models.GitUserModel import GitUserEntry from app.logic.bluesteelworker.models.WorkerModel import WorkerEntry from app.logic.commandrepo.models.CommandGroupModel import CommandGroupEntry class FeedModelTestCase(TestCase): def setUp(self): self.user1 = User.objects.create_user('<EMAIL>', '<EMAIL>', '<PASSWORD>') self.worker1 = WorkerEntry.objects.create( name='worker-name-1', uuid='uuid-worker-1', operative_system='osx', description='long-description-1', user=self.user1, git_feeder=False ) self.git_project1 = GitProjectEntry.objects.create(url='http://test/') self.git_user1 = GitUserEntry.objects.create( project=self.git_project1, name='user1', email='<EMAIL>' ) def tearDown(self): pass def test_feed_is_removed_if_git_project_is_deleted(self): command_group = CommandGroupEntry.objects.create(user=self.user1) FeedEntry.objects.create(command_group=command_group, worker=self.worker1, git_project=self.git_project1) self.assertEqual(1, FeedEntry.objects.all().count()) self.git_project1.delete() self.assertEqual(0, FeedEntry.objects.all().count()) def test_feed_deletion_also_deletes_command_group(self): command_group = CommandGroupEntry.objects.create(user=self.user1) feed_entry = FeedEntry.objects.create(command_group=command_group, worker=self.worker1, git_project=self.git_project1) self.assertEqual(1, FeedEntry.objects.all().count()) self.assertEqual(1, CommandGroupEntry.objects.all().count()) feed_entry.delete() self.assertEqual(0, FeedEntry.objects.all().count()) self.assertEqual(0, CommandGroupEntry.objects.all().count())
[ "app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.all", "app.logic.gitrepo.models.GitUserModel.GitUserEntry.objects.create", "app.logic.commandrepo.models.CommandGroupModel.CommandGroupEntry.objects.create", "app.logic.commandrepo.models.CommandGroupModel.CommandGroupEntry.objects.all", "app.logic.git...
[((528, 588), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""<EMAIL>"""', '"""<EMAIL>"""', '"""<PASSWORD>"""'], {}), "('<EMAIL>', '<EMAIL>', '<PASSWORD>')\n", (552, 588), False, 'from django.contrib.auth.models import User\n'), ((613, 785), 'app.logic.bluesteelworker.models.WorkerModel.WorkerEntry.objects.create', 'WorkerEntry.objects.create', ([], {'name': '"""worker-name-1"""', 'uuid': '"""uuid-worker-1"""', 'operative_system': '"""osx"""', 'description': '"""long-description-1"""', 'user': 'self.user1', 'git_feeder': '(False)'}), "(name='worker-name-1', uuid='uuid-worker-1',\n operative_system='osx', description='long-description-1', user=self.\n user1, git_feeder=False)\n", (639, 785), False, 'from app.logic.bluesteelworker.models.WorkerModel import WorkerEntry\n'), ((888, 938), 'app.logic.gitrepo.models.GitProjectModel.GitProjectEntry.objects.create', 'GitProjectEntry.objects.create', ([], {'url': '"""http://test/"""'}), "(url='http://test/')\n", (918, 938), False, 'from app.logic.gitrepo.models.GitProjectModel import GitProjectEntry\n'), ((964, 1054), 'app.logic.gitrepo.models.GitUserModel.GitUserEntry.objects.create', 'GitUserEntry.objects.create', ([], {'project': 'self.git_project1', 'name': '"""user1"""', 'email': '"""<EMAIL>"""'}), "(project=self.git_project1, name='user1', email=\n '<EMAIL>')\n", (991, 1054), False, 'from app.logic.gitrepo.models.GitUserModel import GitUserEntry\n'), ((1221, 1270), 'app.logic.commandrepo.models.CommandGroupModel.CommandGroupEntry.objects.create', 'CommandGroupEntry.objects.create', ([], {'user': 'self.user1'}), '(user=self.user1)\n', (1253, 1270), False, 'from app.logic.commandrepo.models.CommandGroupModel import CommandGroupEntry\n'), ((1280, 1389), 'app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.create', 'FeedEntry.objects.create', ([], {'command_group': 'command_group', 'worker': 'self.worker1', 'git_project': 'self.git_project1'}), '(command_group=command_group, 
worker=self.worker1,\n git_project=self.git_project1)\n', (1304, 1389), False, 'from app.logic.gitfeeder.models.FeedModel import FeedEntry\n'), ((1632, 1681), 'app.logic.commandrepo.models.CommandGroupModel.CommandGroupEntry.objects.create', 'CommandGroupEntry.objects.create', ([], {'user': 'self.user1'}), '(user=self.user1)\n', (1664, 1681), False, 'from app.logic.commandrepo.models.CommandGroupModel import CommandGroupEntry\n'), ((1703, 1812), 'app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.create', 'FeedEntry.objects.create', ([], {'command_group': 'command_group', 'worker': 'self.worker1', 'git_project': 'self.git_project1'}), '(command_group=command_group, worker=self.worker1,\n git_project=self.git_project1)\n', (1727, 1812), False, 'from app.logic.gitfeeder.models.FeedModel import FeedEntry\n'), ((1415, 1438), 'app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.all', 'FeedEntry.objects.all', ([], {}), '()\n', (1436, 1438), False, 'from app.logic.gitfeeder.models.FeedModel import FeedEntry\n'), ((1513, 1536), 'app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.all', 'FeedEntry.objects.all', ([], {}), '()\n', (1534, 1536), False, 'from app.logic.gitfeeder.models.FeedModel import FeedEntry\n'), ((1838, 1861), 'app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.all', 'FeedEntry.objects.all', ([], {}), '()\n', (1859, 1861), False, 'from app.logic.gitfeeder.models.FeedModel import FeedEntry\n'), ((1899, 1930), 'app.logic.commandrepo.models.CommandGroupModel.CommandGroupEntry.objects.all', 'CommandGroupEntry.objects.all', ([], {}), '()\n', (1928, 1930), False, 'from app.logic.commandrepo.models.CommandGroupModel import CommandGroupEntry\n'), ((1998, 2021), 'app.logic.gitfeeder.models.FeedModel.FeedEntry.objects.all', 'FeedEntry.objects.all', ([], {}), '()\n', (2019, 2021), False, 'from app.logic.gitfeeder.models.FeedModel import FeedEntry\n'), ((2059, 2090), 'app.logic.commandrepo.models.CommandGroupModel.CommandGroupEntry.objects.all', 
'CommandGroupEntry.objects.all', ([], {}), '()\n', (2088, 2090), False, 'from app.logic.commandrepo.models.CommandGroupModel import CommandGroupEntry\n')]
from django.conf.urls import patterns, url from irods_browser_app import views urlpatterns = patterns('', url(r'^login/$',views.login, name='irods_login'), url(r'^store/$',views.store, name='irods_store'), url(r'^upload/$',views.upload, name='irods_upload'), url(r'^upload_add/$',views.upload_add, name='irods_upload_add'), )
[ "django.conf.urls.url" ]
[((111, 159), 'django.conf.urls.url', 'url', (['"""^login/$"""', 'views.login'], {'name': '"""irods_login"""'}), "('^login/$', views.login, name='irods_login')\n", (114, 159), False, 'from django.conf.urls import patterns, url\n'), ((165, 213), 'django.conf.urls.url', 'url', (['"""^store/$"""', 'views.store'], {'name': '"""irods_store"""'}), "('^store/$', views.store, name='irods_store')\n", (168, 213), False, 'from django.conf.urls import patterns, url\n'), ((219, 270), 'django.conf.urls.url', 'url', (['"""^upload/$"""', 'views.upload'], {'name': '"""irods_upload"""'}), "('^upload/$', views.upload, name='irods_upload')\n", (222, 270), False, 'from django.conf.urls import patterns, url\n'), ((276, 339), 'django.conf.urls.url', 'url', (['"""^upload_add/$"""', 'views.upload_add'], {'name': '"""irods_upload_add"""'}), "('^upload_add/$', views.upload_add, name='irods_upload_add')\n", (279, 339), False, 'from django.conf.urls import patterns, url\n')]
#!/usr/bin/python """ Script to initialise CROP database. """ import sys from crop.constants import SQL_CONNECTION_STRING, SQL_DBNAME from crop.db import create_database def confirm(question): """ Ask user to enter Y or N (case-insensitive). :return: True if the answer is Y. """ answer = "" while answer not in ["y", "n"]: answer = input("{0} [Y/N]? ".format(question)).lower() return answer == "y" if __name__ == "__main__": if confirm("Create DB?"): create_database(SQL_CONNECTION_STRING, SQL_DBNAME) print("Finished.")
[ "crop.db.create_database" ]
[((518, 568), 'crop.db.create_database', 'create_database', (['SQL_CONNECTION_STRING', 'SQL_DBNAME'], {}), '(SQL_CONNECTION_STRING, SQL_DBNAME)\n', (533, 568), False, 'from crop.db import create_database\n')]
import rules # Projectroles dependency from projectroles import rules as pr_rules # To access common predicates # Predicates ------------------------------------------------------------- # TODO: If we need to assign new predicates, we do it here # Rules ------------------------------------------------------------------ # TODO: Rules should not be needed, use permissions for user rights # Permissions ------------------------------------------------------------ # Allow viewing the sample sheet of the project rules.add_perm( 'samplesheets.view_sheet', pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.is_project_contributor | pr_rules.is_project_guest, ) # Allow creating, importing or modifying the project's sample sheet rules.add_perm( 'samplesheets.edit_sheet', pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.is_project_contributor, ) # Allow managing sample sheet configuration and editing rules.add_perm( 'samplesheets.manage_sheet', pr_rules.is_project_owner | pr_rules.is_project_delegate, ) # Allow exporting a sample sheet from project rules.add_perm( 'samplesheets.export_sheet', pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.is_project_contributor | pr_rules.is_project_guest, ) # Allow viewing sample sheet versions rules.add_perm( 'samplesheets.view_versions', pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.is_project_contributor | pr_rules.is_project_guest, ) # Allow creating collection structure in iRODS rules.add_perm( 'samplesheets.create_colls', pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.is_project_contributor, ) # Allow deleting the project sample sheet rules.add_perm( 'samplesheets.delete_sheet', pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.is_project_contributor, )
[ "rules.add_perm" ]
[((526, 695), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.view_sheet"""', '(pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.\n is_project_contributor | pr_rules.is_project_guest)'], {}), "('samplesheets.view_sheet', pr_rules.is_project_owner |\n pr_rules.is_project_delegate | pr_rules.is_project_contributor |\n pr_rules.is_project_guest)\n", (540, 695), False, 'import rules\n'), ((780, 917), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.edit_sheet"""', '(pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.\n is_project_contributor)'], {}), "('samplesheets.edit_sheet', pr_rules.is_project_owner |\n pr_rules.is_project_delegate | pr_rules.is_project_contributor)\n", (794, 917), False, 'import rules\n'), ((990, 1095), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.manage_sheet"""', '(pr_rules.is_project_owner | pr_rules.is_project_delegate)'], {}), "('samplesheets.manage_sheet', pr_rules.is_project_owner |\n pr_rules.is_project_delegate)\n", (1004, 1095), False, 'import rules\n'), ((1150, 1321), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.export_sheet"""', '(pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.\n is_project_contributor | pr_rules.is_project_guest)'], {}), "('samplesheets.export_sheet', pr_rules.is_project_owner |\n pr_rules.is_project_delegate | pr_rules.is_project_contributor |\n pr_rules.is_project_guest)\n", (1164, 1321), False, 'import rules\n'), ((1376, 1548), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.view_versions"""', '(pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.\n is_project_contributor | pr_rules.is_project_guest)'], {}), "('samplesheets.view_versions', pr_rules.is_project_owner |\n pr_rules.is_project_delegate | pr_rules.is_project_contributor |\n pr_rules.is_project_guest)\n", (1390, 1548), False, 'import rules\n'), ((1612, 1751), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.create_colls"""', 
'(pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.\n is_project_contributor)'], {}), "('samplesheets.create_colls', pr_rules.is_project_owner |\n pr_rules.is_project_delegate | pr_rules.is_project_contributor)\n", (1626, 1751), False, 'import rules\n'), ((1810, 1949), 'rules.add_perm', 'rules.add_perm', (['"""samplesheets.delete_sheet"""', '(pr_rules.is_project_owner | pr_rules.is_project_delegate | pr_rules.\n is_project_contributor)'], {}), "('samplesheets.delete_sheet', pr_rules.is_project_owner |\n pr_rules.is_project_delegate | pr_rules.is_project_contributor)\n", (1824, 1949), False, 'import rules\n')]
from django.conf.urls.defaults import patterns, url, include from pycash.controllers import TaxController as controller urlpatterns = patterns('', (r'^upcomingList$', controller.upcomingList), (r'^upcoming$', controller.upcoming), url(r'^pay$', controller.pay, name="tax_pay"), (r'^list$', controller.list), url(r'^save$', controller.save_or_update, name="tax_save"), (r'^update$', controller.save_or_update), url(r'^delete$', controller.delete, name="tax_delete"), (r'^$', controller.index) )
[ "django.conf.urls.defaults.url" ]
[((244, 288), 'django.conf.urls.defaults.url', 'url', (['"""^pay$"""', 'controller.pay'], {'name': '"""tax_pay"""'}), "('^pay$', controller.pay, name='tax_pay')\n", (247, 288), False, 'from django.conf.urls.defaults import patterns, url, include\n'), ((329, 386), 'django.conf.urls.defaults.url', 'url', (['"""^save$"""', 'controller.save_or_update'], {'name': '"""tax_save"""'}), "('^save$', controller.save_or_update, name='tax_save')\n", (332, 386), False, 'from django.conf.urls.defaults import patterns, url, include\n'), ((439, 492), 'django.conf.urls.defaults.url', 'url', (['"""^delete$"""', 'controller.delete'], {'name': '"""tax_delete"""'}), "('^delete$', controller.delete, name='tax_delete')\n", (442, 492), False, 'from django.conf.urls.defaults import patterns, url, include\n')]
from abc import abstractmethod from functools import wraps from dd.api.workflow.utils import normalize_columns class CallableBuilder(object): # TODO : remove reference to dataset # replace with lighter reference to dataset.output_table def __init__(self, context, dataset=None): self.context = context self.dataset = dataset @abstractmethod def build(self): raise NotImplementedError("Callable Builder is an Abstract Class") class TransformCallableBuilder(CallableBuilder): def __init__(self, context, dataset=None, transformation=None, function_args=None, function_kwargs=None, write_options=None): super(TransformCallableBuilder, self).__init__(context, dataset) self.write_options = write_options or {} self.transformation = transformation self.function_kwargs = function_kwargs or {} self.function_args = function_args or [] def bind_to(self, dataset): return TransformCallableBuilder(self.context, dataset, transformation=self.transformation, function_args=self.function_args, function_kwargs=self.function_kwargs, write_options=self.write_options) def build(self): @wraps(self.transformation) def run(): function_args = [p.collect() for p in self.dataset.predecessors if hasattr(p, 'collect')] function_args += self.function_args new_df = self.transformation(*function_args, **self.function_kwargs) if type(new_df) is tuple: if type(self.dataset.output_table) is not tuple and len(self.dataset.output_table) != len(new_df): raise ValueError("Invalid argument output_table: output table must be specified when function" " returns multiple dataframes") gen_write_options = [] if type(self.write_options) is list and len(new_df) != len(self.write_options): raise ValueError("Invalid argument write_options: if a list of write_options is specified, this" " list must be of the same size than the number of dataframes") elif type(self.write_options) is not list: for i in range(len(self.dataset.output_table)): gen_write_options.append(self.write_options) for i in 
range(len(self.dataset.output_table)): df = new_df[i] name = self.dataset.output_table[i] opts = gen_write_options[i] self.context._save_dataframe(df, name, **opts) else: self.context._save_dataframe(new_df, self.dataset.output_table, **self.write_options) return new_df return run def with_transformation(self, transformation): return TransformCallableBuilder(self.context, self.dataset, transformation=transformation, function_args=self.function_args, function_kwargs=self.function_kwargs, write_options=self.write_options) def with_kwargs(self, **kwargs): return TransformCallableBuilder(self.context, self.dataset, transformation=self.transformation, function_args=self.function_args, function_kwargs=kwargs, write_options=self.write_options) def with_write_options(self, **write_options): return TransformCallableBuilder(self.context, self.dataset, transformation=self.transformation, function_args=self.function_args, function_kwargs=self.function_kwargs, write_options=write_options) def with_args(self, *args): return TransformCallableBuilder(self.context, self.dataset, transformation=self.transformation, function_args=args, function_kwargs=self.function_kwargs, write_options=self.write_options) class LoadFileCallableBuilder(CallableBuilder): def __init__(self, context, dataset=None, path=None, reader=None, write_options=None, normalization=False): super(LoadFileCallableBuilder, self).__init__(context, dataset) self.path = path self.reader = reader self.write_options = write_options or {} self.normalization = normalization def bind_to(self, dataset): return LoadFileCallableBuilder(self.context, dataset, path=self.path, reader=self.reader, write_options=self.write_options, normalization=self.normalization) def build(self): def run(): df = self.reader.read(self.path) if self.normalization: df.columns = normalize_columns(df.columns) self.context._save_dataframe(df, self.dataset.output_table, **self.write_options) return df return run def load(self): raise NotImplementedError 
def with_path(self, path): return LoadFileCallableBuilder(self.context, self.dataset, path=path, reader=self.reader, write_options=self.write_options, normalization=self.normalization) def with_reader(self, reader): return LoadFileCallableBuilder(self.context, self.dataset, path=self.path, reader=reader, write_options=self.write_options, normalization=self.normalization) def with_write_options(self, **write_options): return LoadFileCallableBuilder(self.context, self.dataset, path=self.path, reader=self.reader, write_options=write_options, normalization=self.normalization) def with_normalization(self, boolean): return LoadFileCallableBuilder(self.context, self.dataset, path=self.path, reader=self.reader, write_options=self.write_options, normalization=boolean) class ModelFitCallableBuilder(CallableBuilder): def __init__(self, context, model=None, model_address=None, dataset=None, target=None, columns=None, write_options=None): super(ModelFitCallableBuilder, self).__init__(context, dataset) self.model = model self.model_address = model_address self.target = target self._columns = columns self.write_options = write_options or {} def bind_to(self, dataset): return ModelFitCallableBuilder(context=self.context, model=self.model, model_address=self.model_address, dataset=dataset, target=self.target, columns=self._columns, write_options=self.write_options) def build(self): def run(): train_set = self.dataset.collect() model = self.model.collect() model_address = self.model.model_address columns = train_set.columns if self._columns is None \ else self._columns if self.target is not None: X = train_set[columns].drop(self.target, axis=1) y = train_set[self.target] model.fit(X, y) else: X = train_set[columns] model.fit(X) self.context._save_model(model, model_address, **self.write_options) return model return run def with_columns(self, columns): return ModelFitCallableBuilder(context=self.context, model=self.model, model_address=self.model_address, dataset=self.dataset, 
target=self.target, columns=columns, write_options=self.write_options) def with_model(self, model): return ModelFitCallableBuilder(context=self.context, model=model, model_address=self.model_address, dataset=self.dataset, target=self.target, columns=self._columns, write_options=self.write_options) def with_target(self, target): return ModelFitCallableBuilder(context=self.context, model=self.model, model_address=self.model_address, dataset=self.dataset, target=target, columns=self._columns, write_options=self.write_options) def with_write_options(self, **write_options): return ModelFitCallableBuilder(context=self.context, model=self.model, model_address=self.model_address, dataset=self.dataset, target=self.target, columns=self._columns, write_options=write_options) class QueryCallableBuilder(CallableBuilder): def __init__(self, context, query=None, dataset=None, create_table=False, if_exists='replace'): super(QueryCallableBuilder, self).__init__(context) self.query = query self.dataset = dataset self.create_table = create_table self.if_exists = if_exists def bind_to(self, dataset): return QueryCallableBuilder(context=self.context, query=self.query, dataset=dataset, create_table=self.create_table, if_exists=self.if_exists) def build(self): def query(): if not self.create_table: dataframe = self.context._query(self.query) self.context._save_dataframe(dataframe, self.dataset.output_table, index=False, if_exists=self.if_exists) return dataframe else: query = "" if self.if_exists == 'replace': query = "DROP TABLE IF EXISTS {output_table};".format( output_table=self.dataset.output_table) query += "CREATE TABLE {output_table} AS {query};".format( output_table=self.dataset.output_table, query=self.query) self.context._execute(query) return query def with_query(self, query): return QueryCallableBuilder(context=self.context, query=query, dataset=self.dataset, create_table=self.create_table, if_exists=self.if_exists) class ActionCallableBuilder(CallableBuilder): def 
__init__(self, context, dataset=None, operation=None, operation_args=None, operation_kwargs=None): super(ActionCallableBuilder, self).__init__(context, dataset) self.operation = operation self.operation_args = operation_args or [] self.operation_kwargs = operation_kwargs or {} def bind_to(self, dataset): return ActionCallableBuilder(context=self.context, dataset=dataset, operation=self.operation, operation_args=self.operation_args, operation_kwargs=self.operation_kwargs) def build(self): @wraps(self.operation) def run(): operation_args = [p.collect() for p in self.dataset.predecessors if hasattr(p, 'collect')] operation_args += self.operation_args self.operation(*operation_args, **self.operation_kwargs) return run def with_operation(self, operation): return ActionCallableBuilder(context=self.context, dataset=self.dataset, operation=operation, operation_args=self.operation_args, operation_kwargs=self.operation_kwargs) def with_kwargs(self, **kwargs): return ActionCallableBuilder(context=self.context, dataset=self.dataset, operation=self.operation, operation_args=self.operation_args, operation_kwargs=kwargs) def with_args(self, *args): return ActionCallableBuilder(context=self.context, dataset=self.dataset, operation=self.operation, operation_args=args, operation_kwargs=self.operation_kwargs) class LoadDataFrameCallableBuilder(CallableBuilder): def __init__(self, context, dataset=None, dataframe=None, write_options=None): super(LoadDataFrameCallableBuilder, self).__init__(context, dataset) self.dataframe = dataframe self.write_options = write_options or {} def build(self): def run(): self.context._save_dataframe(self.dataframe, self.dataset.output_table, **self.write_options) return self.dataframe return run def with_dataframe(self, dataframe): return LoadDataFrameCallableBuilder(context=self.context, dataset=self.dataset, dataframe=dataframe, write_options=self.write_options) def with_write_options(self, **write_options): return 
LoadDataFrameCallableBuilder(context=self.context, dataset=self.dataset, dataframe=self.dataframe, write_options=write_options) def bind_to(self, dataset): return LoadDataFrameCallableBuilder(context=self.context, dataset=dataset, dataframe=self.dataframe, write_options=self.write_options) operators_map = {"transform": TransformCallableBuilder, "load": LoadFileCallableBuilder, "model_fit": ModelFitCallableBuilder, "query": QueryCallableBuilder}
[ "dd.api.workflow.utils.normalize_columns", "functools.wraps" ]
[((1409, 1435), 'functools.wraps', 'wraps', (['self.transformation'], {}), '(self.transformation)\n', (1414, 1435), False, 'from functools import wraps\n'), ((14081, 14102), 'functools.wraps', 'wraps', (['self.operation'], {}), '(self.operation)\n', (14086, 14102), False, 'from functools import wraps\n'), ((5911, 5940), 'dd.api.workflow.utils.normalize_columns', 'normalize_columns', (['df.columns'], {}), '(df.columns)\n', (5928, 5940), False, 'from dd.api.workflow.utils import normalize_columns\n')]
# Generated by Django 2.1.15 on 2021-05-21 08:24 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Company', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='title')), ], ), migrations.CreateModel( name='Person', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('first_name', models.CharField(max_length=255, verbose_name='title')), ('last_name', models.CharField(max_length=255, verbose_name='title')), ], ), migrations.CreateModel( name='Staff', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('position', models.CharField(max_length=255, verbose_name='position')), ('order', models.IntegerField(default=0, editable=False, verbose_name='order')), ('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='staff', to='sortables.Company')), ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='companies', to='sortables.Person')), ], ), ]
[ "django.db.models.ForeignKey", "django.db.models.AutoField", "django.db.models.CharField", "django.db.models.IntegerField" ]
[((337, 430), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (353, 430), False, 'from django.db import migrations, models\n'), ((455, 509), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (471, 509), False, 'from django.db import migrations, models\n'), ((641, 734), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (657, 734), False, 'from django.db import migrations, models\n'), ((764, 818), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (780, 818), False, 'from django.db import migrations, models\n'), ((851, 905), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (867, 905), False, 'from django.db import migrations, models\n'), ((1036, 1129), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1052, 1129), False, 'from django.db import migrations, models\n'), ((1157, 1214), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""position"""'}), "(max_length=255, verbose_name='position')\n", (1173, 1214), False, 'from django.db import migrations, models\n'), ((1243, 1311), 'django.db.models.IntegerField', 'models.IntegerField', ([], 
{'default': '(0)', 'editable': '(False)', 'verbose_name': '"""order"""'}), "(default=0, editable=False, verbose_name='order')\n", (1262, 1311), False, 'from django.db import migrations, models\n'), ((1342, 1455), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""staff"""', 'to': '"""sortables.Company"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='staff', to='sortables.Company')\n", (1359, 1455), False, 'from django.db import migrations, models\n'), ((1480, 1596), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""companies"""', 'to': '"""sortables.Person"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='companies', to='sortables.Person')\n", (1497, 1596), False, 'from django.db import migrations, models\n')]
r""" Echelle Spectrum ---------------- An abstract base class for a high resolution spectrum, for some echelle order :math:`m \in ` out of :math:`M` total orders, each with vectors for wavelength, flux, and uncertainty, e.g. :math:`F_m(\lambda)`. This class is a subclass of specutils' Spectrum1D and is intended to have its methods inherited by specific instrument classes. EchelleSpectrum ############### """ import warnings import logging import numpy as np import astropy import pandas as pd from astropy.io import fits from astropy import units as u from astropy.units import Quantity from astropy.wcs import WCS, FITSFixedWarning from astropy.nddata import StdDevUncertainty from scipy.stats import median_abs_deviation from scipy.interpolate import InterpolatedUnivariateSpline from specutils.analysis import equivalent_width from scipy.interpolate import UnivariateSpline, interp1d from scipy.signal import savgol_filter from astropy.constants import R_jup, R_sun, G, M_jup, R_earth, c from astropy.modeling.physical_models import BlackBody import specutils from muler.utilities import apply_numpy_mask, resample_list # from barycorrpy import get_BC_vel from astropy.coordinates import SkyCoord, EarthLocation from astropy.time import Time from scipy.optimize import minimize import matplotlib.pyplot as plt import os import copy from specutils.spectra.spectral_region import SpectralRegion from specutils.analysis import equivalent_width log = logging.getLogger(__name__) from astropy.io.fits.verify import VerifyWarning warnings.simplefilter("ignore", category=VerifyWarning) # See Issue: https://github.com/astropy/specutils/issues/779 warnings.filterwarnings( "ignore", category=astropy.utils.exceptions.AstropyDeprecationWarning ) warnings.filterwarnings("ignore", category=FITSFixedWarning) # See Issue: https://github.com/astropy/specutils/issues/800 warnings.filterwarnings("ignore", category=RuntimeWarning) with warnings.catch_warnings(): warnings.filterwarnings("ignore") from 
specutils import Spectrum1D from specutils import SpectrumList class EchelleSpectrum(Spectrum1D): r""" An abstract base class to provide common methods that will be inherited by instrument-specific classes """ def __init__(self, *args, **kwargs): self.ancillary_spectra = None super().__init__(*args, **kwargs) @property def snr(self): """The Signal-to-Noise Ratio :math:`\frac{S}{N}`, the flux divided by the uncertainty The spectrum should have an input uncertainty, otherwise returns NaNs """ if self.uncertainty is not None: if self.uncertainty.uncertainty_type == "std": snr_estimate = self.flux / self.uncertainty.quantity elif self.uncertainty.uncertainty_type == "ivar": snr_estimate = self.flux * np.sqrt(self.uncertainty.quantity) else: message = "SNR only supports standard deviation and inverse variance uncertainty" raise NotImplementedError(message) else: snr_estimate = np.repeat(np.NaN, len(self.flux)) * u.dimensionless_unscaled return snr_estimate @property def available_ancillary_spectra(self): """The list of available ancillary spectra""" output = [] if hasattr(self, "ancillary_spectra"): if self.ancillary_spectra is not None: output = [ ancillary_spectrum for ancillary_spectrum in self.ancillary_spectra if ancillary_spectrum in self.meta.keys() ] return output def estimate_barycorr(self): """Estimate the Barycentric Correction from the Date and Target Coordinates Returns ------- barycentric_corrections : float Barycentric correction for targets in units of m/s """ obstime = self.astropy_time loc = EarthLocation.of_site(self.site_name) sc = SkyCoord(ra=self.RA, dec=self.DEC) barycorr = sc.radial_velocity_correction(obstime=obstime, location=loc) return barycorr def measure_ew(self, lower=None, upper=None): """Measure the equivalent width of a given spectrum Parameters ---------- lower : AstroPy Quantity or float The short wavelength limit at which to define the EW lower bound. If the value is a float, it assume Angstrom units. 
upper : AstroPy Quantity or float The long wavelength limit at which to define the EW upper bound. If the value is a float, it assume Angstrom units. Returns ------- equivalent width : (scalar) """ if type(lower) is not u.Quantity: # Assume it's Angstroms lower = lower * u.Angstrom upper = upper * u.Angstrom ew = equivalent_width(self, regions=SpectralRegion(lower, upper)) return ew def normalize(self): """Normalize spectrum by its median value Returns ------- normalized_spec : (KeckNIRSPECSpectrum) Normalized Spectrum """ spec = self._copy( spectral_axis=self.wavelength.value * self.wavelength.unit, wcs=None ) median_flux = np.nanmedian(spec.flux.value) # Each ancillary spectrum (e.g. sky) should also be normalized meta_out = copy.deepcopy(spec.meta) for ancillary_spectrum in self.available_ancillary_spectra: meta_out[ancillary_spectrum] = meta_out[ancillary_spectrum].divide( median_flux * spec.flux.unit, handle_meta="ff" ) # spec.meta = meta_out return spec.divide( median_flux * spec.flux.unit, handle_meta="first_found" )._copy(meta=meta_out) def flatten_by_black_body(self, Teff): """Flatten the spectrum by a scaled black body, usually after deblazing Note: This method applies mostly to high-bandwidth stellar spectra. Parameters ---------- Teff : float The effective temperature of the black body in Kelvin units """ blackbody = BlackBody(temperature=Teff * u.K)(self.wavelength) blackbody = blackbody / np.mean(blackbody) wl_scaled = self.wavelength wl_scaled = wl_scaled / np.median(wl_scaled) try: return self.divide(blackbody / wl_scaled ** 2, handle_meta="first_found") except u.UnitConversionError: return self.divide( blackbody / wl_scaled ** 2 * self.unit, handle_meta="first_found" ) def flatten( self, window_length=101, polyorder=2, return_trend=False, break_tolerance=5, niters=3, sigma=3, mask=None, **kwargs, ): """Removes the low frequency trend using scipy's Savitzky-Golay filter. This method wraps `scipy.signal.savgol_filter`. 
Abridged from the `lightkurve` method with the same name for flux time series. Parameters ---------- window_length : int The length of the filter window (i.e. the number of coefficients). ``window_length`` must be a positive odd integer. polyorder : int The order of the polynomial used to fit the samples. ``polyorder`` must be less than window_length. return_trend : bool If `True`, the method will return a tuple of two elements (flattened_spec, trend_spec) where trend_spec is the removed trend. break_tolerance : int If there are large gaps in wavelength, flatten will split the flux into several sub-spectra and apply `savgol_filter` to each individually. A gap is defined as a region in wavelength larger than `break_tolerance` times the median gap. To disable this feature, set `break_tolerance` to None. niters : int Number of iterations to iteratively sigma clip and flatten. If more than one, will perform the flatten several times, removing outliers each time. sigma : int Number of sigma above which to remove outliers from the flatten mask : boolean array with length of self.wavelength Boolean array to mask data with before flattening. Flux values where mask is True will not be used to flatten the data. An interpolated result will be provided for these points. Use this mask to remove data you want to preserve, e.g. spectral regions of interest. **kwargs : dict Dictionary of arguments to be passed to `scipy.signal.savgol_filter`. Returns ------- flatten_spec : `EchelleSpectrum` New light curve object with long-term trends removed. If ``return_trend`` is set to ``True``, this method will also return: trend_spec : `EchelleSpectrum` New light curve object containing the trend that was removed. """ if mask is None: mask = np.ones(len(self.wavelength), dtype=bool) else: # Deep copy ensures we don't change the original. 
mask = copy.deepcopy(~mask) # No NaNs mask &= np.isfinite(self.flux) # No outliers mask &= np.nan_to_num(np.abs(self.flux - np.nanmedian(self.flux))) <= ( np.nanstd(self.flux) * sigma ) for iter in np.arange(0, niters): if break_tolerance is None: break_tolerance = np.nan if polyorder >= window_length: polyorder = window_length - 1 log.warning( "polyorder must be smaller than window_length, " "using polyorder={}.".format(polyorder) ) # Split the lightcurve into segments by finding large gaps in time dlam = self.wavelength.value[mask][1:] - self.wavelength.value[mask][0:-1] with warnings.catch_warnings(): # Ignore warnings due to NaNs warnings.simplefilter("ignore", RuntimeWarning) cut = np.where(dlam > break_tolerance * np.nanmedian(dlam))[0] + 1 low = np.append([0], cut) high = np.append(cut, len(self.wavelength[mask])) # Then, apply the savgol_filter to each segment separately trend_signal = Quantity( np.zeros(len(self.wavelength[mask])), unit=self.flux.unit ) for l, h in zip(low, high): # Reduce `window_length` and `polyorder` for short segments; # this prevents `savgol_filter` from raising an exception # If the segment is too short, just take the median if np.any([window_length > (h - l), (h - l) < break_tolerance]): trend_signal[l:h] = np.nanmedian(self.flux[mask][l:h]) else: # Scipy outputs a warning here that is not useful, will be fixed in version 1.2 with warnings.catch_warnings(): warnings.simplefilter("ignore", FutureWarning) trsig = savgol_filter( x=self.flux.value[mask][l:h], window_length=window_length, polyorder=polyorder, **kwargs, ) trend_signal[l:h] = Quantity(trsig, trend_signal.unit) # Ignore outliers; # Note that it's possible numerical noise can cause outliers... # If this happens you can add `1e-14` below to avoid detecting # outliers which are merely caused by numerical noise. 
mask1 = np.nan_to_num(np.abs(self.flux[mask] - trend_signal)) < ( np.nanstd(self.flux[mask] - trend_signal) * sigma # + Quantity(1e-14, self.flux.unit) ) f = interp1d( self.wavelength.value[mask][mask1], trend_signal[mask1], fill_value="extrapolate", ) trend_signal = Quantity(f(self.wavelength.value), self.flux.unit) mask[mask] &= mask1 flatten_spec = copy.deepcopy(self) trend_spec = self._copy(flux=trend_signal) with warnings.catch_warnings(): # ignore invalid division warnings warnings.simplefilter("ignore", RuntimeWarning) flatten_spec = flatten_spec.divide(trend_spec, handle_meta="ff") if return_trend: return flatten_spec, trend_spec else: return flatten_spec def deblaze(self, method="spline"): """Remove blaze function from spectrum by interpolating a spline function Note: It is recommended to remove NaNs before running this operation, otherwise edge effects can be appear from zero-padded edges. Returns ------- blaze corrrected spectrum """ if method == "spline": if np.any(np.isnan(self.flux)): log.warning( "your spectrum contains NaNs, " "it is highly recommended to run `.remove_nans()` before deblazing" ) spline = UnivariateSpline(self.wavelength, np.nan_to_num(self.flux), k=5) interp_spline = spline(self.wavelength) * self.flux.unit no_blaze = self.divide(interp_spline, handle_meta="first_found") if "sky" in self.meta.keys(): new_sky = self.sky.divide(interp_spline, handle_meta="first_found") no_blaze.meta["sky"] = new_sky return no_blaze else: raise NotImplementedError def barycentric_correct(self): """shift spectrum by barycenter velocity Returns ------- barycenter corrected Spectrum : (KeckNIRSPECSpectrum) """ bcRV = +1.0 * self.estimate_barycorr() return self.rv_shift(bcRV) def rv_shift(self, velocity): """ Shift velocity of spectrum in astropy units (or km/s if input velocity is just a float) """ if ( type(velocity) == float ): # If supplied velocity is not using astropy units, default to km/s velocity = velocity * (u.km / u.s) try: self.radial_velocity 
= velocity return self._copy( spectral_axis=self.wavelength.value * self.wavelength.unit, wcs=None, ) except: log.error( "rv shift requires specutils version >= 1.2, you have: {}".format( specutils.__version__ ) ) raise def remove_nans(self): """Remove data points that have NaN fluxes By default the method removes NaN's from target, sky, and lfc fibers. Returns ------- finite_spec : (KeckNIRSPECSpectrum) Spectrum with NaNs removed """ keep_indices = (self.mask == False) & (self.flux == self.flux) return self.apply_boolean_mask(keep_indices) def smooth_spectrum( self, return_model=False, optimize_kernel=False, bandwidth=150.0 ): """Smooth the spectrum using Gaussian Process regression Parameters ------- return_model : (bool) Whether or not to return the gp model, which takes a wavelength axis as input and outputs the smooth trend optimize_kernel : (bool) Whether to optimize the GP hyperparameters: correlation scale and amplitude bandwidth : (float) The smoothing bandwidth in Angstroms. Defaults to 150 Angstrom lengthscale. Returns ------- smoothed_spec : (EchelleSpectrum) Smooth version of input Spectrum """ try: from celerite2 import terms import celerite2 except ImportError: raise ImportError( "You need to install celerite2 to use the smoothing='celerite' method." 
) if self.uncertainty is not None: unc = self.uncertainty.array else: unc = np.repeat(np.nanmedian(self.flux.value) / 100.0, len(self.flux)) # TODO: change rho to depend on the bandwidth kernel = terms.SHOTerm(sigma=0.01, rho=bandwidth, Q=0.25) gp = celerite2.GaussianProcess(kernel, mean=0.0) gp.compute(self.wavelength, yerr=unc) if optimize_kernel: # Construct the GP model with celerite def set_params(params, gp): gp.mean = params[0] theta = np.exp(params[1:]) gp.kernel = terms.SHOTerm(sigma=theta[0], rho=theta[1], Q=0.5) gp.compute(self.wavelength.value, yerr=unc + theta[2], quiet=True) return gp def neg_log_like(params, gp): gp = set_params(params, gp) return -gp.log_likelihood(self.flux.value) initial_params = [np.log(1), np.log(0.001), np.log(5.0), np.log(0.01)] soln = minimize(neg_log_like, initial_params, method="L-BFGS-B", args=(gp,)) opt_gp = set_params(soln.x, gp) else: opt_gp = gp mean_model = opt_gp.predict(self.flux.value, t=self.wavelength.value) smoothed_spectrum = self.__class__( spectral_axis=self.wavelength.value * self.wavelength.unit, flux=mean_model * self.flux.unit, uncertainty=None, mask=np.zeros_like(mean_model, dtype=np.bool), meta=copy.deepcopy(self.meta), wcs=None, ) if return_model: gp_model = lambda wl: opt_gp.predict(self.flux.value, t=wl) return (smoothed_spectrum, gp_model) else: return smoothed_spectrum def plot(self, ax=None, ylo=0.6, yhi=1.2, figsize=(10, 4), **kwargs): """Plot a quick look of the spectrum" Parameters ---------- ax : `~matplotlib.axes.Axes` A matplotlib axes object to plot into. If no axes is provided, a new one will be generated. 
ylo : scalar Lower limit of the y axis yhi : scalar Upper limit of the y axis figsize : tuple The figure size for the plot label : str The legend label to for plt.legend() Returns ------- ax : (`~matplotlib.axes.Axes`) The axis to display and/or modify """ if ax is None: fig, ax = plt.subplots(1, figsize=figsize) ax.set_ylim(ylo, yhi) ax.set_xlabel("$\lambda \;(\AA)$") ax.set_ylabel("Flux") if hasattr(self, "spectrographname"): ax.set_title(self.spectrographname + " Spectrum") ax.step(self.wavelength, self.flux, **kwargs, where="mid") else: ax.step(self.wavelength, self.flux, **kwargs, where="mid") return ax def remove_outliers(self, threshold=5): """Remove outliers above threshold Parameters ---------- threshold : float The sigma-clipping threshold (in units of sigma) Returns ------- clean_spec : (KeckNIRSPECSpectrum) Cleaned version of input Spectrum """ residual = self.flux - self.smooth_spectrum().flux mad = median_abs_deviation(residual.value, nan_policy="omit") keep_indices = (np.abs(residual.value) < (threshold * mad)) == True return self.apply_boolean_mask(keep_indices) def trim_edges(self, limits=None): """Trim the order edges, which falloff in SNR This method applies limits on absolute x pixel values, regardless of the order of previous destructive operations, which may not be the intended behavior in some applications. Parameters ---------- limits : tuple The index bounds (lo, hi) for trimming the order Returns ------- trimmed_spec : (EchelleSpectrum) Trimmed version of input Spectrum """ if limits is None: limits = self.noisy_edges lo, hi = limits if self.meta is not None: if "x_values" in self.meta.keys(): x_values = self.meta["x_values"] else: log.warn( "The spectrum metadata is missing its native pixel location labels. " "Proceeding by assuming contiguous pixel labels, which may not be what you want." 
) x_values = np.arange(len(self.wavelength)) keep_indices = (x_values > lo) & (x_values < hi) return self.apply_boolean_mask(keep_indices) def estimate_uncertainty(self): """Estimate the uncertainty based on residual after smoothing Returns ------- uncertainty : (np.float) Typical uncertainty """ residual = self.flux - self.smooth_spectrum().flux return median_abs_deviation(residual.value) def to_HDF5(self, path, file_basename): """Export to spectral order to HDF5 file format This format is required for per-order Starfish input Parameters ---------- path : str The directory destination for the HDF5 file file_basename : str The basename of the file to which the order number and extension are appended. Typically source name that matches a database entry. """ try: import h5py except ImportError: raise ImportError("You need to install h5py to export to the HDF5 format.") grating_order = self.meta["m"] out_path = path + "/" + file_basename + "_m{:03d}.hdf5".format(grating_order) # The mask should be ones everywhere mask_out = np.ones(len(self.wavelength), dtype=int) f_new = h5py.File(out_path, "w") f_new.create_dataset("fls", data=self.flux.value) f_new.create_dataset("wls", data=self.wavelength.to(u.Angstrom).value) f_new.create_dataset("sigmas", data=self.uncertainty.array) f_new.create_dataset("masks", data=mask_out) f_new.close() def resample_list(self, specList, **kwargs): """ Resample a single EchelleSpectrum object into a EchelleSpectrumList object. Useful for converting models into echelle spectra with multiple orders. 
""" return resample_list(self, specList, **kwargs) def apply_boolean_mask(self, mask): """Apply a boolean mask to the spectrum and any available ancillary spectra Parameters ---------- mask: boolean mask, typically a numpy array The boolean mask with numpy-style masking: True means "keep" that index and False means discard that index """ spec = apply_numpy_mask(self, mask) for ancillary_spectrum in self.available_ancillary_spectra: spec.meta[ancillary_spectrum] = apply_numpy_mask( spec.meta[ancillary_spectrum], mask ) return spec class EchelleSpectrumList(SpectrumList): r""" An enhanced container for a list of Echelle spectral orders """ def __init__(self, *args, **kwargs): self.normalization_order_index = 0 super().__init__(*args, **kwargs) def normalize(self, order_index=0): """Normalize all orders to one of the other orders""" index = self.normalization_order_index median_flux = copy.deepcopy(np.nanmedian(self[index].flux)) for i in range(len(self)): self[i] = self[i].divide(median_flux, handle_meta="first_found") return self def remove_nans(self): """Remove all the NaNs""" # TODO: is this in-place overriding of self allowed? # May have unintended consequences? # Consider making a copy instead... for i in range(len(self)): self[i] = self[i].remove_nans() return self def remove_outliers(self, threshold=5): """Remove all the outliers Parameters ---------- threshold : float The sigma-clipping threshold (in units of sigma) """ for i in range(len(self)): self[i] = self[i].remove_outliers(threshold=threshold) return self def trim_edges(self, limits=None): """Trim all the edges""" for i in range(len(self)): self[i] = self[i].trim_edges(limits) return self def deblaze(self, method="spline"): """Remove blaze function from all orders by interpolating a spline function Note: It is recommended to remove NaNs before running this operation, otherwise effects can be appear from zero-padded edges. 
""" spec_out = copy.deepcopy(self) for i in range(len(self)): spec_out[i] = self[i].deblaze(method=method) return spec_out def flatten_by_black_body(self, Teff): """Flatten by black body""" spec_out = copy.deepcopy(self) index = self.normalization_order_index median_wl = copy.deepcopy(np.nanmedian(self[index].wavelength)) blackbody_func = BlackBody(temperature=Teff * u.K) blackbody_ref = blackbody_func(median_wl) for i in range(len(spec_out)): blackbody = ( blackbody_func(spec_out[i].wavelength) / blackbody_ref / (spec_out[i].wavelength / median_wl) ** 2 ) try: spec_out[i] = spec_out[i].divide(blackbody, handle_meta="first_found") except u.UnitConversionError: spec_out[i] = spec_out[i].divide( blackbody * self.unit, handle_meta="first_found" ) return spec_out def to_HDF5(self, path, file_basename): """Save all spectral orders to the HDF5 file format""" for i in range(len(self)): self[i].to_HDF5(path, file_basename) def stitch(self): """Stitch all the spectra together, assuming zero overlap in wavelength.""" spec = copy.deepcopy(self) wls = ( np.hstack([spec[i].wavelength.value for i in range(len(spec))]) * spec[0].wavelength.unit ) fluxes = ( np.hstack([spec[i].flux.value for i in range(len(spec))]) * spec[0].flux.unit ) if spec[0].uncertainty is not None: # HACK We assume if one order has it, they all do, and that it's StdDev unc = np.hstack([spec[i].uncertainty.array for i in range(len(self))]) unc_out = StdDevUncertainty(unc) else: unc_out = None # Stack the x_values: x_values = np.hstack([spec[i].meta["x_values"] for i in range(len(spec))]) meta_out = copy.deepcopy(spec[0].meta) meta_out["x_values"] = x_values for ancillary_spectrum in spec[0].available_ancillary_spectra: if spec[0].meta[ancillary_spectrum].meta is not None: meta_of_meta = spec[0].meta[ancillary_spectrum].meta x_values = np.hstack( [ spec[i].meta[ancillary_spectrum].meta["x_values"] for i in range(len(spec)) ] ) meta_of_meta["x_values"] = x_values else: meta_of_meta = None wls_anc = np.hstack( 
[spec[i].meta[ancillary_spectrum].wavelength for i in range(len(spec))] ) fluxes_anc = np.hstack( [spec[i].meta[ancillary_spectrum].flux for i in range(len(spec))] ) meta_out[ancillary_spectrum] = spec[0].__class__( spectral_axis=wls_anc, flux=fluxes_anc, meta=meta_of_meta ) return spec[0].__class__( spectral_axis=wls, flux=fluxes, uncertainty=unc_out, meta=meta_out, wcs=None ) def plot(self, **kwargs): """Plot the entire spectrum list""" if not "ax" in kwargs: ax = self[0].plot(figsize=(25, 4), **kwargs) for i in range(1, len(self)): self[i].plot(ax=ax, **kwargs) return ax else: for i in range(1, len(self)): self[i].plot(**kwargs) def __add__(self, other): """Bandmath addition""" spec_out = copy.deepcopy(self) for i in range(len(self)): spec_out[i] = self[i] + other[i] return spec_out def __sub__(self, other): """Bandmath subtraction""" spec_out = copy.deepcopy(self) for i in range(len(self)): spec_out[i] = self[i] - other[i] return spec_out def __mul__(self, other): """Bandmath multiplication""" spec_out = copy.deepcopy(self) for i in range(len(self)): spec_out[i] = self[i] * other[i] return spec_out def __truediv__(self, other): """Bandmath division""" spec_out = copy.deepcopy(self) for i in range(len(self)): spec_out[i] = self[i] / other[i] return spec_out def rv_shift(self, velocity): """ Shift velocity of spectrum in km s^-1 """ spec_out = copy.deepcopy(self) for i in range(len(self)): spec_out[i] = self[i].rv_shift(velocity) return spec_out
[ "logging.getLogger", "numpy.sqrt", "celerite2.GaussianProcess", "numpy.log", "scipy.signal.savgol_filter", "scipy.interpolate.interp1d", "numpy.isfinite", "specutils.spectra.spectral_region.SpectralRegion", "copy.deepcopy", "numpy.arange", "numpy.mean", "numpy.exp", "astropy.nddata.StdDevUnc...
[((1464, 1491), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1481, 1491), False, 'import logging\n'), ((1543, 1598), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'VerifyWarning'}), "('ignore', category=VerifyWarning)\n", (1564, 1598), False, 'import warnings\n'), ((1662, 1761), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'astropy.utils.exceptions.AstropyDeprecationWarning'}), "('ignore', category=astropy.utils.exceptions.\n AstropyDeprecationWarning)\n", (1685, 1761), False, 'import warnings\n'), ((1763, 1823), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'FITSFixedWarning'}), "('ignore', category=FITSFixedWarning)\n", (1786, 1823), False, 'import warnings\n'), ((1885, 1943), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'RuntimeWarning'}), "('ignore', category=RuntimeWarning)\n", (1908, 1943), False, 'import warnings\n'), ((1950, 1975), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (1973, 1975), False, 'import warnings\n'), ((1981, 2014), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (2004, 2014), False, 'import warnings\n'), ((3986, 4023), 'astropy.coordinates.EarthLocation.of_site', 'EarthLocation.of_site', (['self.site_name'], {}), '(self.site_name)\n', (4007, 4023), False, 'from astropy.coordinates import SkyCoord, EarthLocation\n'), ((4037, 4071), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': 'self.RA', 'dec': 'self.DEC'}), '(ra=self.RA, dec=self.DEC)\n', (4045, 4071), False, 'from astropy.coordinates import SkyCoord, EarthLocation\n'), ((5361, 5390), 'numpy.nanmedian', 'np.nanmedian', (['spec.flux.value'], {}), '(spec.flux.value)\n', (5373, 5390), True, 'import numpy as np\n'), ((5482, 5506), 'copy.deepcopy', 'copy.deepcopy', (['spec.meta'], {}), '(spec.meta)\n', (5495, 5506), False, 
'import copy\n'), ((9409, 9431), 'numpy.isfinite', 'np.isfinite', (['self.flux'], {}), '(self.flux)\n', (9420, 9431), True, 'import numpy as np\n'), ((9605, 9625), 'numpy.arange', 'np.arange', (['(0)', 'niters'], {}), '(0, niters)\n', (9614, 9625), True, 'import numpy as np\n'), ((12451, 12470), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (12464, 12470), False, 'import copy\n'), ((16645, 16693), 'celerite2.terms.SHOTerm', 'terms.SHOTerm', ([], {'sigma': '(0.01)', 'rho': 'bandwidth', 'Q': '(0.25)'}), '(sigma=0.01, rho=bandwidth, Q=0.25)\n', (16658, 16693), False, 'from celerite2 import terms\n'), ((16707, 16750), 'celerite2.GaussianProcess', 'celerite2.GaussianProcess', (['kernel'], {'mean': '(0.0)'}), '(kernel, mean=0.0)\n', (16732, 16750), False, 'import celerite2\n'), ((19790, 19845), 'scipy.stats.median_abs_deviation', 'median_abs_deviation', (['residual.value'], {'nan_policy': '"""omit"""'}), "(residual.value, nan_policy='omit')\n", (19810, 19845), False, 'from scipy.stats import median_abs_deviation\n'), ((21461, 21497), 'scipy.stats.median_abs_deviation', 'median_abs_deviation', (['residual.value'], {}), '(residual.value)\n', (21481, 21497), False, 'from scipy.stats import median_abs_deviation\n'), ((22372, 22396), 'h5py.File', 'h5py.File', (['out_path', '"""w"""'], {}), "(out_path, 'w')\n", (22381, 22396), False, 'import h5py\n'), ((22930, 22969), 'muler.utilities.resample_list', 'resample_list', (['self', 'specList'], {}), '(self, specList, **kwargs)\n', (22943, 22969), False, 'from muler.utilities import apply_numpy_mask, resample_list\n'), ((23345, 23373), 'muler.utilities.apply_numpy_mask', 'apply_numpy_mask', (['self', 'mask'], {}), '(self, mask)\n', (23361, 23373), False, 'from muler.utilities import apply_numpy_mask, resample_list\n'), ((25332, 25351), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (25345, 25351), False, 'import copy\n'), ((25567, 25586), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', 
(25580, 25586), False, 'import copy\n'), ((25732, 25765), 'astropy.modeling.physical_models.BlackBody', 'BlackBody', ([], {'temperature': '(Teff * u.K)'}), '(temperature=Teff * u.K)\n', (25741, 25765), False, 'from astropy.modeling.physical_models import BlackBody\n'), ((26665, 26684), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (26678, 26684), False, 'import copy\n'), ((27387, 27414), 'copy.deepcopy', 'copy.deepcopy', (['spec[0].meta'], {}), '(spec[0].meta)\n', (27400, 27414), False, 'import copy\n'), ((28993, 29012), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (29006, 29012), False, 'import copy\n'), ((29202, 29221), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (29215, 29221), False, 'import copy\n'), ((29414, 29433), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (29427, 29433), False, 'import copy\n'), ((29624, 29643), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (29637, 29643), False, 'import copy\n'), ((29872, 29891), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (29885, 29891), False, 'import copy\n'), ((6255, 6288), 'astropy.modeling.physical_models.BlackBody', 'BlackBody', ([], {'temperature': '(Teff * u.K)'}), '(temperature=Teff * u.K)\n', (6264, 6288), False, 'from astropy.modeling.physical_models import BlackBody\n'), ((6338, 6356), 'numpy.mean', 'np.mean', (['blackbody'], {}), '(blackbody)\n', (6345, 6356), True, 'import numpy as np\n'), ((6425, 6445), 'numpy.median', 'np.median', (['wl_scaled'], {}), '(wl_scaled)\n', (6434, 6445), True, 'import numpy as np\n'), ((9354, 9374), 'copy.deepcopy', 'copy.deepcopy', (['(~mask)'], {}), '(~mask)\n', (9367, 9374), False, 'import copy\n'), ((10379, 10398), 'numpy.append', 'np.append', (['[0]', 'cut'], {}), '([0], cut)\n', (10388, 10398), True, 'import numpy as np\n'), ((12162, 12257), 'scipy.interpolate.interp1d', 'interp1d', (['self.wavelength.value[mask][mask1]', 'trend_signal[mask1]'], {'fill_value': 
'"""extrapolate"""'}), "(self.wavelength.value[mask][mask1], trend_signal[mask1],\n fill_value='extrapolate')\n", (12170, 12257), False, 'from scipy.interpolate import UnivariateSpline, interp1d\n'), ((12535, 12560), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (12558, 12560), False, 'import warnings\n'), ((12621, 12668), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'RuntimeWarning'], {}), "('ignore', RuntimeWarning)\n", (12642, 12668), False, 'import warnings\n'), ((17433, 17502), 'scipy.optimize.minimize', 'minimize', (['neg_log_like', 'initial_params'], {'method': '"""L-BFGS-B"""', 'args': '(gp,)'}), "(neg_log_like, initial_params, method='L-BFGS-B', args=(gp,))\n", (17441, 17502), False, 'from scipy.optimize import minimize\n'), ((18929, 18961), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': 'figsize'}), '(1, figsize=figsize)\n', (18941, 18961), True, 'import matplotlib.pyplot as plt\n'), ((23487, 23540), 'muler.utilities.apply_numpy_mask', 'apply_numpy_mask', (['spec.meta[ancillary_spectrum]', 'mask'], {}), '(spec.meta[ancillary_spectrum], mask)\n', (23503, 23540), False, 'from muler.utilities import apply_numpy_mask, resample_list\n'), ((24029, 24059), 'numpy.nanmedian', 'np.nanmedian', (['self[index].flux'], {}), '(self[index].flux)\n', (24041, 24059), True, 'import numpy as np\n'), ((25668, 25704), 'numpy.nanmedian', 'np.nanmedian', (['self[index].wavelength'], {}), '(self[index].wavelength)\n', (25680, 25704), True, 'import numpy as np\n'), ((27189, 27211), 'astropy.nddata.StdDevUncertainty', 'StdDevUncertainty', (['unc'], {}), '(unc)\n', (27206, 27211), False, 'from astropy.nddata import StdDevUncertainty\n'), ((4972, 5000), 'specutils.spectra.spectral_region.SpectralRegion', 'SpectralRegion', (['lower', 'upper'], {}), '(lower, upper)\n', (4986, 5000), False, 'from specutils.spectra.spectral_region import SpectralRegion\n'), ((9546, 9566), 'numpy.nanstd', 'np.nanstd', (['self.flux'], 
{}), '(self.flux)\n', (9555, 9566), True, 'import numpy as np\n'), ((10156, 10181), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (10179, 10181), False, 'import warnings\n'), ((10230, 10277), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'RuntimeWarning'], {}), "('ignore', RuntimeWarning)\n", (10251, 10277), False, 'import warnings\n'), ((10935, 10991), 'numpy.any', 'np.any', (['[window_length > h - l, h - l < break_tolerance]'], {}), '([window_length > h - l, h - l < break_tolerance])\n', (10941, 10991), True, 'import numpy as np\n'), ((13281, 13300), 'numpy.isnan', 'np.isnan', (['self.flux'], {}), '(self.flux)\n', (13289, 13300), True, 'import numpy as np\n'), ((13546, 13570), 'numpy.nan_to_num', 'np.nan_to_num', (['self.flux'], {}), '(self.flux)\n', (13559, 13570), True, 'import numpy as np\n'), ((16977, 16995), 'numpy.exp', 'np.exp', (['params[1:]'], {}), '(params[1:])\n', (16983, 16995), True, 'import numpy as np\n'), ((17024, 17074), 'celerite2.terms.SHOTerm', 'terms.SHOTerm', ([], {'sigma': 'theta[0]', 'rho': 'theta[1]', 'Q': '(0.5)'}), '(sigma=theta[0], rho=theta[1], Q=0.5)\n', (17037, 17074), False, 'from celerite2 import terms\n'), ((17361, 17370), 'numpy.log', 'np.log', (['(1)'], {}), '(1)\n', (17367, 17370), True, 'import numpy as np\n'), ((17372, 17385), 'numpy.log', 'np.log', (['(0.001)'], {}), '(0.001)\n', (17378, 17385), True, 'import numpy as np\n'), ((17387, 17398), 'numpy.log', 'np.log', (['(5.0)'], {}), '(5.0)\n', (17393, 17398), True, 'import numpy as np\n'), ((17400, 17412), 'numpy.log', 'np.log', (['(0.01)'], {}), '(0.01)\n', (17406, 17412), True, 'import numpy as np\n'), ((17874, 17914), 'numpy.zeros_like', 'np.zeros_like', (['mean_model'], {'dtype': 'np.bool'}), '(mean_model, dtype=np.bool)\n', (17887, 17914), True, 'import numpy as np\n'), ((17933, 17957), 'copy.deepcopy', 'copy.deepcopy', (['self.meta'], {}), '(self.meta)\n', (17946, 17957), False, 'import copy\n'), ((19870, 19892), 
'numpy.abs', 'np.abs', (['residual.value'], {}), '(residual.value)\n', (19876, 19892), True, 'import numpy as np\n'), ((11037, 11071), 'numpy.nanmedian', 'np.nanmedian', (['self.flux[mask][l:h]'], {}), '(self.flux[mask][l:h])\n', (11049, 11071), True, 'import numpy as np\n'), ((11954, 11992), 'numpy.abs', 'np.abs', (['(self.flux[mask] - trend_signal)'], {}), '(self.flux[mask] - trend_signal)\n', (11960, 11992), True, 'import numpy as np\n'), ((12014, 12055), 'numpy.nanstd', 'np.nanstd', (['(self.flux[mask] - trend_signal)'], {}), '(self.flux[mask] - trend_signal)\n', (12023, 12055), True, 'import numpy as np\n'), ((16518, 16547), 'numpy.nanmedian', 'np.nanmedian', (['self.flux.value'], {}), '(self.flux.value)\n', (16530, 16547), True, 'import numpy as np\n'), ((2869, 2903), 'numpy.sqrt', 'np.sqrt', (['self.uncertainty.quantity'], {}), '(self.uncertainty.quantity)\n', (2876, 2903), True, 'import numpy as np\n'), ((9503, 9526), 'numpy.nanmedian', 'np.nanmedian', (['self.flux'], {}), '(self.flux)\n', (9515, 9526), True, 'import numpy as np\n'), ((11219, 11244), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (11242, 11244), False, 'import warnings\n'), ((11270, 11316), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'FutureWarning'], {}), "('ignore', FutureWarning)\n", (11291, 11316), False, 'import warnings\n'), ((11349, 11456), 'scipy.signal.savgol_filter', 'savgol_filter', ([], {'x': 'self.flux.value[mask][l:h]', 'window_length': 'window_length', 'polyorder': 'polyorder'}), '(x=self.flux.value[mask][l:h], window_length=window_length,\n polyorder=polyorder, **kwargs)\n', (11362, 11456), False, 'from scipy.signal import savgol_filter\n'), ((11636, 11670), 'astropy.units.Quantity', 'Quantity', (['trsig', 'trend_signal.unit'], {}), '(trsig, trend_signal.unit)\n', (11644, 11670), False, 'from astropy.units import Quantity\n'), ((10334, 10352), 'numpy.nanmedian', 'np.nanmedian', (['dlam'], {}), '(dlam)\n', (10346, 10352), True, 
'import numpy as np\n')]
import nltk import string import requests import json from io import StringIO from html.parser import HTMLParser import os import time from sys import platform path = "" if platform == "linux" or platform == "linux2": path = os.path.dirname(os.path.realpath(__file__)).replace("test","data/nltk") elif platform == "darwin": path = os.path.dirname(os.path.realpath(__file__)).replace("test","data/nltk") elif platform == "win32": path = os.path.dirname(os.path.realpath(__file__)).replace("test","data\\nltk") nltk.data.path.append(path) def nltk_download(name,find): try: print(nltk.data.find(find)) except LookupError: nltk.download(name, download_dir = path) def main(): nltk_download('punkt','tokenizers/punkt') nltk_download('wordnet','corpora/wordnet') nltk_download('omw-1.4','corpora/omw-1.4') print("Ready") if __name__ == "__main__": main()
[ "os.path.realpath", "nltk.data.find", "nltk.data.path.append", "nltk.download" ]
[((541, 568), 'nltk.data.path.append', 'nltk.data.path.append', (['path'], {}), '(path)\n', (562, 568), False, 'import nltk\n'), ((627, 647), 'nltk.data.find', 'nltk.data.find', (['find'], {}), '(find)\n', (641, 647), False, 'import nltk\n'), ((683, 721), 'nltk.download', 'nltk.download', (['name'], {'download_dir': 'path'}), '(name, download_dir=path)\n', (696, 721), False, 'import nltk\n'), ((258, 284), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (274, 284), False, 'import os\n'), ((370, 396), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (386, 396), False, 'import os\n'), ((481, 507), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (497, 507), False, 'import os\n')]
#!/usr/bin/env python # coding=utf-8 from db import Model __doc__ = "记录数据库层封装" recordModel = Model('record') #类添加函数 # join查找
[ "db.Model" ]
[((96, 111), 'db.Model', 'Model', (['"""record"""'], {}), "('record')\n", (101, 111), False, 'from db import Model\n')]
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Hillas shower parametrization. TODO: ----- - Should have a separate function or option to compute 3rd order moments + asymmetry (which are not always needed) - remove alpha calculation (which is only about (0,0), and make a get alpha function that does it from an arbitrary point given a pre-computed list of parameters """ import numpy as np from astropy.units import Quantity from collections import namedtuple __all__ = [ 'MomentParameters', 'HighOrderMomentParameters', 'hillas_parameters', ] MomentParameters = namedtuple( "MomentParameters", "size,cen_x,cen_y,length,width,r,phi,psi,miss" ) """Shower moment parameters up to second order. See also -------- HighOrderMomentParameters, hillas_parameters, hillas_parameters_2 """ HighOrderMomentParameters = namedtuple( "HighOrderMomentParameters", "skewness,kurtosis,asymmetry" ) """Shower moment parameters of third order. See also -------- MomentParameters, hillas_parameters, hillas_parameters_2 """ def hillas_parameters_1(pix_x, pix_y, image): """Compute Hillas parameters for a given shower image. Reference: Appendix of the Whipple Crab paper Weekes et al. 
(1998) http://adsabs.harvard.edu/abs/1989ApJ...342..379W (corrected for some obvious typos) Parameters ---------- pix_x : array_like Pixel x-coordinate pix_y : array_like Pixel y-coordinate image : array_like Pixel values corresponding Returns ------- hillas_parameters : `MomentParameters` """ pix_x = Quantity(np.asanyarray(pix_x, dtype=np.float64)).value pix_y = Quantity(np.asanyarray(pix_y, dtype=np.float64)).value image = np.asanyarray(image, dtype=np.float64) assert pix_x.shape == image.shape assert pix_y.shape == image.shape # Compute image moments _s = np.sum(image) m_x = np.sum(image * pix_x) / _s m_y = np.sum(image * pix_y) / _s m_xx = np.sum(image * pix_x * pix_x) / _s # note: typo in paper m_yy = np.sum(image * pix_y * pix_y) / _s m_xy = np.sum(image * pix_x * pix_y) / _s # note: typo in paper # Compute major axis line representation y = a * x + b S_xx = m_xx - m_x * m_x S_yy = m_yy - m_y * m_y S_xy = m_xy - m_x * m_y d = S_yy - S_xx temp = d * d + 4 * S_xy * S_xy a = (d + np.sqrt(temp)) / (2 * S_xy) b = m_y - a * m_x # Compute Hillas parameters width_2 = (S_yy + a * a * S_xx - 2 * a * S_xy) / (1 + a * a) width = np.sqrt(width_2) length_2 = (S_xx + a * a * S_yy + 2 * a * S_xy) / (1 + a * a) length = np.sqrt(length_2) miss = np.abs(b / (1 + a * a)) r = np.sqrt(m_x * m_x + m_y * m_y) phi = np.arctan2(m_y, m_x) # Compute azwidth by transforming to (p, q) coordinates sin_theta = m_y / r cos_theta = m_x / r q = (m_x - pix_x) * sin_theta + (pix_y - m_y) * cos_theta m_q = np.sum(image * q) / _s m_qq = np.sum(image * q * q) / _s azwidth_2 = m_qq - m_q * m_q azwidth = np.sqrt(azwidth_2) return MomentParameters(size=_s, cen_x=m_x, cen_y=m_y, length=length, width=width, r=r, phi=phi, psi=None, miss=miss) def hillas_parameters_2(pix_x, pix_y, image): """Compute Hillas parameters for a given shower image. Alternate implementation of `hillas_parameters` ... 
in the end we'll just keep one, but we're using Hilllas parameter computation as an example for performance checks. Parameters ---------- pix_x : array_like Pixel x-coordinate pix_y : array_like Pixel y-coordinate image : array_like Pixel values corresponding Returns ------- hillas_parameters : `MomentParameters` """ pix_x = Quantity(np.asanyarray(pix_x, dtype=np.float64)).value pix_y = Quantity(np.asanyarray(pix_y, dtype=np.float64)).value image = np.asanyarray(image, dtype=np.float64) assert pix_x.shape == image.shape assert pix_y.shape == image.shape # Compute image moments (done in a bit faster way, but putting all # into one 2D array, where each row will be summed to calculate a # moment) However, this doesn't avoid a temporary created for the # 2D array size = image.sum() momdata = np.row_stack([pix_x, pix_y, pix_x * pix_x, pix_y * pix_y, pix_x * pix_y]) * image moms = momdata.sum(axis=1) / size # calculate variances vx2 = moms[2] - moms[0] ** 2 vy2 = moms[3] - moms[1] ** 2 vxy = moms[4] - moms[0] * moms[1] # common factors: dd = vy2 - vx2 zz = np.sqrt(dd ** 2 + 4.0 * vxy ** 2) # miss uu = 1.0 + dd / zz vv = 2.0 - uu miss = np.sqrt((uu * moms[0] ** 2 + vv * moms[1] ** 2) / 2.0 - moms[0] * moms[1] * 2.0 * vxy / zz) # shower shape parameters width = np.sqrt(vx2 + vy2 - zz) length = np.sqrt(vx2 + vy2 + zz) azwidth = np.sqrt(moms[2] + moms[3] - zz) # rotation angle of ellipse relative to centroid tanpsi_numer = (dd + zz) * moms[1] + 2.0 * vxy * moms[0] tanpsi_denom = (2 * vxy * moms[1]) - (dd - zz) * moms[0] psi = np.pi / 2.0 + np.arctan2(tanpsi_numer, tanpsi_denom) # polar coordinates of centroid rr = np.hypot(moms[0], moms[1]) phi = np.arctan2(moms[1], moms[0]) return MomentParameters(size=size, cen_x=moms[0], cen_y=moms[1], length=length, width=width, r=rr, phi=phi, psi=psi, miss=miss) # use the 2 version by default hillas_parameters = hillas_parameters_2
[ "numpy.abs", "collections.namedtuple", "numpy.sqrt", "numpy.asanyarray", "numpy.sum", "numpy.arctan2", "numpy.row_stack", "numpy.hypot" ]
[((608, 686), 'collections.namedtuple', 'namedtuple', (['"""MomentParameters"""', '"""size,cen_x,cen_y,length,width,r,phi,psi,miss"""'], {}), "('MomentParameters', 'size,cen_x,cen_y,length,width,r,phi,psi,miss')\n", (618, 686), False, 'from collections import namedtuple\n'), ((863, 933), 'collections.namedtuple', 'namedtuple', (['"""HighOrderMomentParameters"""', '"""skewness,kurtosis,asymmetry"""'], {}), "('HighOrderMomentParameters', 'skewness,kurtosis,asymmetry')\n", (873, 933), False, 'from collections import namedtuple\n'), ((1751, 1789), 'numpy.asanyarray', 'np.asanyarray', (['image'], {'dtype': 'np.float64'}), '(image, dtype=np.float64)\n', (1764, 1789), True, 'import numpy as np\n'), ((1904, 1917), 'numpy.sum', 'np.sum', (['image'], {}), '(image)\n', (1910, 1917), True, 'import numpy as np\n'), ((2548, 2564), 'numpy.sqrt', 'np.sqrt', (['width_2'], {}), '(width_2)\n', (2555, 2564), True, 'import numpy as np\n'), ((2644, 2661), 'numpy.sqrt', 'np.sqrt', (['length_2'], {}), '(length_2)\n', (2651, 2661), True, 'import numpy as np\n'), ((2673, 2696), 'numpy.abs', 'np.abs', (['(b / (1 + a * a))'], {}), '(b / (1 + a * a))\n', (2679, 2696), True, 'import numpy as np\n'), ((2705, 2735), 'numpy.sqrt', 'np.sqrt', (['(m_x * m_x + m_y * m_y)'], {}), '(m_x * m_x + m_y * m_y)\n', (2712, 2735), True, 'import numpy as np\n'), ((2746, 2766), 'numpy.arctan2', 'np.arctan2', (['m_y', 'm_x'], {}), '(m_y, m_x)\n', (2756, 2766), True, 'import numpy as np\n'), ((3056, 3074), 'numpy.sqrt', 'np.sqrt', (['azwidth_2'], {}), '(azwidth_2)\n', (3063, 3074), True, 'import numpy as np\n'), ((3925, 3963), 'numpy.asanyarray', 'np.asanyarray', (['image'], {'dtype': 'np.float64'}), '(image, dtype=np.float64)\n', (3938, 3963), True, 'import numpy as np\n'), ((4722, 4755), 'numpy.sqrt', 'np.sqrt', (['(dd ** 2 + 4.0 * vxy ** 2)'], {}), '(dd ** 2 + 4.0 * vxy ** 2)\n', (4729, 4755), True, 'import numpy as np\n'), ((4821, 4916), 'numpy.sqrt', 'np.sqrt', (['((uu * moms[0] ** 2 + vv * moms[1] ** 2) / 
2.0 - moms[0] * moms[1] * 2.0 *\n vxy / zz)'], {}), '((uu * moms[0] ** 2 + vv * moms[1] ** 2) / 2.0 - moms[0] * moms[1] *\n 2.0 * vxy / zz)\n', (4828, 4916), True, 'import numpy as np\n'), ((4976, 4999), 'numpy.sqrt', 'np.sqrt', (['(vx2 + vy2 - zz)'], {}), '(vx2 + vy2 - zz)\n', (4983, 4999), True, 'import numpy as np\n'), ((5013, 5036), 'numpy.sqrt', 'np.sqrt', (['(vx2 + vy2 + zz)'], {}), '(vx2 + vy2 + zz)\n', (5020, 5036), True, 'import numpy as np\n'), ((5051, 5082), 'numpy.sqrt', 'np.sqrt', (['(moms[2] + moms[3] - zz)'], {}), '(moms[2] + moms[3] - zz)\n', (5058, 5082), True, 'import numpy as np\n'), ((5370, 5396), 'numpy.hypot', 'np.hypot', (['moms[0]', 'moms[1]'], {}), '(moms[0], moms[1])\n', (5378, 5396), True, 'import numpy as np\n'), ((5407, 5435), 'numpy.arctan2', 'np.arctan2', (['moms[1]', 'moms[0]'], {}), '(moms[1], moms[0])\n', (5417, 5435), True, 'import numpy as np\n'), ((1928, 1949), 'numpy.sum', 'np.sum', (['(image * pix_x)'], {}), '(image * pix_x)\n', (1934, 1949), True, 'import numpy as np\n'), ((1965, 1986), 'numpy.sum', 'np.sum', (['(image * pix_y)'], {}), '(image * pix_y)\n', (1971, 1986), True, 'import numpy as np\n'), ((2003, 2032), 'numpy.sum', 'np.sum', (['(image * pix_x * pix_x)'], {}), '(image * pix_x * pix_x)\n', (2009, 2032), True, 'import numpy as np\n'), ((2072, 2101), 'numpy.sum', 'np.sum', (['(image * pix_y * pix_y)'], {}), '(image * pix_y * pix_y)\n', (2078, 2101), True, 'import numpy as np\n'), ((2118, 2147), 'numpy.sum', 'np.sum', (['(image * pix_x * pix_y)'], {}), '(image * pix_x * pix_y)\n', (2124, 2147), True, 'import numpy as np\n'), ((2948, 2965), 'numpy.sum', 'np.sum', (['(image * q)'], {}), '(image * q)\n', (2954, 2965), True, 'import numpy as np\n'), ((2982, 3003), 'numpy.sum', 'np.sum', (['(image * q * q)'], {}), '(image * q * q)\n', (2988, 3003), True, 'import numpy as np\n'), ((4305, 4378), 'numpy.row_stack', 'np.row_stack', (['[pix_x, pix_y, pix_x * pix_x, pix_y * pix_y, pix_x * pix_y]'], {}), '([pix_x, pix_y, pix_x * 
pix_x, pix_y * pix_y, pix_x * pix_y])\n', (4317, 4378), True, 'import numpy as np\n'), ((5284, 5322), 'numpy.arctan2', 'np.arctan2', (['tanpsi_numer', 'tanpsi_denom'], {}), '(tanpsi_numer, tanpsi_denom)\n', (5294, 5322), True, 'import numpy as np\n'), ((1626, 1664), 'numpy.asanyarray', 'np.asanyarray', (['pix_x'], {'dtype': 'np.float64'}), '(pix_x, dtype=np.float64)\n', (1639, 1664), True, 'import numpy as np\n'), ((1693, 1731), 'numpy.asanyarray', 'np.asanyarray', (['pix_y'], {'dtype': 'np.float64'}), '(pix_y, dtype=np.float64)\n', (1706, 1731), True, 'import numpy as np\n'), ((2388, 2401), 'numpy.sqrt', 'np.sqrt', (['temp'], {}), '(temp)\n', (2395, 2401), True, 'import numpy as np\n'), ((3800, 3838), 'numpy.asanyarray', 'np.asanyarray', (['pix_x'], {'dtype': 'np.float64'}), '(pix_x, dtype=np.float64)\n', (3813, 3838), True, 'import numpy as np\n'), ((3867, 3905), 'numpy.asanyarray', 'np.asanyarray', (['pix_y'], {'dtype': 'np.float64'}), '(pix_y, dtype=np.float64)\n', (3880, 3905), True, 'import numpy as np\n')]
# Generated by Django 4.0.1 on 2022-01-12 23:44 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('points', '0003_spend_alter_transaction_timestamp'), ] operations = [ migrations.AddField( model_name='spend', name='receipt', field=models.TextField(blank=True, null=True), ), ]
[ "django.db.models.TextField" ]
[((348, 387), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (364, 387), False, 'from django.db import migrations, models\n')]
import json from multiprocessing import Pool, cpu_count from catti.IO.portfolio import loadPortfolioInfo from catti.IO.general import * from catti.indicators.calculations import indicatorsCalculator from catti.indicators.signals import signalsCalculator from catti.indicators.signalsValidation import signalsValidator from catti.indicators.reporters import signalsReport, validatedSignalsReport import tqdm def calculate(target, chunkPercentage=2, skipCalculated=True, extraArgs=1, verbose=False): # Each target has its own way of processing the data. functionSelection = { 'indicators': indicatorsCalculator, 'signals': signalsCalculator, 'validated signals': signalsValidator, 'signals report': signalsReport, 'validated signals report': validatedSignalsReport } selectedFunction = functionSelection[target] # We load portfolio info, to grab the securities' ticker names. portfolio = loadPortfolioInfo() securitiesToCalculate = portfolio['securities'] # We skip already calculated target data if requested. if skipCalculated: securitiesToCalculate = skip(target, portfolio['securities'], True) # The new size of all the tickers after skipping calculated. size = len(securitiesToCalculate) # A percentage of the total data to be calculated per chunk. chunkSize = int(size * chunkPercentage/100) # If the chunkPercentage is relatively too small, the chunkSize becomes zero. # In that case, the user basically wants to calculate 1 security at a time, # which is actually futile. Nevertheless, we need to avoid 0. if chunkSize == 0: chunkSize = 1 print('Calculating at most {} securites per chunk, which is ~{}% of the total ({})'.format(chunkSize, chunkPercentage, size)) # We create a list of lists that consists of the chunks. chunks = splitter(securitiesToCalculate, chunkSize) # We need the number of splits to track the progress. splits = size // chunkSize # Initialize the results of all the chunks, to generate reports later. 
chunkResults = [] for chunk in tqdm.tqdm(chunks, desc='Chunk completion', total=splits): # For each chunk, we load whatever is required and # we pass the arguments to the corresponding function. args = parameters(target, chunk, extraArgs) # The chunksize=1 parameter, basically gives 1 security to work on per thread. # This is intented because the calculations are already intensive. Feel free to tweak. with Pool(processes=cpu_count()) as pool: if 'report' in target: results = pool.starmap(selectedFunction, tqdm.tqdm(args, desc='Calculating {}'.format(target)), chunksize=1) chunkResults.extend(results) else: pool.starmap(selectedFunction, tqdm.tqdm(args, desc='Calculating {}'.format(target)), chunksize=1) # Aggregate results of indicators' data for whole portfolio. if 'report' in target: manageReports(target, chunkResults, verbose) def parameters(target, chunk, extraArgs, verbose=False): # Each target we calculate depends on other data to be calculated. targetRequirement = { 'indicators': 'historical', 'signals': 'indicators', 'validated signals': 'signals', 'signals report': 'signals', 'validated signals report': 'validated signals' } requiredTarget = targetRequirement[target] # NOTE: Cyclomatic complexity: HIGH. # We either want to generate reports... if 'report' in target: # We always need to load the data required for the target's function if we proceed below. targetData = loadChunkParallel(requiredTarget, chunk) if target == 'signals report': # The function header is "signalsReport(securityName, signals, verbose)" return [(security, targetData[security], verbose) for security in targetData] elif target == 'validated signals report': # The function header is "signalsReport(securityName, validatedSignals, verbose)" return [(security, targetData[security], verbose) for security in targetData] # ...or we want to either calculate indicator signals or to validate them. else: # We always need to load historical data for comparisons. 
historicalData = loadChunkParallel('historical', chunk) # If we calculate indicators, our target data is only historical. if target == 'indicators': # The function header is "indicatorsCalculator(securityName, security, verbose)" return [(security, historicalData[security], verbose) for security in historicalData] # We always need to load the data required for the target's function if we proceed below. targetData = loadChunkParallel(requiredTarget, chunk) if target == 'signals': # The function header is "signalsCalculator(securityName, security, indicators)" return [(security, historicalData[security], targetData[security]) for security in historicalData] elif target == 'validated signals': # The function header is "signalsValidator(securityName, security, signals, days)" return [(security, historicalData[security], targetData[security], extraArgs) for security in historicalData] def manageReports(target, chunkResults, verbose=False): aggregatedResults = {} checkCreateFolder(target, 'indicators') # Initialize the percentages for each indicator for security in chunkResults: for indicator in security: aggregatedResults[indicator] = {} for key in security[indicator]: aggregatedResults[indicator][key] = 0.0 # Aggregate them... for security in chunkResults: for indicator in security: for key in security[indicator]: # We divide with the number of total loaded securities, because they might be less than those # included in portfolio. Those with insufficient data are ignored. aggregatedResults[indicator][key] += security[indicator][key] / len(chunkResults) # might carry errors # Save them individually... 
for indicator in aggregatedResults: subFolder = os.path.join(folders[target], 'indicators') fileName = os.path.join(subFolder, indicator + '.json') with open(fileName, 'w') as f: if verbose: print('[+] Saving aggregated report for {}'.format(indicator)) json.dump(aggregatedResults[indicator], f, indent=1) # ...and all together allTogetherFilename = os.path.join(subFolder, 'combined.json') with open(allTogetherFilename, 'w') as f: if verbose: print('[+] Saving aggregated report for all indicators') json.dump(aggregatedResults, f, indent=1)
[ "multiprocessing.cpu_count", "catti.IO.portfolio.loadPortfolioInfo", "tqdm.tqdm", "json.dump" ]
[((976, 995), 'catti.IO.portfolio.loadPortfolioInfo', 'loadPortfolioInfo', ([], {}), '()\n', (993, 995), False, 'from catti.IO.portfolio import loadPortfolioInfo\n'), ((2184, 2240), 'tqdm.tqdm', 'tqdm.tqdm', (['chunks'], {'desc': '"""Chunk completion"""', 'total': 'splits'}), "(chunks, desc='Chunk completion', total=splits)\n", (2193, 2240), False, 'import tqdm\n'), ((7098, 7139), 'json.dump', 'json.dump', (['aggregatedResults', 'f'], {'indent': '(1)'}), '(aggregatedResults, f, indent=1)\n', (7107, 7139), False, 'import json\n'), ((6797, 6849), 'json.dump', 'json.dump', (['aggregatedResults[indicator]', 'f'], {'indent': '(1)'}), '(aggregatedResults[indicator], f, indent=1)\n', (6806, 6849), False, 'import json\n'), ((2636, 2647), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (2645, 2647), False, 'from multiprocessing import Pool, cpu_count\n')]
import indicoio from celery import Celery from flask import Flask from flask_bootstrap import Bootstrap from flask_googlemaps import GoogleMaps from flask_mail import Mail from flask_mongoengine import MongoEngine from app.celery.factory import init_celery from config import config, DEVELOPMENT_CONFIG_NAME celery = Celery('app') mail = Mail() db = MongoEngine() bootstrap = Bootstrap() def create_app(config_name=DEVELOPMENT_CONFIG_NAME): app = Flask(__name__) app.config.from_object(config[config_name]) register_extensions(app) register_blueprints(app) init_celery(app, celery) GoogleMaps(app) set_indico_key(config[config_name]) return app def set_indico_key(cfg): indicoio.config.api_key = cfg.INDICO_KEY def register_extensions(app): mail.init_app(app) db.init_app(app) bootstrap.init_app(app) def register_blueprints(app): from app.blueprints.main import main from app.blueprints.auth import auth from app.blueprints.account import account from app.blueprints.errors import errors app.register_blueprint(main) app.register_blueprint(auth) app.register_blueprint(account) app.register_blueprint(errors)
[ "flask_mail.Mail", "flask.Flask", "celery.Celery", "flask_googlemaps.GoogleMaps", "app.celery.factory.init_celery", "flask_bootstrap.Bootstrap", "flask_mongoengine.MongoEngine" ]
[((319, 332), 'celery.Celery', 'Celery', (['"""app"""'], {}), "('app')\n", (325, 332), False, 'from celery import Celery\n'), ((340, 346), 'flask_mail.Mail', 'Mail', ([], {}), '()\n', (344, 346), False, 'from flask_mail import Mail\n'), ((352, 365), 'flask_mongoengine.MongoEngine', 'MongoEngine', ([], {}), '()\n', (363, 365), False, 'from flask_mongoengine import MongoEngine\n'), ((378, 389), 'flask_bootstrap.Bootstrap', 'Bootstrap', ([], {}), '()\n', (387, 389), False, 'from flask_bootstrap import Bootstrap\n'), ((455, 470), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (460, 470), False, 'from flask import Flask\n'), ((582, 606), 'app.celery.factory.init_celery', 'init_celery', (['app', 'celery'], {}), '(app, celery)\n', (593, 606), False, 'from app.celery.factory import init_celery\n'), ((612, 627), 'flask_googlemaps.GoogleMaps', 'GoogleMaps', (['app'], {}), '(app)\n', (622, 627), False, 'from flask_googlemaps import GoogleMaps\n')]
# -*- coding:utf-8 -*- import logging import stores release = "0.1.1" logging.getLogger("dbsync") class DBSync(): def __init__(self): pass def syncer(self, syncer): return self def validator(self, validator): return self def serializer(self, serializer): return self def notifier(self, notifier): return self def start(self): pass def stop(self): pass class Engine(): def __init__(self): pass
[ "logging.getLogger" ]
[((72, 99), 'logging.getLogger', 'logging.getLogger', (['"""dbsync"""'], {}), "('dbsync')\n", (89, 99), False, 'import logging\n')]
# -*- coding: utf-8 -*- import asyncio from config import (CHECK_SERVER_INTERVAL, CHECK_SERVER_INTERVAL_MAX, CRON_LOOP_INTERVAL) from discord import Activity, ActivityType from discord.errors import Forbidden, NotFound from discord.ext import commands, tasks from modules.db import Servers from modules.logging import logger from modules.utils import embed_generator, get_server_info, stop_server class ServersCron(commands.Cog): def __init__(self, bot: commands.Bot): self.bot = bot self.loop = asyncio.get_event_loop() self.crontab.start() logger.info("Cron started") @tasks.loop(seconds=CRON_LOOP_INTERVAL, reconnect=True) async def crontab(self): await self.bot.wait_until_ready() channels = await Servers.filter(worked=True).group_by("channel").values_list("channel", flat=True) for channel_id in channels: self.loop.create_task(self.for_channels(channel_id)) servers_count = await Servers.filter(worked=True).count() await self.bot.change_presence(activity=Activity(type=ActivityType.watching, name=f"Use !help | {servers_count} game servers")) async def for_channels(self, channel_id): servers_ids = await Servers.filter(channel=channel_id, worked=True).values_list("id", flat=True) channel = self.bot.get_channel(channel_id) if channel is None: await Servers.filter(channel=channel_id).update(worked=False) return sleep = CHECK_SERVER_INTERVAL_MAX if len(servers_ids) > 3 else CHECK_SERVER_INTERVAL for id in servers_ids: await self.for_id(channel, id) await asyncio.sleep(sleep) async def for_id(self, channel, id): instance = await Servers.filter(id=id).first() server_info, players = await get_server_info(instance.ip, instance.port) if server_info: await Servers.filter(id=id).update(name=server_info.server_name, game=server_info.game) try: msg = await channel.fetch_message(instance.message) embed = await embed_generator(server_info, players, instance) await msg.edit(embed=embed) except (NotFound, Forbidden) as e: user = await self.bot.fetch_user(instance.author) if isinstance(e, 
Forbidden): await user.send(f"I don't have permission to edit {instance.ip}:{instance.port} in #{channel.name}\n" "Please give me permission to edit messages in this channel.\n" "To resume the server, react with 🔄") elif isinstance(e, NotFound): await user.send(f"Server {instance.ip}:{instance.port} in #{channel.name} has been deleted") #await user.send(f"Server {instance.ip}:{instance.port} in channel <#{instance.channel}> is off if you not delete bot message, check bot permissions") await stop_server(instance.message) def setup(bot: commands.Bot): bot.add_cog(ServersCron(bot))
[ "modules.utils.embed_generator", "asyncio.get_event_loop", "modules.utils.get_server_info", "modules.utils.stop_server", "discord.Activity", "asyncio.sleep", "modules.logging.logger.info", "discord.ext.tasks.loop", "modules.db.Servers.filter" ]
[((635, 689), 'discord.ext.tasks.loop', 'tasks.loop', ([], {'seconds': 'CRON_LOOP_INTERVAL', 'reconnect': '(True)'}), '(seconds=CRON_LOOP_INTERVAL, reconnect=True)\n', (645, 689), False, 'from discord.ext import commands, tasks\n'), ((539, 563), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (561, 563), False, 'import asyncio\n'), ((601, 628), 'modules.logging.logger.info', 'logger.info', (['"""Cron started"""'], {}), "('Cron started')\n", (612, 628), False, 'from modules.logging import logger\n'), ((1893, 1936), 'modules.utils.get_server_info', 'get_server_info', (['instance.ip', 'instance.port'], {}), '(instance.ip, instance.port)\n', (1908, 1936), False, 'from modules.utils import embed_generator, get_server_info, stop_server\n'), ((1738, 1758), 'asyncio.sleep', 'asyncio.sleep', (['sleep'], {}), '(sleep)\n', (1751, 1758), False, 'import asyncio\n'), ((2164, 2211), 'modules.utils.embed_generator', 'embed_generator', (['server_info', 'players', 'instance'], {}), '(server_info, players, instance)\n', (2179, 2211), False, 'from modules.utils import embed_generator, get_server_info, stop_server\n'), ((999, 1026), 'modules.db.Servers.filter', 'Servers.filter', ([], {'worked': '(True)'}), '(worked=True)\n', (1013, 1026), False, 'from modules.db import Servers\n'), ((1083, 1174), 'discord.Activity', 'Activity', ([], {'type': 'ActivityType.watching', 'name': 'f"""Use !help | {servers_count} game servers"""'}), "(type=ActivityType.watching, name=\n f'Use !help | {servers_count} game servers')\n", (1091, 1174), False, 'from discord import Activity, ActivityType\n'), ((1303, 1350), 'modules.db.Servers.filter', 'Servers.filter', ([], {'channel': 'channel_id', 'worked': '(True)'}), '(channel=channel_id, worked=True)\n', (1317, 1350), False, 'from modules.db import Servers\n'), ((1826, 1847), 'modules.db.Servers.filter', 'Servers.filter', ([], {'id': 'id'}), '(id=id)\n', (1840, 1847), False, 'from modules.db import Servers\n'), ((3014, 3043), 
'modules.utils.stop_server', 'stop_server', (['instance.message'], {}), '(instance.message)\n', (3025, 3043), False, 'from modules.utils import embed_generator, get_server_info, stop_server\n'), ((1478, 1512), 'modules.db.Servers.filter', 'Servers.filter', ([], {'channel': 'channel_id'}), '(channel=channel_id)\n', (1492, 1512), False, 'from modules.db import Servers\n'), ((1979, 2000), 'modules.db.Servers.filter', 'Servers.filter', ([], {'id': 'id'}), '(id=id)\n', (1993, 2000), False, 'from modules.db import Servers\n'), ((786, 813), 'modules.db.Servers.filter', 'Servers.filter', ([], {'worked': '(True)'}), '(worked=True)\n', (800, 813), False, 'from modules.db import Servers\n')]
from django.urls import path from dashboards.views import DashboardView urlpatterns = [ path( "<slug:organisation>/dashboards/<slug:dashboard>/", DashboardView.as_view(), name="dashboard_detail", ), ]
[ "dashboards.views.DashboardView.as_view" ]
[((168, 191), 'dashboards.views.DashboardView.as_view', 'DashboardView.as_view', ([], {}), '()\n', (189, 191), False, 'from dashboards.views import DashboardView\n')]
from datetime import date from django.http import JsonResponse from rest_framework import viewsets from rest_framework.permissions import IsAdminUser from .models import CampPlan from .serializers import CampPlanSerializer # =========================== # Camp Plan ViewSet # =========================== class CampPlanViewSet(viewsets.ModelViewSet): # Only admin can do CUD (from CRUD) operations permission_classes_by_action = { 'create': [IsAdminUser], 'update': [IsAdminUser], 'destroy': [IsAdminUser], 'partial_update': [IsAdminUser] } queryset = CampPlan.objects.all().order_by('name') serializer_class = CampPlanSerializer # overriding create method to validate camp start & end dates def create(self, request, *agrs, **kwargs): serializer = self.get_serializer(data=request.data) if serializer.is_valid(): # validating camp start & end dates start_date = serializer.validated_data.get('start_date') end_date = serializer.validated_data.get('end_date') if start_date > end_date: return JsonResponse({'error': 'Camp start date should be less than camp end date'}) if date.today() > start_date: return JsonResponse({'error': 'Camp start date should be more than current date'}) if date.today() > end_date: return JsonResponse({'error': 'Camp end date should be more than current date'}) serializer.save() return JsonResponse({'success': 'Successfully created camp plan'}) else: return JsonResponse({'error': serializer.errors}) # handle permissions def get_permissions(self): try: return [permission() for permission in self.permission_classes_by_action[self.action]] except: return [permission() for permission in self.permission_classes]
[ "datetime.date.today", "django.http.JsonResponse" ]
[((1544, 1603), 'django.http.JsonResponse', 'JsonResponse', (["{'success': 'Successfully created camp plan'}"], {}), "({'success': 'Successfully created camp plan'})\n", (1556, 1603), False, 'from django.http import JsonResponse\n'), ((1637, 1679), 'django.http.JsonResponse', 'JsonResponse', (["{'error': serializer.errors}"], {}), "({'error': serializer.errors})\n", (1649, 1679), False, 'from django.http import JsonResponse\n'), ((1139, 1215), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'Camp start date should be less than camp end date'}"], {}), "({'error': 'Camp start date should be less than camp end date'})\n", (1151, 1215), False, 'from django.http import JsonResponse\n'), ((1231, 1243), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1241, 1243), False, 'from datetime import date\n'), ((1281, 1356), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'Camp start date should be more than current date'}"], {}), "({'error': 'Camp start date should be more than current date'})\n", (1293, 1356), False, 'from django.http import JsonResponse\n'), ((1372, 1384), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1382, 1384), False, 'from datetime import date\n'), ((1420, 1493), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'Camp end date should be more than current date'}"], {}), "({'error': 'Camp end date should be more than current date'})\n", (1432, 1493), False, 'from django.http import JsonResponse\n')]
# coding=utf-8 # 导入自己的函数包d2lzh_pytorch,注意要先将目标包的父路径添加到系统路径中 import sys sys.path.append(r".") from d2lzh_pytorch import data_process, plot, train, rnn import torch import time import torchvision from PIL import Image from torch import nn,optim from torch.utils.data import Dataset,DataLoader """ 这一节介绍了如何对图像数据集进行增广以获得更多的可用数据 """ # 加上这个限定才能支持多线程读取 if __name__ == "__main__": # 设置计算设备,让计算在GPU上进行 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # 先尝试显示一下猫图 plot.set_figsize() img = Image.open(r"./Datasets"+'/Img/cat1.jpg') # plot.plt.imshow(img) # # 记得要显示时需要show一下 # plot.plt.show() print('————————————————————————————') # 翻转和裁剪是最常见的增广方法 # 写一个增广图片的函数,其中aug是对应的增广操作函数,让目标图像增广多次然后显示出来 def apply(img, aug, num_rows=2, num_cols=4, scale=1.5): # 在循环内对图像执行num_rows * num_cols次操作并组合,然后下面显示出来 Y = [aug(img) for _ in range(num_rows * num_cols)] # 这里保存了一个在一图中显示图片集的函数show_images,需要代入网格尺寸和缩放 plot.show_images(Y, num_rows, num_cols, scale) # # 此处应用了随机水平翻转的库函数,翻转是最方便的增广方法,效果有限 # apply(img, torchvision.transforms.RandomHorizontalFlip()) # # 这里是更少用的垂直翻转 # apply(img, torchvision.transforms.RandomVerticalFlip()) # # 然后为了降低网络对图像目标所在位置的敏感性,可进行随机裁剪 # # 这里的参数随机裁剪出面积0.1~1,宽高比0.5~2的区域,然后缩放到200像素来运用 # apply(img, torchvision.transforms.RandomResizedCrop( # 200, scale=(0.1, 1), ratio=(0.5, 2))) print('————————————————————————————') # # 可从亮度,对比度,饱和度,色调,四个方面调整图像 # # 可以同时设置 # apply(img, torchvision.transforms.ColorJitter( # brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5)) print('————————————————————————————') # # 可以将多种图像增广结合起来用,使用Compose函数组合 # apply(img, torchvision.transforms.Compose([ # torchvision.transforms.RandomHorizontalFlip(0.5), # torchvision.transforms.ColorJitter( # brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), # torchvision.transforms.RandomResizedCrop( # 200, scale=(0.1, 1), ratio=(0.5, 2)) # ])) print('————————————————————————————') # 这里实际测试一下增广的方法,使用CIFAR10数据集,读取后用循环来显示 all_imgs = torchvision.datasets.CIFAR10( train=True, root=r"./Datasets/CIFAR", 
download=True) # plot.show_images([all_imgs[i][0] for i in range(32)], 4, 8, scale=0.8) # 有处理的增广函数,进行了水平翻转 flip_aug = torchvision.transforms.Compose([ # 这个组合增广进行了随机水平翻转,然后用ToTensor将图像转为Pytorch可用的格式 torchvision.transforms.RandomHorizontalFlip(), torchvision.transforms.ToTensor() ]) # 无处理的增广函数 no_aug = torchvision.transforms.Compose([ torchvision.transforms.ToTensor() ]) num_workers = 4 # 一个简单的图像读取函数 def load_cifar10(is_train, augs, batch_size, root=r"./Datasets/CIFAR"): # 读取数据集,augs控制想要应用在数据集读取时的增广函数 dataset = torchvision.datasets.CIFAR10( root=root, train=is_train, transform=augs, download=True) # 返回对应的读取器 return DataLoader(dataset, batch_size=batch_size, shuffle=is_train, num_workers=num_workers) # 然后把保存一个基本的训练函数train # 接着再写一个包含了读取,增广,训练全过程的接口函数来调用 def train_with_data_aug(train_augs, test_augs, lr=0.001): batch_size, net = 256, rnn.resnet18(10) optimizer = torch.optim.Adam(net.parameters(), lr=lr) loss = torch.nn.CrossEntropyLoss() train_iter = load_cifar10(True, train_augs, batch_size) test_iter = load_cifar10(False, test_augs, batch_size) train.train(train_iter, test_iter, net, loss, optimizer, device, num_epochs=10) # 调用接口函数进行训练,其中测试集不要进行增广保证结果的稳定 train_with_data_aug(flip_aug, no_aug) print('————————————————————————————')
[ "PIL.Image.open", "torch.nn.CrossEntropyLoss", "torch.utils.data.DataLoader", "torchvision.transforms.RandomHorizontalFlip", "d2lzh_pytorch.plot.set_figsize", "torch.cuda.is_available", "d2lzh_pytorch.rnn.resnet18", "torchvision.datasets.CIFAR10", "d2lzh_pytorch.plot.show_images", "torchvision.tra...
[((72, 92), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (87, 92), False, 'import sys\n'), ((496, 514), 'd2lzh_pytorch.plot.set_figsize', 'plot.set_figsize', ([], {}), '()\n', (512, 514), False, 'from d2lzh_pytorch import data_process, plot, train, rnn\n'), ((525, 567), 'PIL.Image.open', 'Image.open', (["('./Datasets' + '/Img/cat1.jpg')"], {}), "('./Datasets' + '/Img/cat1.jpg')\n", (535, 567), False, 'from PIL import Image\n'), ((2148, 2233), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'train': '(True)', 'root': '"""./Datasets/CIFAR"""', 'download': '(True)'}), "(train=True, root='./Datasets/CIFAR', download=True\n )\n", (2176, 2233), False, 'import torchvision\n'), ((986, 1032), 'd2lzh_pytorch.plot.show_images', 'plot.show_images', (['Y', 'num_rows', 'num_cols', 'scale'], {}), '(Y, num_rows, num_cols, scale)\n', (1002, 1032), False, 'from d2lzh_pytorch import data_process, plot, train, rnn\n'), ((2829, 2919), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': 'root', 'train': 'is_train', 'transform': 'augs', 'download': '(True)'}), '(root=root, train=is_train, transform=augs,\n download=True)\n', (2857, 2919), False, 'import torchvision\n'), ((2963, 3053), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': 'is_train', 'num_workers': 'num_workers'}), '(dataset, batch_size=batch_size, shuffle=is_train, num_workers=\n num_workers)\n', (2973, 3053), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((3324, 3351), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (3349, 3351), False, 'import torch\n'), ((3487, 3566), 'd2lzh_pytorch.train.train', 'train.train', (['train_iter', 'test_iter', 'net', 'loss', 'optimizer', 'device'], {'num_epochs': '(10)'}), '(train_iter, test_iter, net, loss, optimizer, device, num_epochs=10)\n', (3498, 3566), False, 'from d2lzh_pytorch import data_process, plot, train, rnn\n'), 
((437, 462), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (460, 462), False, 'import torch\n'), ((2450, 2495), 'torchvision.transforms.RandomHorizontalFlip', 'torchvision.transforms.RandomHorizontalFlip', ([], {}), '()\n', (2493, 2495), False, 'import torchvision\n'), ((2505, 2538), 'torchvision.transforms.ToTensor', 'torchvision.transforms.ToTensor', ([], {}), '()\n', (2536, 2538), False, 'import torchvision\n'), ((2615, 2648), 'torchvision.transforms.ToTensor', 'torchvision.transforms.ToTensor', ([], {}), '()\n', (2646, 2648), False, 'import torchvision\n'), ((3230, 3246), 'd2lzh_pytorch.rnn.resnet18', 'rnn.resnet18', (['(10)'], {}), '(10)\n', (3242, 3246), False, 'from d2lzh_pytorch import data_process, plot, train, rnn\n')]
import sphinx_rtd_theme project = 'Bitcoin DCA' copyright = '2021, <NAME>' author = '<NAME>' extensions = [] templates_path = ['_templates'] exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] html_theme = 'sphinx_rtd_theme' html_static_path = ['_static'] pygments_style = 'sphinx' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] html_theme_options = { 'navigation_depth': 4, } master_doc = 'index' html_logo = '../resources/images/logo-white.png' # I use a privacy focussed service https://usefathom.com/ to track how the documentation # is being used. This allows me to improve its contents. html_js_files = [('https://krill.jorijn.com/script.js', {'data-site': 'MXGDAIWO'})]
[ "sphinx_rtd_theme.get_html_theme_path" ]
[((306, 344), 'sphinx_rtd_theme.get_html_theme_path', 'sphinx_rtd_theme.get_html_theme_path', ([], {}), '()\n', (342, 344), False, 'import sphinx_rtd_theme\n')]
from django.contrib import admin from app_restaurantes.models import Restaurante # Register your models here. admin.site.register(Restaurante)
[ "django.contrib.admin.site.register" ]
[((112, 144), 'django.contrib.admin.site.register', 'admin.site.register', (['Restaurante'], {}), '(Restaurante)\n', (131, 144), False, 'from django.contrib import admin\n')]
# -*- coding: utf-8 -*- # Generated by Django 1.9.9 on 2016-09-09 15:44 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('wagtailcore', '0029_unicode_slugfield_dj19'), ('people', '0001_initial'), ] operations = [ migrations.CreateModel( name='Group', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ], options={ 'ordering': ('name',), }, ), migrations.CreateModel( name='Membership', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='people.Group')), ], ), migrations.CreateModel( name='PersonIndexPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('groups', models.ManyToManyField(to='people.Group')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), migrations.AlterModelOptions( name='person', options={'ordering': ('last_name',)}, ), migrations.AddField( model_name='person', name='title', field=models.CharField(blank=True, max_length=100, null=True), ), migrations.AddField( model_name='membership', name='person', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='people.Person'), ), migrations.AddField( model_name='group', name='people', field=models.ManyToManyField(blank=True, through='people.Membership', to='people.Person'), ), migrations.AlterUniqueTogether( name='membership', unique_together=set([('group', 'person')]), ), ]
[ "django.db.models.OneToOneField", "django.db.models.ForeignKey", "django.db.models.ManyToManyField", "django.db.migrations.AlterModelOptions", "django.db.models.AutoField", "django.db.models.CharField" ]
[((1558, 1644), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""person"""', 'options': "{'ordering': ('last_name',)}"}), "(name='person', options={'ordering': (\n 'last_name',)})\n", (1586, 1644), False, 'from django.db import migrations, models\n'), ((1782, 1837), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (1798, 1837), False, 'from django.db import migrations, models\n'), ((1961, 2071), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""groups"""', 'to': '"""people.Person"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='groups', to='people.Person')\n", (1978, 2071), False, 'from django.db import migrations, models\n'), ((2185, 2273), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'through': '"""people.Membership"""', 'to': '"""people.Person"""'}), "(blank=True, through='people.Membership', to=\n 'people.Person')\n", (2207, 2273), False, 'from django.db import migrations, models\n'), ((470, 563), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (486, 563), False, 'from django.db import migrations, models\n'), ((587, 619), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (603, 619), False, 'from django.db import migrations, models\n'), ((831, 924), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (847, 924), False, 'from django.db 
import migrations, models\n'), ((949, 1035), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""people.Group"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'people.Group')\n", (966, 1035), False, 'from django.db import migrations, models\n'), ((1177, 1347), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""wagtailcore.Page"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'wagtailcore.Page')\n", (1197, 1347), False, 'from django.db import migrations, models\n'), ((1367, 1408), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""people.Group"""'}), "(to='people.Group')\n", (1389, 1408), False, 'from django.db import migrations, models\n')]
import pandas as pd from matplotlib import pyplot as plt from scipy.interpolate import spline from scipy.ndimage.filters import gaussian_filter1d import numpy as np #%% # TZ numbers via https://www.fangraphs.com/leaders.aspx?pos=3b&stats=fld&lg=all&qual=y&type=0&season=2017&month=0&season1=1961&ind=1&team=0&rost=0&age=0&filter=&players=0 df = pd.read_csv(r'TZ.csv') df = df[['Season','Name','TZ','playerid']] # For Total Zone delete anything past 2002 df = df.loc[df.Season < 2002] # UZR numbers via https://www.fangraphs.com/leaders.aspx?pos=3b&stats=fld&lg=all&qual=y&type=1&season=2017&month=0&season1=2002&ind=1&team=0&rost=0&age=0&filter=&players=0 df2 = pd.read_csv(r'UZR.csv') df2 = df2[['Season','Name','UZR','playerid']] df2.columns = ['Season','Name','TZ','playerid'] df = pd.concat([df,df2]) df = df.sort_values(by='Season') #%% # Figure out which names to use # by looking at max TZ for each year gb = df.groupby(['Season'])['TZ'].max() gb = gb.reset_index() gb = gb.sort_values(by='Season') df3 = df.merge(gb) df3 = df3.sort_values(by='Season') df3 = df3.loc[df3['TZ'] > 15] names = ['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', 'Evan Longoria', '<NAME>'] colors = ['blue','lightblue','lightblue','blue','lightblue','lightblue','blue','lightblue','gold','lightblue','lightblue'] fig = plt.figure() SPLINE_FACTOR = 1.6 # Go through and plot each player for i,name in enumerate(names): #print(name) col = colors[i] linewidth = 1 if col != 'lightblue': linewidth=2 df_name = df.loc[df['Name'] == name] df_name.index = range(len(df_name)) xnew = np.linspace(df_name.Season.min(),df_name.Season.max(),300) #300 represents number of points to make between T.min and T.max #tz_smooth = spline(df_name.Season,df_name.TZ,xnew) tz_smooth = spline(df_name.Season,gaussian_filter1d(df_name.TZ,SPLINE_FACTOR),xnew) tz_convolved = np.convolve(df_name.TZ,np.ones(3,)/3,mode='same') tz_smooth2 = 
spline(df_name.Season,gaussian_filter1d(tz_convolved,SPLINE_FACTOR),xnew) #plt.plot(xnew,tz_smooth,label=name)#,color=col) #plt.plot(df_name['Season'],df_name['TZ'],label=name)#,color=col) #plt.plot(df_name['Season'],tz_convolved,label=name)#,color=col) plt.plot(xnew,tz_smooth2,label=name,color=col,linewidth=linewidth) #print df_name['TZ'] # Axis labeling and such #plt.legend(fontsize=8,loc='lower left') plt.ylabel('TZ/UZR') plt.xlabel('Year') plt.xlim((1960,2017)) plt.title('Third Base Defense') plt.tight_layout() # Save the plot as PNG filename = '3b_defense.png' fig.savefig(filename,dpi=400)
[ "numpy.ones", "pandas.read_csv", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "matplotlib.pyplot.figure", "scipy.ndimage.filters.gaussian_filter1d", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.title", "matplotlib.pyplot.xlim", "pandas.concat" ]
[((347, 368), 'pandas.read_csv', 'pd.read_csv', (['"""TZ.csv"""'], {}), "('TZ.csv')\n", (358, 368), True, 'import pandas as pd\n'), ((666, 688), 'pandas.read_csv', 'pd.read_csv', (['"""UZR.csv"""'], {}), "('UZR.csv')\n", (677, 688), True, 'import pandas as pd\n'), ((791, 811), 'pandas.concat', 'pd.concat', (['[df, df2]'], {}), '([df, df2])\n', (800, 811), True, 'import pandas as pd\n'), ((1373, 1385), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1383, 1385), True, 'from matplotlib import pyplot as plt\n'), ((2468, 2488), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""TZ/UZR"""'], {}), "('TZ/UZR')\n", (2478, 2488), True, 'from matplotlib import pyplot as plt\n'), ((2489, 2507), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (2499, 2507), True, 'from matplotlib import pyplot as plt\n'), ((2508, 2530), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(1960, 2017)'], {}), '((1960, 2017))\n', (2516, 2530), True, 'from matplotlib import pyplot as plt\n'), ((2530, 2561), 'matplotlib.pyplot.title', 'plt.title', (['"""Third Base Defense"""'], {}), "('Third Base Defense')\n", (2539, 2561), True, 'from matplotlib import pyplot as plt\n'), ((2562, 2580), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2578, 2580), True, 'from matplotlib import pyplot as plt\n'), ((2305, 2375), 'matplotlib.pyplot.plot', 'plt.plot', (['xnew', 'tz_smooth2'], {'label': 'name', 'color': 'col', 'linewidth': 'linewidth'}), '(xnew, tz_smooth2, label=name, color=col, linewidth=linewidth)\n', (2313, 2375), True, 'from matplotlib import pyplot as plt\n'), ((1895, 1939), 'scipy.ndimage.filters.gaussian_filter1d', 'gaussian_filter1d', (['df_name.TZ', 'SPLINE_FACTOR'], {}), '(df_name.TZ, SPLINE_FACTOR)\n', (1912, 1939), False, 'from scipy.ndimage.filters import gaussian_filter1d\n'), ((2053, 2099), 'scipy.ndimage.filters.gaussian_filter1d', 'gaussian_filter1d', (['tz_convolved', 'SPLINE_FACTOR'], {}), '(tz_convolved, SPLINE_FACTOR)\n', (2070, 
2099), False, 'from scipy.ndimage.filters import gaussian_filter1d\n'), ((1987, 1997), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (1994, 1997), True, 'import numpy as np\n')]
import pytest import numpy as np import pandas as pd from endaq.calc import rotation @pytest.mark.parametrize( 'quat, euler', [ ((0., 0., 0., 1.), (0., 0., 0.)), ((0., 0., 0., -1.), (0., 0., 0.)), ((1., 0., 0., 0.), (np.pi, 0., 0.)), ((0., 1., 0., 0.), (np.pi, 0., np.pi)), ((0., 0., 1., 0.), (0., 0., np.pi)), ] ) def test_quat_to_euler_data(quat, euler): df = pd.DataFrame([quat], index=[0], columns=['X', 'Y', 'Z', 'W']) target = pd.DataFrame([euler], index=[0], columns=['x', 'y', 'z']) pd.testing.assert_frame_equal(rotation.quaternion_to_euler(df), target) @pytest.mark.parametrize( 'mode, columns, raises', [ ('xyz', ['x', 'y', 'z'], None), ('roll-pitch-yaw', ['roll', 'pitch', 'yaw'], None), ('snap-crackle-pop', ['snap', 'crackle', 'pop'], pytest.raises(ValueError)), ] ) def test_quat_to_euler_modes(mode, columns, raises): df = pd.DataFrame([(0., 0., 0., 1.)], index=[0], columns=['X', 'Y', 'Z', 'W']) target = pd.DataFrame([(0., 0., 0.)], index=[0], columns=columns) if raises is None: pd.testing.assert_frame_equal(rotation.quaternion_to_euler(df, mode=mode), target) else: with raises: pd.testing.assert_frame_equal(rotation.quaternion_to_euler(df, mode=mode), target)
[ "pandas.DataFrame", "pytest.mark.parametrize", "endaq.calc.rotation.quaternion_to_euler", "pytest.raises" ]
[((90, 357), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""quat, euler"""', '[((0.0, 0.0, 0.0, 1.0), (0.0, 0.0, 0.0)), ((0.0, 0.0, 0.0, -1.0), (0.0, 0.0,\n 0.0)), ((1.0, 0.0, 0.0, 0.0), (np.pi, 0.0, 0.0)), ((0.0, 1.0, 0.0, 0.0),\n (np.pi, 0.0, np.pi)), ((0.0, 0.0, 1.0, 0.0), (0.0, 0.0, np.pi))]'], {}), "('quat, euler', [((0.0, 0.0, 0.0, 1.0), (0.0, 0.0, \n 0.0)), ((0.0, 0.0, 0.0, -1.0), (0.0, 0.0, 0.0)), ((1.0, 0.0, 0.0, 0.0),\n (np.pi, 0.0, 0.0)), ((0.0, 1.0, 0.0, 0.0), (np.pi, 0.0, np.pi)), ((0.0,\n 0.0, 1.0, 0.0), (0.0, 0.0, np.pi))])\n", (113, 357), False, 'import pytest\n'), ((494, 555), 'pandas.DataFrame', 'pd.DataFrame', (['[quat]'], {'index': '[0]', 'columns': "['X', 'Y', 'Z', 'W']"}), "([quat], index=[0], columns=['X', 'Y', 'Z', 'W'])\n", (506, 555), True, 'import pandas as pd\n'), ((569, 626), 'pandas.DataFrame', 'pd.DataFrame', (['[euler]'], {'index': '[0]', 'columns': "['x', 'y', 'z']"}), "([euler], index=[0], columns=['x', 'y', 'z'])\n", (581, 626), True, 'import pandas as pd\n'), ((1058, 1135), 'pandas.DataFrame', 'pd.DataFrame', (['[(0.0, 0.0, 0.0, 1.0)]'], {'index': '[0]', 'columns': "['X', 'Y', 'Z', 'W']"}), "([(0.0, 0.0, 0.0, 1.0)], index=[0], columns=['X', 'Y', 'Z', 'W'])\n", (1070, 1135), True, 'import pandas as pd\n'), ((1145, 1204), 'pandas.DataFrame', 'pd.DataFrame', (['[(0.0, 0.0, 0.0)]'], {'index': '[0]', 'columns': 'columns'}), '([(0.0, 0.0, 0.0)], index=[0], columns=columns)\n', (1157, 1204), True, 'import pandas as pd\n'), ((662, 694), 'endaq.calc.rotation.quaternion_to_euler', 'rotation.quaternion_to_euler', (['df'], {}), '(df)\n', (690, 694), False, 'from endaq.calc import rotation\n'), ((1270, 1313), 'endaq.calc.rotation.quaternion_to_euler', 'rotation.quaternion_to_euler', (['df'], {'mode': 'mode'}), '(df, mode=mode)\n', (1298, 1313), False, 'from endaq.calc import rotation\n'), ((944, 969), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (957, 969), False, 'import pytest\n'), ((1396, 1439), 
'endaq.calc.rotation.quaternion_to_euler', 'rotation.quaternion_to_euler', (['df'], {'mode': 'mode'}), '(df, mode=mode)\n', (1424, 1439), False, 'from endaq.calc import rotation\n')]
# Copyright 2021 <NAME> # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from toshiba_ac.http_api import ToshibaAcHttpApi from toshiba_ac.amqp_api import ToshibaAcAmqpApi from toshiba_ac.device import ToshibaAcDevice import asyncio class ToshibaAcDeviceManager: def __init__(self, user, password): self.http_api = ToshibaAcHttpApi(user, password) self.reg_info = None self.amqp_api = None self.devices = {} async def connect(self): await self.http_api.connect() self.reg_info = await self.http_api.register_client() self.amqp_api = ToshibaAcAmqpApi(self.reg_info.sas_token) self.amqp_api.register_command_handler('CMD_FCU_FROM_AC', self.handle_cmd_fcu_from_ac) self.amqp_api.register_command_handler('CMD_HEARTBEAT', self.handle_cmd_heartbeat) await self.amqp_api.connect() async def get_devices(self, refresh=False): # TODO handle refresh if not self.devices: devices_info = await self.http_api.get_devices() for device_info in devices_info: device = ToshibaAcDevice(device_info.ac_name, self.reg_info.device_id, device_info.ac_id, device_info.ac_unique_id, self.amqp_api,self.http_api ) await device.connect() await self.http_api.get_device_state(device_info.ac_id) self.devices[device.ac_unique_id] = device return list(self.devices.values()) def handle_cmd_fcu_from_ac(self, source_id, message_id, target_id, payload, timestamp): self.devices[source_id].handle_cmd_fcu_from_ac(payload) def handle_cmd_heartbeat(self, source_id, message_id, target_id, payload, timestamp): 
self.devices[source_id].handle_cmd_heartbeat(payload) async def shutdown(self): await asyncio.gather(*[device.shutdown() for device in self.devices.values()]) if self.amqp_api: await self.amqp_api.shutdown()
[ "toshiba_ac.amqp_api.ToshibaAcAmqpApi", "toshiba_ac.device.ToshibaAcDevice", "toshiba_ac.http_api.ToshibaAcHttpApi" ]
[((823, 855), 'toshiba_ac.http_api.ToshibaAcHttpApi', 'ToshibaAcHttpApi', (['user', 'password'], {}), '(user, password)\n', (839, 855), False, 'from toshiba_ac.http_api import ToshibaAcHttpApi\n'), ((1096, 1137), 'toshiba_ac.amqp_api.ToshibaAcAmqpApi', 'ToshibaAcAmqpApi', (['self.reg_info.sas_token'], {}), '(self.reg_info.sas_token)\n', (1112, 1137), False, 'from toshiba_ac.amqp_api import ToshibaAcAmqpApi\n'), ((1601, 1742), 'toshiba_ac.device.ToshibaAcDevice', 'ToshibaAcDevice', (['device_info.ac_name', 'self.reg_info.device_id', 'device_info.ac_id', 'device_info.ac_unique_id', 'self.amqp_api', 'self.http_api'], {}), '(device_info.ac_name, self.reg_info.device_id, device_info.\n ac_id, device_info.ac_unique_id, self.amqp_api, self.http_api)\n', (1616, 1742), False, 'from toshiba_ac.device import ToshibaAcDevice\n')]
"""JSON-LD utilities. """ __all__ = ('encode_jsonld', 'JsonLdEncoder', 'decode_jsonld') import datetime import json def encode_jsonld(jsonld_dataset, **kwargs): """Encode a JSON-LD dataset into a string. Parameters ---------- jsonld_dataset : `dict` A JSON-LD dataset. kwargs Keyword argument passed to the encoder. See `json.JSONEncoder`. Returns ------- encoded_dataset : `str` The JSON-LD dataset encoded as a string. """ encoder = JsonLdEncoder(**kwargs) return encoder.encode(jsonld_dataset) class JsonLdEncoder(json.JSONEncoder): """Customized JSON encoder (replaces `json.JSONEncoder`) that supports datetime encoding. """ def default(self, obj): """Encode values as JSON strings. This method overrides the default implementation from `json.JSONEncoder`. """ if isinstance(obj, datetime.datetime): return self._encode_datetime(obj) # Fallback to the default encoding return json.JSONEncoder.default(self, obj) def _encode_datetime(self, dt): """Encode a datetime in the format '%Y-%m-%dT%H:%M:%SZ'. The datetime can be naieve (doesn't have timezone info) or aware (it does have a tzinfo attribute set). Regardless, the datetime is transformed into UTC. """ if dt.tzinfo is None: # Force it to be a UTC datetime dt = dt.replace(tzinfo=datetime.timezone.utc) # Convert to UTC (no matter what) dt = dt.astimezone(datetime.timezone.utc) return dt.strftime('%Y-%m-%dT%H:%M:%SZ') def decode_jsonld(jsonld_text): """Decode a JSON-LD dataset, including decoding datetime strings into `datetime.datetime` objects. Parameters ---------- encoded_dataset : `str` The JSON-LD dataset encoded as a string. Returns ------- jsonld_dataset : `dict` A JSON-LD dataset. 
Examples -------- >>> doc = '{"dt": "2018-01-01T12:00:00Z"}' >>> decode_jsonld(doc) {'dt': datetime.datetime(2018, 1, 1, 12, 0, tzinfo=datetime.timezone.utc)} """ decoder = json.JSONDecoder(object_pairs_hook=_decode_object_pairs) return decoder.decode(jsonld_text) def _decode_object_pairs(pairs): doc = {} for key, value in pairs: if isinstance(value, str): # attempt to parse as a datetime try: value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ') # ensure timezone is UTC if value.tzinfo is None: value = value.replace(tzinfo=datetime.timezone.utc) value = value.astimezone(datetime.timezone.utc) except ValueError: pass doc[key] = value return doc
[ "datetime.datetime.strptime", "json.JSONDecoder", "json.JSONEncoder.default" ]
[((2180, 2236), 'json.JSONDecoder', 'json.JSONDecoder', ([], {'object_pairs_hook': '_decode_object_pairs'}), '(object_pairs_hook=_decode_object_pairs)\n', (2196, 2236), False, 'import json\n'), ((1044, 1079), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'obj'], {}), '(self, obj)\n', (1068, 1079), False, 'import json\n'), ((2474, 2529), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['value', '"""%Y-%m-%dT%H:%M:%SZ"""'], {}), "(value, '%Y-%m-%dT%H:%M:%SZ')\n", (2500, 2529), False, 'import datetime\n')]
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf def loss2(logits, labels, num_classes, scope, head=None): with tf.name_scope(scope): logits = tf.reshape(logits, (-1, num_classes)) softmax = tf.nn.softmax(logits) + 1e-4 labels = tf.to_float(tf.one_hot(tf.reshape(labels, [-1]), num_classes)) eps = 1e-2 labels = (1-eps)*tf.to_float(tf.reshape(labels, (-1, num_classes))) + eps/num_classes if head is not None: cross_entropy = -tf.reduce_sum(tf.multiply(labels * tf.log(softmax), head), reduction_indices=[1]) else: cross_entropy = -tf.reduce_sum( labels * tf.log(softmax), reduction_indices=[1]) return tf.reduce_mean(cross_entropy)
[ "tensorflow.name_scope", "tensorflow.nn.softmax", "tensorflow.reshape", "tensorflow.reduce_mean", "tensorflow.log" ]
[((838, 867), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cross_entropy'], {}), '(cross_entropy)\n', (852, 867), True, 'import tensorflow as tf\n'), ((201, 221), 'tensorflow.name_scope', 'tf.name_scope', (['scope'], {}), '(scope)\n', (214, 221), True, 'import tensorflow as tf\n'), ((240, 277), 'tensorflow.reshape', 'tf.reshape', (['logits', '(-1, num_classes)'], {}), '(logits, (-1, num_classes))\n', (250, 277), True, 'import tensorflow as tf\n'), ((296, 317), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {}), '(logits)\n', (309, 317), True, 'import tensorflow as tf\n'), ((366, 390), 'tensorflow.reshape', 'tf.reshape', (['labels', '[-1]'], {}), '(labels, [-1])\n', (376, 390), True, 'import tensorflow as tf\n'), ((462, 499), 'tensorflow.reshape', 'tf.reshape', (['labels', '(-1, num_classes)'], {}), '(labels, (-1, num_classes))\n', (472, 499), True, 'import tensorflow as tf\n'), ((786, 801), 'tensorflow.log', 'tf.log', (['softmax'], {}), '(softmax)\n', (792, 801), True, 'import tensorflow as tf\n'), ((613, 628), 'tensorflow.log', 'tf.log', (['softmax'], {}), '(softmax)\n', (619, 628), True, 'import tensorflow as tf\n')]
import pytest from pretalx.event.models import Event @pytest.mark.django_db def test_shred_used_event(resource, answered_choice_question, personal_answer, rejected_submission, deleted_submission, mail, sent_mail, room_availability, slot, unreleased_slot, past_slot, feedback, canceled_talk, review, information, other_event): assert Event.objects.count() == 2 rejected_submission.event.organiser.shred() assert Event.objects.count() == 1
[ "pretalx.event.models.Event.objects.count" ]
[((340, 361), 'pretalx.event.models.Event.objects.count', 'Event.objects.count', ([], {}), '()\n', (359, 361), False, 'from pretalx.event.models import Event\n'), ((426, 447), 'pretalx.event.models.Event.objects.count', 'Event.objects.count', ([], {}), '()\n', (445, 447), False, 'from pretalx.event.models import Event\n')]
# -*- coding: utf-8 -*- """ Setup for pytrafik """ from distutils.core import setup setup( name='pytrafik', version='0.2.1', description='PyTrafik', long_description='Wrapper for Västtrafik public API.', url='https://github.com/axelniklasson/PyTrafik', download_url = 'https://github.com/axelniklasson/PyTrafik/tarball/0.2', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Home Automation', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], keywords='vasttrafik västtrafik', install_requires=['requests>=2.9.1'], packages=['pytrafik'], zip_safe=True)
[ "distutils.core.setup" ]
[((86, 901), 'distutils.core.setup', 'setup', ([], {'name': '"""pytrafik"""', 'version': '"""0.2.1"""', 'description': '"""PyTrafik"""', 'long_description': '"""Wrapper for Västtrafik public API."""', 'url': '"""https://github.com/axelniklasson/PyTrafik"""', 'download_url': '"""https://github.com/axelniklasson/PyTrafik/tarball/0.2"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'classifiers': "['Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'Topic :: Home Automation', 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5']", 'keywords': '"""vasttrafik västtrafik"""', 'install_requires': "['requests>=2.9.1']", 'packages': "['pytrafik']", 'zip_safe': '(True)'}), "(name='pytrafik', version='0.2.1', description='PyTrafik',\n long_description='Wrapper for Västtrafik public API.', url=\n 'https://github.com/axelniklasson/PyTrafik', download_url=\n 'https://github.com/axelniklasson/PyTrafik/tarball/0.2', author=\n '<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[\n 'Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'Topic :: Home Automation', 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5'], keywords=\n 'vasttrafik västtrafik', install_requires=['requests>=2.9.1'], packages\n =['pytrafik'], zip_safe=True)\n", (91, 901), False, 'from distutils.core import setup\n')]
#coding=utf-8 ''' Created on 2015-10-23 @author: Devuser ''' from doraemon.ci.models import CITask,CITaskHistory from gatesidelib.common.simplelogger import SimpleLogger from django.contrib.admin.models import DELETION,CHANGE,ADDITION from doraemon.project.models import Product,Project,Tag from business.project.project_service import ProjectService from business.project.version_service import VersionService from business.ci.ci_task_parameter_service import CITaskParameterService from business.ci.ci_task_config_service import CITaskConfigService from business.ci.ci_task_history_service import CITaskHistoryService from business.business_service import BusinessService from business.common.redis_service import RedisService from business.common.mongodb_service import MongoDBService from doraemon.api.ci.mongo_models import PackgeMongoFile from doraemon.home.models import TaskQueue from gatesidelib.datetimehelper import DateTimeHelper from business.common.system_config_service import SystemConfigService from doraemon.ci.datamodels.task_queue_command_enum import TaskQueueCommandTypeEnum from doraemon.ci.datamodels.task_queue_status_enum import TaskQueueStatusEnum import uuid from business.common.file_info_service import FileInfoService class CITaskService(BusinessService): ''' classdocs ''' @staticmethod def get_products_include_me(request): my_projects=ProjectService.get_projects_include_me(request) prodcut_ids=list() for project in my_projects: if project.Product not in prodcut_ids: prodcut_ids.append(project.Product) return Product.objects.all().filter(id__in=prodcut_ids) @staticmethod def get_ci_tasks_recently_build(request,task_type,product_id): result = list() try: result=CITaskService.get_product_ci_tasks(request, task_type, product_id).filter(LastHistory__exclude=(0,)) except Exception as ex: SimpleLogger.exception(ex) return result @staticmethod def get_my_ci_tasks(request,task_type,product_id): 
prodcut_tasks=CITaskService.get_product_ci_tasks(request,task_type,product_id) return prodcut_tasks @staticmethod def get_latest_history(task_id): result=None historys=CITaskHistory.objects.all().filter(CITaskID=task_id).order_by('-id') if historys: result=historys[0] return result @staticmethod def get_product_ci_tasks(request,task_type,product_id): result = list() try: my_projects=ProjectService.get_projects_include_me(request) my_project_ids=[project.id for project in my_projects] if product_id.upper()=="ALL": if task_type!=0: result=CITask.objects.all().filter(TaskType=task_type).filter(Project__in=my_project_ids).order_by('-LastHistory') else: result=CITask.objects.all().filter(Project__in=my_project_ids).order_by('-LastHistory') else: product_projects=Project.objects.all().filter(Product=int(product_id)).filter(id__in=my_project_ids) result=CITask.objects.all().filter(Project__in=product_projects).filter(TaskType=task_type).order_by('-LastHistory') except Exception as ex: SimpleLogger.error(ex) return result @staticmethod def search_tasks(request): result=list() keyword=request.POST.get('keyword','all') #get search keyword task_type=int(request.POST.get('task_type',0)) #get task type product_id=request.POST.get('product_id','all') # get product type # all my tasks include the built task recently and the tasks belong to product built_tasks=CITaskService.get_ci_tasks_recently_build(request,task_type,product_id) product_tasks=CITaskService.get_product_ci_tasks(request,task_type,product_id) # if keyword is all,then return all my tasks # else filter with keyword and return the rest tasks if keyword.upper() =='ALL': result.extend(built_tasks) for task in product_tasks: if task not in result: result.append(task) else: built_tasks=[task for task in built_tasks if keyword.upper() in task.TaskName.upper()] product_task=product_tasks.filter(TaskName__icontains=keyword) result.extend(built_tasks) for task in product_task: if task not in result: result.append(task) return 
result @staticmethod def filter_tasks(request): result=CITaskService.search_tasks(request) project_ids=request.POST.get('project_id') tag_ids=request.POST.get('tag_id') if project_ids!="": result=[task for task in result if task.Project in eval(project_ids)] match_tag_task=list() if tag_ids!="": for task in result: for tag in eval(tag_ids): if task.Tags and tag in eval(task.Tags): match_tag_task.append(task) break; result=match_tag_task return result @staticmethod def create_ci_task(validate_data,user): task_type=int(validate_data.get('TaskType')) ci_task=CITask() if task_type: ci_task=CITask() ci_task=CITaskService.init_ci_task(validate_data, ci_task) ci_task.IsActive=1 ci_task.Creator=user.id ci_task.TaskConfig=CITaskConfigService.create_config() ci_task.save() else: ci_task=CITaskService.copy_ci_task(validate_data,user) CITaskService.log_create_activity(user, ci_task) return ci_task @staticmethod def save_task_config(request,ci_taskid): ci_task=CITask.objects.get(int(ci_taskid)) CITaskConfigService.save_task_config(request, ci_task) CITaskService.log_change_activity(request.user, ci_task) @staticmethod def copy_ci_task(validate_data,user): task_id=int(validate_data.get('CopyTaskID')) ci_task=CITask.objects.get(int(task_id)) ci_task.TaskName=validate_data.get('TaskName') ci_task.Project=validate_data.get('Project') ci_task.BuildVersion=0 ci_task.id=None ci_task.Creator=user.id ci_task.save() new_config=CITaskConfigService.copy_config(ci_task.TaskConfig,ci_task.id,ci_task.TaskName) ci_task.TaskConfig=new_config ci_task.save() CITaskParameterService.copy_parameter_group_form_task(task_id,ci_task.id) CITaskService.log_create_activity(user, ci_task) return ci_task @staticmethod def delete_ci_task(request,task_id): ci_task=CITask.objects.get(int(task_id)) CITaskConfigService.delete_config(ci_task.TaskConfig) CITaskHistoryService.clean_all_history(task_id,True) ci_task.IsActive=0 ci_task.save() CITaskService.log_delete_activity(request.user, ci_task) @staticmethod def 
clean_task_history(request,task_id): ci_task=CITask.objects.get(int(task_id)) CITaskHistoryService.clean_all_history(task_id,False) CITaskService.log_clean_activity(request.user, ci_task) @staticmethod def start_ci_task(request,task_id,parameter_group_id,project_version): ci_task=CITask.objects.get(int(task_id)) queuetask=TaskQueue() queuetask.EnqueueTime=DateTimeHelper.get_local_now() queuetask.TaskType=ci_task.TaskType queuetask.Command=TaskQueueCommandTypeEnum.TaskQueueCommandType_Start queuetask.Priority=2 queuetask.Status =TaskQueueStatusEnum.QueueTaskStatus_New queuetask.TaskID=int(task_id) queuetask.TaskUUID=uuid.uuid1() if parameter_group_id: queuetask.BuildParameterID=parameter_group_id else: queuetask.BuildParameterID=CITaskParameterService.default_parameter_group(int(task_id)) queuetask.save() ci_task.BuildVersion=ci_task.BuildVersion+1 if str(project_version) == '0': project_version = VersionService.get_latest_version(ci_task.Project) if project_version: project_version = project_version.id else: project_version = 0 ci_task.LastHistory = CITaskService.save_ci_taskhistory(request,queuetask,ci_task,project_version) ci_task.save(update_fields=['BuildVersion','LastHistory']) message="任务ID为:"+str(task_id)+"的执行指令已经下发,请耐心等待。" user_id=0 if request.user.id: user_id=request.user.id CITaskService.log_build_activity(user_id, ci_task) CITaskService.send_task_enqueue_message() return [queuetask.id,message,queuetask.TaskUUID] @staticmethod def save_ci_taskhistory(request,task_queue,ci_task,project_version): if not project_version: project_latest_version=CITaskService.get_project_latest_version(ci_task.Project) else: project_latest_version=int(project_version) task_history=CITaskHistory() task_history.CITaskID=ci_task.id task_history.TaskUUID = task_queue.TaskUUID task_history.TaskQueueID=task_queue.id task_history.IsActive=1 task_history.BuildStatus=0 if request.user.id!=None: task_history.StartedBy=request.user.id else: task_history.StartedBy=0 
task_history.BuildVersion=ci_task.BuildVersion task_history.ProjectVersion=project_latest_version task_history.BuildParameterID=task_queue.BuildParameterID task_history.save() return task_history.id @staticmethod def update_ci_taskhistory(tq_id,file_id,file_type): task_history=CITaskHistory.objects.get_by_tqid(tq_id) if file_type=='1': if task_history.PackageID: task_history.PackageID=task_history.PackageID+str(file_id)+',' else: task_history.PackageID=str(file_id)+',' if file_type=='2': if task_history.LogFileID: task_history.LogFileID=task_history.LogFileID+str(file_id)+',' else: task_history.LogFileID=str(file_id)+',' task_history.save() @staticmethod def stop_ci_task(request,task_id): command_type=TaskQueueCommandTypeEnum.TaskQueueCommandType_Stop tq_uuid=request.GET.get("TaskUUID","") tq_tasks = TaskQueue.objects.all().filter(TaskUUID=tq_uuid).filter(Command=command_type) if not tq_tasks.exists(): ci_task=CITask.objects.get(int(task_id)) queuetask=TaskQueue() queuetask.EnqueueTime=DateTimeHelper.get_local_now() queuetask.TaskType=ci_task.TaskType queuetask.Command=command_type queuetask.Priority=7 queuetask.Status =TaskQueueStatusEnum.QueueTaskStatus_New queuetask.TaskID=int(task_id) queuetask.TaskUUID=tq_uuid queuetask.save() else: queuetask = tq_tasks[0] message="任务ID为:"+str(task_id)+"的取消执行指令已经下发,请耐心等待。" CITaskService.send_task_enqueue_message() return [queuetask.id,message,queuetask.TaskUUID] @staticmethod def send_task_enqueue_message(): RedisService.websocket_publish_message("TASKSTATUSCHANGE",'Task enqueue now!') @staticmethod def init_ci_task(formdata,ci_task): tmp_ci_task=ci_task tmp_ci_task.TaskName=formdata.get('TaskName') tmp_ci_task.Project=formdata.get('Project') tmp_ci_task.TaskType=formdata.get('TaskType') historyCleanStrategy=formdata.get('HistoryCleanStrategy',5) if historyCleanStrategy=="": historyCleanStrategy=5 tmp_ci_task.HistoryCleanStrategy=historyCleanStrategy tmp_ci_task.Schedule=formdata.get('time_trigger',"") if 
int(tmp_ci_task.TaskType)==3: tmp_ci_task.DeployService=formdata.get('DeployService',0) tmp_ci_task.Description=formdata.get('Description',"") return tmp_ci_task @staticmethod def upload_package(request): result=0 tq_id=request.POST.get('tq_id','') file_type=request.POST.get('file_type','') upload_file=request.FILES['upload_file'] max_file_size=SystemConfigService.get_upload_file_maxsize() file_wihte_list=SystemConfigService.get_file_type_white_list() if CITaskService.validate_upload_file(upload_file,max_file_size,file_wihte_list): mongo_fileid=MongoDBService.save_file(upload_file,PackgeMongoFile) result=FileInfoService.add_file(0,mongo_fileid, upload_file.name,1,0,upload_file.size) CITaskService.update_ci_taskhistory(tq_id,result,file_type) return result @staticmethod def download_package(request,file_path): return PackgeMongoFile.objects.get(file_path) @staticmethod def update_property(request,taskid): task=CITask.objects.get(taskid) task.Tags=request.POST.get("Tags") update_fields=list() for field in request.POST: update_fields.append(field) task.save(update_fields=update_fields) CITaskService.log_change_activity(request.user,task) @staticmethod def get_avalible_menu_tags(menu_type): ''' menu_type: 1: ci_task,2:task_history,3:agent ''' tag_types=[1] if str(menu_type)=="1": tag_types=[1,4] if str(menu_type)=="2": tag_types=[2] if str(menu_type)=="3": tag_types=[3] if str(menu_type)=="4": tag_types=[4] return Tag.objects.all().filter(TagType__in=tag_types) @staticmethod def get_agent_filter__tags(): return Tag.objects.all().filter(TagType__in=[3]) @staticmethod def log_create_activity(user,ci_task): CITask.objects.log_action(user.id,ci_task.id,ci_task.TaskName,ADDITION,"创建了CI任务",ci_task.Project,CITaskService.ActionLogType.CI) @staticmethod def log_delete_activity(user,ci_task): CITask.objects.log_action(user.id,ci_task.id,ci_task.TaskName,DELETION,"删除了CI任务",ci_task.Project,CITaskService.ActionLogType.CI) @staticmethod def log_clean_activity(user,ci_task): 
CITask.objects.log_action(user.id,ci_task.id,ci_task.TaskName,DELETION,"删除了历史记录",ci_task.Project,CITaskService.ActionLogType.CI) @staticmethod def log_change_activity(user,ci_task): CITask.objects.log_action(user.id,ci_task.id,ci_task.TaskName,CHANGE,"修改了CI任务",ci_task.Project,CITaskService.ActionLogType.CI) @staticmethod def log_build_activity(user_id,ci_task): CITask.objects.log_action(user_id,ci_task.id,ci_task.TaskName,CHANGE,"构建了CI任务",ci_task.Project,CITaskService.ActionLogType.CI)
[ "doraemon.ci.models.CITaskHistory", "doraemon.project.models.Project.objects.all", "doraemon.ci.models.CITask", "doraemon.home.models.TaskQueue.objects.all", "business.ci.ci_task_config_service.CITaskConfigService.copy_config", "business.project.project_service.ProjectService.get_projects_include_me", "...
[((1414, 1461), 'business.project.project_service.ProjectService.get_projects_include_me', 'ProjectService.get_projects_include_me', (['request'], {}), '(request)\n', (1452, 1461), False, 'from business.project.project_service import ProjectService\n'), ((5529, 5537), 'doraemon.ci.models.CITask', 'CITask', ([], {}), '()\n', (5535, 5537), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((6127, 6181), 'business.ci.ci_task_config_service.CITaskConfigService.save_task_config', 'CITaskConfigService.save_task_config', (['request', 'ci_task'], {}), '(request, ci_task)\n', (6163, 6181), False, 'from business.ci.ci_task_config_service import CITaskConfigService\n'), ((6661, 6747), 'business.ci.ci_task_config_service.CITaskConfigService.copy_config', 'CITaskConfigService.copy_config', (['ci_task.TaskConfig', 'ci_task.id', 'ci_task.TaskName'], {}), '(ci_task.TaskConfig, ci_task.id, ci_task.\n TaskName)\n', (6692, 6747), False, 'from business.ci.ci_task_config_service import CITaskConfigService\n'), ((6810, 6884), 'business.ci.ci_task_parameter_service.CITaskParameterService.copy_parameter_group_form_task', 'CITaskParameterService.copy_parameter_group_form_task', (['task_id', 'ci_task.id'], {}), '(task_id, ci_task.id)\n', (6863, 6884), False, 'from business.ci.ci_task_parameter_service import CITaskParameterService\n'), ((7090, 7143), 'business.ci.ci_task_config_service.CITaskConfigService.delete_config', 'CITaskConfigService.delete_config', (['ci_task.TaskConfig'], {}), '(ci_task.TaskConfig)\n', (7123, 7143), False, 'from business.ci.ci_task_config_service import CITaskConfigService\n'), ((7152, 7205), 'business.ci.ci_task_history_service.CITaskHistoryService.clean_all_history', 'CITaskHistoryService.clean_all_history', (['task_id', '(True)'], {}), '(task_id, True)\n', (7190, 7205), False, 'from business.ci.ci_task_history_service import CITaskHistoryService\n'), ((7445, 7499), 'business.ci.ci_task_history_service.CITaskHistoryService.clean_all_history', 
'CITaskHistoryService.clean_all_history', (['task_id', '(False)'], {}), '(task_id, False)\n', (7483, 7499), False, 'from business.ci.ci_task_history_service import CITaskHistoryService\n'), ((7737, 7748), 'doraemon.home.models.TaskQueue', 'TaskQueue', ([], {}), '()\n', (7746, 7748), False, 'from doraemon.home.models import TaskQueue\n'), ((7779, 7809), 'gatesidelib.datetimehelper.DateTimeHelper.get_local_now', 'DateTimeHelper.get_local_now', ([], {}), '()\n', (7807, 7809), False, 'from gatesidelib.datetimehelper import DateTimeHelper\n'), ((8092, 8104), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (8102, 8104), False, 'import uuid\n'), ((9436, 9451), 'doraemon.ci.models.CITaskHistory', 'CITaskHistory', ([], {}), '()\n', (9449, 9451), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((10131, 10171), 'doraemon.ci.models.CITaskHistory.objects.get_by_tqid', 'CITaskHistory.objects.get_by_tqid', (['tq_id'], {}), '(tq_id)\n', (10164, 10171), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((11689, 11768), 'business.common.redis_service.RedisService.websocket_publish_message', 'RedisService.websocket_publish_message', (['"""TASKSTATUSCHANGE"""', '"""Task enqueue now!"""'], {}), "('TASKSTATUSCHANGE', 'Task enqueue now!')\n", (11727, 11768), False, 'from business.common.redis_service import RedisService\n'), ((12732, 12777), 'business.common.system_config_service.SystemConfigService.get_upload_file_maxsize', 'SystemConfigService.get_upload_file_maxsize', ([], {}), '()\n', (12775, 12777), False, 'from business.common.system_config_service import SystemConfigService\n'), ((12802, 12848), 'business.common.system_config_service.SystemConfigService.get_file_type_white_list', 'SystemConfigService.get_file_type_white_list', ([], {}), '()\n', (12846, 12848), False, 'from business.common.system_config_service import SystemConfigService\n'), ((13294, 13332), 'doraemon.api.ci.mongo_models.PackgeMongoFile.objects.get', 'PackgeMongoFile.objects.get', 
(['file_path'], {}), '(file_path)\n', (13321, 13332), False, 'from doraemon.api.ci.mongo_models import PackgeMongoFile\n'), ((13411, 13437), 'doraemon.ci.models.CITask.objects.get', 'CITask.objects.get', (['taskid'], {}), '(taskid)\n', (13429, 13437), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((14395, 14533), 'doraemon.ci.models.CITask.objects.log_action', 'CITask.objects.log_action', (['user.id', 'ci_task.id', 'ci_task.TaskName', 'ADDITION', '"""创建了CI任务"""', 'ci_task.Project', 'CITaskService.ActionLogType.CI'], {}), "(user.id, ci_task.id, ci_task.TaskName, ADDITION,\n '创建了CI任务', ci_task.Project, CITaskService.ActionLogType.CI)\n", (14420, 14533), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((14598, 14736), 'doraemon.ci.models.CITask.objects.log_action', 'CITask.objects.log_action', (['user.id', 'ci_task.id', 'ci_task.TaskName', 'DELETION', '"""删除了CI任务"""', 'ci_task.Project', 'CITaskService.ActionLogType.CI'], {}), "(user.id, ci_task.id, ci_task.TaskName, DELETION,\n '删除了CI任务', ci_task.Project, CITaskService.ActionLogType.CI)\n", (14623, 14736), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((14800, 14938), 'doraemon.ci.models.CITask.objects.log_action', 'CITask.objects.log_action', (['user.id', 'ci_task.id', 'ci_task.TaskName', 'DELETION', '"""删除了历史记录"""', 'ci_task.Project', 'CITaskService.ActionLogType.CI'], {}), "(user.id, ci_task.id, ci_task.TaskName, DELETION,\n '删除了历史记录', ci_task.Project, CITaskService.ActionLogType.CI)\n", (14825, 14938), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((15008, 15144), 'doraemon.ci.models.CITask.objects.log_action', 'CITask.objects.log_action', (['user.id', 'ci_task.id', 'ci_task.TaskName', 'CHANGE', '"""修改了CI任务"""', 'ci_task.Project', 'CITaskService.ActionLogType.CI'], {}), "(user.id, ci_task.id, ci_task.TaskName, CHANGE,\n '修改了CI任务', ci_task.Project, CITaskService.ActionLogType.CI)\n", (15033, 15144), False, 'from doraemon.ci.models import 
CITask, CITaskHistory\n'), ((15211, 15347), 'doraemon.ci.models.CITask.objects.log_action', 'CITask.objects.log_action', (['user_id', 'ci_task.id', 'ci_task.TaskName', 'CHANGE', '"""构建了CI任务"""', 'ci_task.Project', 'CITaskService.ActionLogType.CI'], {}), "(user_id, ci_task.id, ci_task.TaskName, CHANGE,\n '构建了CI任务', ci_task.Project, CITaskService.ActionLogType.CI)\n", (15236, 15347), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((2610, 2657), 'business.project.project_service.ProjectService.get_projects_include_me', 'ProjectService.get_projects_include_me', (['request'], {}), '(request)\n', (2648, 2657), False, 'from business.project.project_service import ProjectService\n'), ((5580, 5588), 'doraemon.ci.models.CITask', 'CITask', ([], {}), '()\n', (5586, 5588), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((5758, 5793), 'business.ci.ci_task_config_service.CITaskConfigService.create_config', 'CITaskConfigService.create_config', ([], {}), '()\n', (5791, 5793), False, 'from business.ci.ci_task_config_service import CITaskConfigService\n'), ((8455, 8505), 'business.project.version_service.VersionService.get_latest_version', 'VersionService.get_latest_version', (['ci_task.Project'], {}), '(ci_task.Project)\n', (8488, 8505), False, 'from business.project.version_service import VersionService\n'), ((11026, 11037), 'doraemon.home.models.TaskQueue', 'TaskQueue', ([], {}), '()\n', (11035, 11037), False, 'from doraemon.home.models import TaskQueue\n'), ((11072, 11102), 'gatesidelib.datetimehelper.DateTimeHelper.get_local_now', 'DateTimeHelper.get_local_now', ([], {}), '()\n', (11100, 11102), False, 'from gatesidelib.datetimehelper import DateTimeHelper\n'), ((12964, 13018), 'business.common.mongodb_service.MongoDBService.save_file', 'MongoDBService.save_file', (['upload_file', 'PackgeMongoFile'], {}), '(upload_file, PackgeMongoFile)\n', (12988, 13018), False, 'from business.common.mongodb_service import MongoDBService\n'), ((13037, 
13124), 'business.common.file_info_service.FileInfoService.add_file', 'FileInfoService.add_file', (['(0)', 'mongo_fileid', 'upload_file.name', '(1)', '(0)', 'upload_file.size'], {}), '(0, mongo_fileid, upload_file.name, 1, 0,\n upload_file.size)\n', (13061, 13124), False, 'from business.common.file_info_service import FileInfoService\n'), ((1643, 1664), 'doraemon.project.models.Product.objects.all', 'Product.objects.all', ([], {}), '()\n', (1662, 1664), False, 'from doraemon.project.models import Product, Project, Tag\n'), ((1988, 2014), 'gatesidelib.common.simplelogger.SimpleLogger.exception', 'SimpleLogger.exception', (['ex'], {}), '(ex)\n', (2010, 2014), False, 'from gatesidelib.common.simplelogger import SimpleLogger\n'), ((3377, 3399), 'gatesidelib.common.simplelogger.SimpleLogger.error', 'SimpleLogger.error', (['ex'], {}), '(ex)\n', (3395, 3399), False, 'from gatesidelib.common.simplelogger import SimpleLogger\n'), ((14141, 14158), 'doraemon.project.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (14156, 14158), False, 'from doraemon.project.models import Product, Project, Tag\n'), ((14261, 14278), 'doraemon.project.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (14276, 14278), False, 'from doraemon.project.models import Product, Project, Tag\n'), ((2323, 2350), 'doraemon.ci.models.CITaskHistory.objects.all', 'CITaskHistory.objects.all', ([], {}), '()\n', (2348, 2350), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((10839, 10862), 'doraemon.home.models.TaskQueue.objects.all', 'TaskQueue.objects.all', ([], {}), '()\n', (10860, 10862), False, 'from doraemon.home.models import TaskQueue\n'), ((3116, 3137), 'doraemon.project.models.Project.objects.all', 'Project.objects.all', ([], {}), '()\n', (3135, 3137), False, 'from doraemon.project.models import Product, Project, Tag\n'), ((2984, 3004), 'doraemon.ci.models.CITask.objects.all', 'CITask.objects.all', ([], {}), '()\n', (3002, 3004), False, 'from 
doraemon.ci.models import CITask, CITaskHistory\n'), ((3223, 3243), 'doraemon.ci.models.CITask.objects.all', 'CITask.objects.all', ([], {}), '()\n', (3241, 3243), False, 'from doraemon.ci.models import CITask, CITaskHistory\n'), ((2827, 2847), 'doraemon.ci.models.CITask.objects.all', 'CITask.objects.all', ([], {}), '()\n', (2845, 2847), False, 'from doraemon.ci.models import CITask, CITaskHistory\n')]
import struct from pox.lib.addresses import EthAddr, IPAddr """ Utility to convert MAC and IP to/from integers. """ def eth_to_int(addr): if not isinstance(addr, EthAddr): return None value = 0 raw = addr.toRaw() for i in range(len(raw)): byte_shift = 5 - i byte = raw[i] byte_value = struct.unpack("B", byte)[0] value += (byte_value << (8 * byte_shift)) return value def int_to_eth(addr): addr = long(addr) val = [] for _ in range(6): val.insert(0, struct.pack("B", (addr & 0xFF))) addr >>= 8 return EthAddr(''.join(val)) def ip_to_uint(addr): if isinstance(addr, tuple): addr = addr[0] if not isinstance(addr, IPAddr): return None return addr.toUnsigned() def uint_to_ip(addr): return IPAddr(addr)
[ "struct.unpack", "pox.lib.addresses.IPAddr", "struct.pack" ]
[((821, 833), 'pox.lib.addresses.IPAddr', 'IPAddr', (['addr'], {}), '(addr)\n', (827, 833), False, 'from pox.lib.addresses import EthAddr, IPAddr\n'), ((337, 361), 'struct.unpack', 'struct.unpack', (['"""B"""', 'byte'], {}), "('B', byte)\n", (350, 361), False, 'import struct\n'), ((536, 564), 'struct.pack', 'struct.pack', (['"""B"""', '(addr & 255)'], {}), "('B', addr & 255)\n", (547, 564), False, 'import struct\n')]
import os import json import glob from rcnn.utils import get_path_with_annotation,get_path_with_annotation_ratio from rcnn.utils import get_weight_path __disease__ = ['Covid-Seg','Lung_Tumor'] __net__ = ['rcnn_unet'] __mode__ = ['cls','seg','mtl'] json_path = { 'Cervical':'/staff/shijun/torch_projects/Med_Seg/converter/dcm_converter/static_files/Cervical_Oar.json', 'Nasopharynx':'/staff/shijun/torch_projects/Med_Seg/converter/dcm_converter/static_files/Nasopharynx_Oar.json', 'Structseg_HaN':'/staff/shijun/torch_projects/Med_Seg/converter/nii_converter/static_files/Structseg_HaN.json', 'Structseg_THOR':'/staff/shijun/torch_projects/Med_Seg/converter/nii_converter/static_files/Structseg_THOR.json', 'SegTHOR':'/staff/shijun/torch_projects/Med_Seg/converter/nii_converter/static_files/SegTHOR.json', 'Covid-Seg':'/staff/shijun/torch_projects/Med_Seg/converter/nii_converter/static_files/Covid-Seg.json', # competition 'Lung':'/staff/shijun/torch_projects/Med_Seg/converter/dcm_converter/static_files/Lung_Oar.json', 'Lung_Tumor':'/staff/shijun/torch_projects/Med_Seg/converter/dcm_converter/static_files/Lung_Tumor.json', 'EGFR':'/staff/shijun/torch_projects/Med_Seg/converter/dcm_converter/static_files/EGFR.json', } DISEASE = 'Lung' MODE = 'seg' NET_NAME = 'rcnn_unet' VERSION = 'v1.3-3x8x512' with open(json_path[DISEASE], 'r') as fp: info = json.load(fp) DEVICE = '0,1' # Must be True when pre-training and inference PRE_TRAINED = False CKPT_POINT = False # 1,2,...,8 CURRENT_FOLD = 1 GPU_NUM = len(DEVICE.split(',')) FOLD_NUM = 5 # Arguments for trainer initialization #--------------------------------- single or multiple ROI_NUMBER = 4# or 0,1,2,3,4,5,6 NUM_CLASSES = info['annotation_num'] + 1 # 2 for binary, more for multiple classes if ROI_NUMBER is not None: NUM_CLASSES = 2 ROI_NAME = info['annotation_list'][ROI_NUMBER - 1] else: ROI_NAME = 'All' SCALE = info['scale'][ROI_NAME] #--------------------------------- #--------------------------------- mode and data path setting 
#all PATH_LIST = glob.glob(os.path.join(info['npy_path'],'*.hdf5')) # PATH_LIST.extend(glob.glob(os.path.join('/staff/shijun/dataset/Med_Seg/EGFR/2d_data','*.hdf5'))) # PATH_LIST.extend(glob.glob(os.path.join('/staff/shijun/dataset/Med_Seg/Covid-Seg/2d_data','*.hdf5'))) #--------------------------------- others SEQ_LEN = 3 INPUT_SHAPE = (512,512) BATCH_SIZE = 8 # CKPT_PATH = './ckpt/{}/{}/{}/{}/fold{}'.format(DISEASE, 'seg', 'v8.3-zero', ROI_NAME, str(1)) CKPT_PATH = './ckpt/{}/{}/{}/{}/fold{}'.format(DISEASE,MODE,VERSION,ROI_NAME,str(CURRENT_FOLD)) WEIGHT_PATH = get_weight_path(CKPT_PATH) print(WEIGHT_PATH) INIT_TRAINER = { 'net_name':NET_NAME, 'lr':1e-3, 'n_epoch':200, 'channels':1, 'num_classes':NUM_CLASSES, 'roi_number':ROI_NUMBER, 'scale':SCALE, 'input_shape':INPUT_SHAPE, 'seq_len':SEQ_LEN, 'crop':0, 'batch_size':BATCH_SIZE, 'num_workers':2, 'device':DEVICE, 'pre_trained':PRE_TRAINED, 'ckpt_point':CKPT_POINT, 'weight_path':WEIGHT_PATH, 'weight_decay': 0.0001, 'momentum': 0.99, 'gamma': 0.1, 'milestones': [40,80], 'T_max':5, 'mode':MODE, 'topk':20, 'freeze':None, } #--------------------------------- __seg_loss__ = ['DiceLoss','TverskyLoss','FocalTverskyLoss','TopkCEPlusDice','DynamicTopkCEPlusDice','TopkCEPlusTopkShiftDice','TopkCEPlusShiftDice','PowDiceLoss','Cross_Entropy','TopkDiceLoss','DynamicTopKLoss','TopKLoss','CEPlusDice','TopkCEPlusDice','CEPlusTopkDice','TopkCEPlusTopkDice'] __cls_loss__ = ['BCEWithLogitsLoss'] __mtl_loss__ = ['BCEPlusDice'] # Arguments when perform the trainer if MODE == 'cls': LOSS_FUN = 'BCEWithLogitsLoss' elif MODE == 'seg' : LOSS_FUN = 'TopkCEPlusDice' else: LOSS_FUN = 'BCEPlusDice' SETUP_TRAINER = { 'output_dir':'./ckpt/{}/{}/{}/{}'.format(DISEASE,MODE,VERSION,ROI_NAME), 'log_dir':'./log/{}/{}/{}/{}'.format(DISEASE,MODE,VERSION,ROI_NAME), 'optimizer':'Adam', 'loss_fun':LOSS_FUN, 'class_weight':None, #[1,4] 'lr_scheduler':None, #'CosineAnnealingLR' } #--------------------------------- TEST_PATH = None
[ "json.load", "rcnn.utils.get_weight_path", "os.path.join" ]
[((2639, 2665), 'rcnn.utils.get_weight_path', 'get_weight_path', (['CKPT_PATH'], {}), '(CKPT_PATH)\n', (2654, 2665), False, 'from rcnn.utils import get_weight_path\n'), ((1402, 1415), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (1411, 1415), False, 'import json\n'), ((2092, 2132), 'os.path.join', 'os.path.join', (["info['npy_path']", '"""*.hdf5"""'], {}), "(info['npy_path'], '*.hdf5')\n", (2104, 2132), False, 'import os\n')]
# -*- python -*- import math import numpy import Shadow from Shadow.ShadowPreprocessorsXraylib import prerefl, pre_mlayer, bragg from srxraylib.sources import srfunc from sirepo.template import transfer_mat_bl from pykern.pkcollections import PKDict from pykern import pkjson sigmax = 0.0045000000000000005 sigdix = 2.913e-05 sigmaz = 0.0045000000000000005 sigdiz = 2.913e-05 beam_stats = [] epsilon = 1e-06 beam = transfer_mat_bl.create_mat_rays(epsilon) sigma_mat = numpy.matrix([ [sigmax ** 2, 0, 0, 0], [0, sigdix ** 2, 0, 0], [0, 0, sigmaz ** 2, 0], [0, 0, 0, sigdiz ** 2], ]) alpha = 0 def calculate_stats(pos, oe): global alpha Tmat, x_prop_cen, xp_prop_cen, z_prop_cen, zp_prop_cen = transfer_mat_bl.tmat_calc(beam.duplicate(), epsilon) res = Tmat * sigma_mat * numpy.transpose(Tmat) pos += (oe.T_SOURCE if oe else 0) if oe: # oe.ALPHA is in radians after traceOE() alpha = int(alpha + 180 / math.pi * oe.ALPHA) % 360 beam_stats.append(PKDict( isRotated=True if alpha == 90 or alpha == 270 else False, s=pos * 1e-2, x=x_prop_cen, xp=xp_prop_cen, z=z_prop_cen, zp=zp_prop_cen, matrix=Tmat.tolist(), sigmax=math.sqrt(res[0, 0]) * 1e-2, sigdix=math.sqrt(res[1, 1]), sigmaz=math.sqrt(res[2, 2]) * 1e-2, sigdiz=math.sqrt(res[3, 3]), )) return pos pos = calculate_stats(0, None) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 1) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 2) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 3) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 4) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 
28.5 beam.traceOE(oe, 5) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 6) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 7) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 8) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 9) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 10) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 11) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 12) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 13) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 14) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 15) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 16) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 17) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 
oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 18) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 19) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 20) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 21) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 22) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 23) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 24) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 25) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 26) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 27) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 28) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 29) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 30) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 
oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 31) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 32) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 33) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 34) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 35) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 36) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 37) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 38) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 39) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 40) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 41) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 42) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 43) pos = calculate_stats(pos, oe) oe 
= Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 44) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 45) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 46) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 47) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 48) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 49) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 50) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 51) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 52) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 53) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 54) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 55) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 56) pos = 
calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 57) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 58) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 59) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 60) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 61) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 62) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 63) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 64) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 65) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 66) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 67) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 68) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 
28.5 beam.traceOE(oe, 69) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 70) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 71) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 72) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 73) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 74) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 75) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 76) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 77) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 78) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 79) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 80) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 81) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 
oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 82) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 83) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 84) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 85) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 86) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 87) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 88) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 89) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 90) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 91) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 92) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 93) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 94) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 
oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 95) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 96) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 97) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 98) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 99) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.FMIRR = 2 oe.ALPHA = 0 oe.FHIT_C = 0 oe.F_EXT = 0 oe.F_DEFAULT = 0 oe.SSOUR = 2900.0 oe.SIMAG = 1000.0 oe.THETA = 2.0002 oe.F_CONVEX = 0 oe.FCYL = 1 oe.CIL_ANG = 90.0 oe.T_INCIDENCE = 2.0 oe.T_REFLECTION = 2.0 oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 28.5 beam.traceOE(oe, 100) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 101) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 102) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 103) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 104) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 105) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 
106) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 107) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 108) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 109) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 110) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 111) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 112) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 113) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 114) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 115) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 116) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 117) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 118) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 
oe.T_SOURCE = 1.0 beam.traceOE(oe, 119) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 120) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 121) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 122) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 123) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 124) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 125) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 126) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 127) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 128) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 129) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 130) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 131) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) 
oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 132) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 133) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 134) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 135) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 136) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 137) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 138) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 139) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 140) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 141) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 142) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 143) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 144) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY 
= 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 145) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 146) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 147) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 148) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 149) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 150) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 151) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 152) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 153) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 154) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 155) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 156) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 157) pos = calculate_stats(pos, 
oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 158) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 159) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 160) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 161) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 162) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 163) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 164) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 165) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 166) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 167) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 168) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 169) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 
170) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 171) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 172) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 173) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 174) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 175) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 176) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 177) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 178) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 179) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 180) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 181) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 182) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 
oe.T_SOURCE = 1.0 beam.traceOE(oe, 183) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 184) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 185) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 186) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 187) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 188) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 189) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 190) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 191) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 192) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 193) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 194) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 195) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) 
oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 196) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 197) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 198) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 199) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.FMIRR = 2 oe.ALPHA = 0 oe.FHIT_C = 1 oe.F_EXT = 0 oe.F_DEFAULT = 0 oe.SSOUR = 3000.0 oe.SIMAG = 900.0 oe.THETA = 2.0002 oe.F_CONVEX = 0 oe.FCYL = 1 oe.CIL_ANG = 0.0 oe.FSHAPE = 2 oe.RWIDX2 = 15.0 oe.RLEN2 = 25.0 oe.F_MOVE = 1 oe.OFFX = 1.0 oe.T_INCIDENCE = 2.0 oe.T_REFLECTION = 2.0 oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.0 beam.traceOE(oe, 200) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 201) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 202) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 203) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 204) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 205) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 206) pos = 
calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 207) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 208) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 209) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 210) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 211) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 212) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 213) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 214) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 215) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 216) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 217) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 
9.050000000000182 beam.traceOE(oe, 218) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 219) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 220) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 221) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 222) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 223) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 224) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 225) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 226) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 227) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 228) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 229) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) 
oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 230) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 231) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 232) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 233) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 234) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 235) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 236) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 237) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 238) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 239) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 240) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 241) pos = calculate_stats(pos, oe) oe = 
Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 242) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 243) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 244) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 245) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 246) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 247) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 248) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 249) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 250) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 251) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 252) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 
253) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 254) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 255) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 256) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 257) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 258) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 259) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 260) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 261) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 262) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 263) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 264) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 
oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 265) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 266) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 267) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 268) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 269) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 270) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 271) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 272) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 273) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 274) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 275) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 276) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 
oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 277) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 278) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 279) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 280) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 281) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 282) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 283) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 284) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 285) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 286) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 287) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 288) pos = 
calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 289) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 290) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 291) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 292) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 293) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 294) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 295) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 296) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 297) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 298) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 9.050000000000182 beam.traceOE(oe, 299) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty() oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 
9.049999999981537 beam.traceOE(oe, 300) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 301) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 302) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 303) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 304) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 305) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 306) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 307) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 308) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 309) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 310) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 311) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 
oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 312) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 313) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 314) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 315) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 316) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 317) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 318) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 319) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 320) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 321) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 322) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 323) pos = 
calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 324) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 325) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 326) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 327) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 328) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 329) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 330) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 331) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 332) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 333) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 334) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 
oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 335) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 336) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 337) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 338) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 339) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 340) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 341) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 342) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 343) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 344) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 345) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 346) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 
oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 347) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 348) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 349) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 350) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 351) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 352) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 353) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 354) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 355) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 356) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 357) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 358) pos = 
calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 359) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 360) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 361) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 362) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 363) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 364) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 365) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 366) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 367) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 368) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 369) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 
oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 370) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 371) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 372) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 373) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 374) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 375) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 376) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 377) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 378) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 379) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 380) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 381) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 
oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 382) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 383) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 384) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 385) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 386) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 387) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 388) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 389) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 390) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 391) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 392) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 393) pos = 
calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 394) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 395) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 396) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 397) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 398) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 399) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty(ALPHA=0) oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 0.9499999999998181 beam.traceOE(oe, 400) pos = calculate_stats(pos, oe) oe = Shadow.OE() oe.DUMMY = 1.0 oe.set_empty() oe.FWRITE = 3 oe.T_IMAGE = 0.0 oe.T_SOURCE = 1.864464138634503e-11 beam.traceOE(oe, 401) pos = calculate_stats(pos, oe) pkjson.dump_pretty(beam_stats, filename='beam_stats.json') import Shadow.ShadowTools Shadow.ShadowTools.plotxy(beam, 1, 3, nbins=100, nolost=1)
[ "pykern.pkjson.dump_pretty", "Shadow.OE", "Shadow.ShadowTools.plotxy", "sirepo.template.transfer_mat_bl.create_mat_rays", "math.sqrt", "numpy.matrix", "numpy.transpose" ]
[((418, 458), 'sirepo.template.transfer_mat_bl.create_mat_rays', 'transfer_mat_bl.create_mat_rays', (['epsilon'], {}), '(epsilon)\n', (449, 458), False, 'from sirepo.template import transfer_mat_bl\n'), ((472, 586), 'numpy.matrix', 'numpy.matrix', (['[[sigmax ** 2, 0, 0, 0], [0, sigdix ** 2, 0, 0], [0, 0, sigmaz ** 2, 0], [0,\n 0, 0, sigdiz ** 2]]'], {}), '([[sigmax ** 2, 0, 0, 0], [0, sigdix ** 2, 0, 0], [0, 0, sigmaz **\n 2, 0], [0, 0, 0, sigdiz ** 2]])\n', (484, 586), False, 'import numpy\n'), ((1446, 1457), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (1455, 1457), False, 'import Shadow\n'), ((1602, 1613), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (1611, 1613), False, 'import Shadow\n'), ((1758, 1769), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (1767, 1769), False, 'import Shadow\n'), ((1914, 1925), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (1923, 1925), False, 'import Shadow\n'), ((2070, 2081), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (2079, 2081), False, 'import Shadow\n'), ((2226, 2237), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (2235, 2237), False, 'import Shadow\n'), ((2382, 2393), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (2391, 2393), False, 'import Shadow\n'), ((2538, 2549), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (2547, 2549), False, 'import Shadow\n'), ((2694, 2705), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (2703, 2705), False, 'import Shadow\n'), ((2850, 2861), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (2859, 2861), False, 'import Shadow\n'), ((3007, 3018), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3016, 3018), False, 'import Shadow\n'), ((3164, 3175), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3173, 3175), False, 'import Shadow\n'), ((3321, 3332), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3330, 3332), False, 'import Shadow\n'), ((3478, 3489), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3487, 3489), False, 'import Shadow\n'), ((3635, 3646), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3644, 3646), False, 'import 
Shadow\n'), ((3792, 3803), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3801, 3803), False, 'import Shadow\n'), ((3949, 3960), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (3958, 3960), False, 'import Shadow\n'), ((4106, 4117), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (4115, 4117), False, 'import Shadow\n'), ((4263, 4274), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (4272, 4274), False, 'import Shadow\n'), ((4420, 4431), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (4429, 4431), False, 'import Shadow\n'), ((4577, 4588), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (4586, 4588), False, 'import Shadow\n'), ((4734, 4745), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (4743, 4745), False, 'import Shadow\n'), ((4891, 4902), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (4900, 4902), False, 'import Shadow\n'), ((5048, 5059), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5057, 5059), False, 'import Shadow\n'), ((5205, 5216), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5214, 5216), False, 'import Shadow\n'), ((5362, 5373), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5371, 5373), False, 'import Shadow\n'), ((5519, 5530), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5528, 5530), False, 'import Shadow\n'), ((5676, 5687), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5685, 5687), False, 'import Shadow\n'), ((5833, 5844), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5842, 5844), False, 'import Shadow\n'), ((5990, 6001), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (5999, 6001), False, 'import Shadow\n'), ((6147, 6158), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (6156, 6158), False, 'import Shadow\n'), ((6304, 6315), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (6313, 6315), False, 'import Shadow\n'), ((6461, 6472), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (6470, 6472), False, 'import Shadow\n'), ((6618, 6629), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (6627, 6629), False, 'import Shadow\n'), ((6775, 6786), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (6784, 6786), False, 'import 
Shadow\n'), ((6932, 6943), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (6941, 6943), False, 'import Shadow\n'), ((7089, 7100), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (7098, 7100), False, 'import Shadow\n'), ((7246, 7257), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (7255, 7257), False, 'import Shadow\n'), ((7403, 7414), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (7412, 7414), False, 'import Shadow\n'), ((7560, 7571), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (7569, 7571), False, 'import Shadow\n'), ((7717, 7728), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (7726, 7728), False, 'import Shadow\n'), ((7874, 7885), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (7883, 7885), False, 'import Shadow\n'), ((8031, 8042), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8040, 8042), False, 'import Shadow\n'), ((8188, 8199), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8197, 8199), False, 'import Shadow\n'), ((8345, 8356), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8354, 8356), False, 'import Shadow\n'), ((8502, 8513), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8511, 8513), False, 'import Shadow\n'), ((8659, 8670), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8668, 8670), False, 'import Shadow\n'), ((8816, 8827), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8825, 8827), False, 'import Shadow\n'), ((8973, 8984), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (8982, 8984), False, 'import Shadow\n'), ((9130, 9141), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (9139, 9141), False, 'import Shadow\n'), ((9287, 9298), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (9296, 9298), False, 'import Shadow\n'), ((9444, 9455), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (9453, 9455), False, 'import Shadow\n'), ((9601, 9612), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (9610, 9612), False, 'import Shadow\n'), ((9758, 9769), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (9767, 9769), False, 'import Shadow\n'), ((9915, 9926), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (9924, 9926), False, 'import 
Shadow\n'), ((10072, 10083), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (10081, 10083), False, 'import Shadow\n'), ((10229, 10240), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (10238, 10240), False, 'import Shadow\n'), ((10386, 10397), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (10395, 10397), False, 'import Shadow\n'), ((10543, 10554), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (10552, 10554), False, 'import Shadow\n'), ((10700, 10711), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (10709, 10711), False, 'import Shadow\n'), ((10857, 10868), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (10866, 10868), False, 'import Shadow\n'), ((11014, 11025), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11023, 11025), False, 'import Shadow\n'), ((11171, 11182), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11180, 11182), False, 'import Shadow\n'), ((11328, 11339), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11337, 11339), False, 'import Shadow\n'), ((11485, 11496), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11494, 11496), False, 'import Shadow\n'), ((11642, 11653), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11651, 11653), False, 'import Shadow\n'), ((11799, 11810), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11808, 11810), False, 'import Shadow\n'), ((11956, 11967), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (11965, 11967), False, 'import Shadow\n'), ((12113, 12124), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (12122, 12124), False, 'import Shadow\n'), ((12270, 12281), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (12279, 12281), False, 'import Shadow\n'), ((12427, 12438), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (12436, 12438), False, 'import Shadow\n'), ((12584, 12595), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (12593, 12595), False, 'import Shadow\n'), ((12741, 12752), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (12750, 12752), False, 'import Shadow\n'), ((12898, 12909), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (12907, 12909), False, 'import Shadow\n'), ((13055, 
13066), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (13064, 13066), False, 'import Shadow\n'), ((13212, 13223), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (13221, 13223), False, 'import Shadow\n'), ((13369, 13380), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (13378, 13380), False, 'import Shadow\n'), ((13526, 13537), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (13535, 13537), False, 'import Shadow\n'), ((13683, 13694), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (13692, 13694), False, 'import Shadow\n'), ((13840, 13851), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (13849, 13851), False, 'import Shadow\n'), ((13997, 14008), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14006, 14008), False, 'import Shadow\n'), ((14154, 14165), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14163, 14165), False, 'import Shadow\n'), ((14311, 14322), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14320, 14322), False, 'import Shadow\n'), ((14468, 14479), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14477, 14479), False, 'import Shadow\n'), ((14625, 14636), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14634, 14636), False, 'import Shadow\n'), ((14782, 14793), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14791, 14793), False, 'import Shadow\n'), ((14939, 14950), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (14948, 14950), False, 'import Shadow\n'), ((15096, 15107), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (15105, 15107), False, 'import Shadow\n'), ((15253, 15264), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (15262, 15264), False, 'import Shadow\n'), ((15410, 15421), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (15419, 15421), False, 'import Shadow\n'), ((15567, 15578), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (15576, 15578), False, 'import Shadow\n'), ((15724, 15735), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (15733, 15735), False, 'import Shadow\n'), ((15881, 15892), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (15890, 15892), False, 'import Shadow\n'), ((16038, 16049), 'Shadow.OE', 
'Shadow.OE', ([], {}), '()\n', (16047, 16049), False, 'import Shadow\n'), ((16195, 16206), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (16204, 16206), False, 'import Shadow\n'), ((16352, 16363), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (16361, 16363), False, 'import Shadow\n'), ((16509, 16520), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (16518, 16520), False, 'import Shadow\n'), ((16666, 16677), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (16675, 16677), False, 'import Shadow\n'), ((16823, 16834), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (16832, 16834), False, 'import Shadow\n'), ((16980, 16991), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (16989, 16991), False, 'import Shadow\n'), ((17329, 17340), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (17338, 17340), False, 'import Shadow\n'), ((17486, 17497), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (17495, 17497), False, 'import Shadow\n'), ((17643, 17654), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (17652, 17654), False, 'import Shadow\n'), ((17800, 17811), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (17809, 17811), False, 'import Shadow\n'), ((17957, 17968), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (17966, 17968), False, 'import Shadow\n'), ((18114, 18125), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (18123, 18125), False, 'import Shadow\n'), ((18271, 18282), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (18280, 18282), False, 'import Shadow\n'), ((18428, 18439), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (18437, 18439), False, 'import Shadow\n'), ((18585, 18596), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (18594, 18596), False, 'import Shadow\n'), ((18742, 18753), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (18751, 18753), False, 'import Shadow\n'), ((18899, 18910), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (18908, 18910), False, 'import Shadow\n'), ((19056, 19067), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (19065, 19067), False, 'import Shadow\n'), ((19213, 19224), 'Shadow.OE', 'Shadow.OE', ([], {}), 
'()\n', (19222, 19224), False, 'import Shadow\n'), ((19370, 19381), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (19379, 19381), False, 'import Shadow\n'), ((19527, 19538), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (19536, 19538), False, 'import Shadow\n'), ((19684, 19695), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (19693, 19695), False, 'import Shadow\n'), ((19841, 19852), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (19850, 19852), False, 'import Shadow\n'), ((19998, 20009), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20007, 20009), False, 'import Shadow\n'), ((20155, 20166), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20164, 20166), False, 'import Shadow\n'), ((20312, 20323), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20321, 20323), False, 'import Shadow\n'), ((20469, 20480), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20478, 20480), False, 'import Shadow\n'), ((20626, 20637), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20635, 20637), False, 'import Shadow\n'), ((20783, 20794), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20792, 20794), False, 'import Shadow\n'), ((20940, 20951), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (20949, 20951), False, 'import Shadow\n'), ((21097, 21108), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (21106, 21108), False, 'import Shadow\n'), ((21254, 21265), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (21263, 21265), False, 'import Shadow\n'), ((21411, 21422), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (21420, 21422), False, 'import Shadow\n'), ((21568, 21579), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (21577, 21579), False, 'import Shadow\n'), ((21725, 21736), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (21734, 21736), False, 'import Shadow\n'), ((21882, 21893), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (21891, 21893), False, 'import Shadow\n'), ((22039, 22050), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22048, 22050), False, 'import Shadow\n'), ((22196, 22207), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22205, 22207), 
False, 'import Shadow\n'), ((22353, 22364), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22362, 22364), False, 'import Shadow\n'), ((22510, 22521), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22519, 22521), False, 'import Shadow\n'), ((22667, 22678), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22676, 22678), False, 'import Shadow\n'), ((22824, 22835), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22833, 22835), False, 'import Shadow\n'), ((22981, 22992), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (22990, 22992), False, 'import Shadow\n'), ((23138, 23149), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (23147, 23149), False, 'import Shadow\n'), ((23295, 23306), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (23304, 23306), False, 'import Shadow\n'), ((23452, 23463), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (23461, 23463), False, 'import Shadow\n'), ((23609, 23620), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (23618, 23620), False, 'import Shadow\n'), ((23766, 23777), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (23775, 23777), False, 'import Shadow\n'), ((23923, 23934), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (23932, 23934), False, 'import Shadow\n'), ((24080, 24091), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (24089, 24091), False, 'import Shadow\n'), ((24237, 24248), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (24246, 24248), False, 'import Shadow\n'), ((24394, 24405), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (24403, 24405), False, 'import Shadow\n'), ((24551, 24562), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (24560, 24562), False, 'import Shadow\n'), ((24708, 24719), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (24717, 24719), False, 'import Shadow\n'), ((24865, 24876), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (24874, 24876), False, 'import Shadow\n'), ((25022, 25033), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25031, 25033), False, 'import Shadow\n'), ((25179, 25190), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25188, 25190), False, 'import 
Shadow\n'), ((25336, 25347), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25345, 25347), False, 'import Shadow\n'), ((25493, 25504), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25502, 25504), False, 'import Shadow\n'), ((25650, 25661), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25659, 25661), False, 'import Shadow\n'), ((25807, 25818), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25816, 25818), False, 'import Shadow\n'), ((25964, 25975), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (25973, 25975), False, 'import Shadow\n'), ((26121, 26132), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (26130, 26132), False, 'import Shadow\n'), ((26278, 26289), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (26287, 26289), False, 'import Shadow\n'), ((26435, 26446), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (26444, 26446), False, 'import Shadow\n'), ((26592, 26603), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (26601, 26603), False, 'import Shadow\n'), ((26749, 26760), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (26758, 26760), False, 'import Shadow\n'), ((26906, 26917), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (26915, 26917), False, 'import Shadow\n'), ((27063, 27074), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (27072, 27074), False, 'import Shadow\n'), ((27220, 27231), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (27229, 27231), False, 'import Shadow\n'), ((27377, 27388), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (27386, 27388), False, 'import Shadow\n'), ((27534, 27545), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (27543, 27545), False, 'import Shadow\n'), ((27691, 27702), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (27700, 27702), False, 'import Shadow\n'), ((27848, 27859), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (27857, 27859), False, 'import Shadow\n'), ((28005, 28016), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28014, 28016), False, 'import Shadow\n'), ((28162, 28173), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28171, 28173), False, 'import Shadow\n'), ((28319, 
28330), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28328, 28330), False, 'import Shadow\n'), ((28476, 28487), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28485, 28487), False, 'import Shadow\n'), ((28633, 28644), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28642, 28644), False, 'import Shadow\n'), ((28790, 28801), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28799, 28801), False, 'import Shadow\n'), ((28947, 28958), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (28956, 28958), False, 'import Shadow\n'), ((29104, 29115), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (29113, 29115), False, 'import Shadow\n'), ((29261, 29272), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (29270, 29272), False, 'import Shadow\n'), ((29418, 29429), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (29427, 29429), False, 'import Shadow\n'), ((29575, 29586), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (29584, 29586), False, 'import Shadow\n'), ((29732, 29743), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (29741, 29743), False, 'import Shadow\n'), ((29889, 29900), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (29898, 29900), False, 'import Shadow\n'), ((30046, 30057), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30055, 30057), False, 'import Shadow\n'), ((30203, 30214), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30212, 30214), False, 'import Shadow\n'), ((30360, 30371), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30369, 30371), False, 'import Shadow\n'), ((30517, 30528), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30526, 30528), False, 'import Shadow\n'), ((30674, 30685), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30683, 30685), False, 'import Shadow\n'), ((30831, 30842), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30840, 30842), False, 'import Shadow\n'), ((30988, 30999), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (30997, 30999), False, 'import Shadow\n'), ((31145, 31156), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (31154, 31156), False, 'import Shadow\n'), ((31302, 31313), 'Shadow.OE', 
'Shadow.OE', ([], {}), '()\n', (31311, 31313), False, 'import Shadow\n'), ((31459, 31470), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (31468, 31470), False, 'import Shadow\n'), ((31616, 31627), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (31625, 31627), False, 'import Shadow\n'), ((31773, 31784), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (31782, 31784), False, 'import Shadow\n'), ((31930, 31941), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (31939, 31941), False, 'import Shadow\n'), ((32087, 32098), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (32096, 32098), False, 'import Shadow\n'), ((32244, 32255), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (32253, 32255), False, 'import Shadow\n'), ((32401, 32412), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (32410, 32412), False, 'import Shadow\n'), ((32558, 32569), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (32567, 32569), False, 'import Shadow\n'), ((32715, 32726), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (32724, 32726), False, 'import Shadow\n'), ((32872, 32883), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (32881, 32883), False, 'import Shadow\n'), ((33293, 33304), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (33302, 33304), False, 'import Shadow\n'), ((33464, 33475), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (33473, 33475), False, 'import Shadow\n'), ((33635, 33646), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (33644, 33646), False, 'import Shadow\n'), ((33806, 33817), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (33815, 33817), False, 'import Shadow\n'), ((33977, 33988), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (33986, 33988), False, 'import Shadow\n'), ((34148, 34159), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (34157, 34159), False, 'import Shadow\n'), ((34319, 34330), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (34328, 34330), False, 'import Shadow\n'), ((34490, 34501), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (34499, 34501), False, 'import Shadow\n'), ((34661, 34672), 'Shadow.OE', 'Shadow.OE', ([], {}), 
'()\n', (34670, 34672), False, 'import Shadow\n'), ((34832, 34843), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (34841, 34843), False, 'import Shadow\n'), ((35003, 35014), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (35012, 35014), False, 'import Shadow\n'), ((35174, 35185), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (35183, 35185), False, 'import Shadow\n'), ((35345, 35356), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (35354, 35356), False, 'import Shadow\n'), ((35516, 35527), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (35525, 35527), False, 'import Shadow\n'), ((35687, 35698), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (35696, 35698), False, 'import Shadow\n'), ((35858, 35869), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (35867, 35869), False, 'import Shadow\n'), ((36029, 36040), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (36038, 36040), False, 'import Shadow\n'), ((36200, 36211), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (36209, 36211), False, 'import Shadow\n'), ((36371, 36382), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (36380, 36382), False, 'import Shadow\n'), ((36542, 36553), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (36551, 36553), False, 'import Shadow\n'), ((36713, 36724), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (36722, 36724), False, 'import Shadow\n'), ((36884, 36895), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (36893, 36895), False, 'import Shadow\n'), ((37055, 37066), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (37064, 37066), False, 'import Shadow\n'), ((37226, 37237), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (37235, 37237), False, 'import Shadow\n'), ((37397, 37408), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (37406, 37408), False, 'import Shadow\n'), ((37568, 37579), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (37577, 37579), False, 'import Shadow\n'), ((37739, 37750), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (37748, 37750), False, 'import Shadow\n'), ((37910, 37921), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (37919, 37921), 
False, 'import Shadow\n'), ((38081, 38092), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (38090, 38092), False, 'import Shadow\n'), ((38252, 38263), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (38261, 38263), False, 'import Shadow\n'), ((38423, 38434), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (38432, 38434), False, 'import Shadow\n'), ((38594, 38605), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (38603, 38605), False, 'import Shadow\n'), ((38765, 38776), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (38774, 38776), False, 'import Shadow\n'), ((38936, 38947), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (38945, 38947), False, 'import Shadow\n'), ((39107, 39118), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (39116, 39118), False, 'import Shadow\n'), ((39278, 39289), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (39287, 39289), False, 'import Shadow\n'), ((39449, 39460), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (39458, 39460), False, 'import Shadow\n'), ((39620, 39631), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (39629, 39631), False, 'import Shadow\n'), ((39791, 39802), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (39800, 39802), False, 'import Shadow\n'), ((39962, 39973), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (39971, 39973), False, 'import Shadow\n'), ((40133, 40144), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (40142, 40144), False, 'import Shadow\n'), ((40304, 40315), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (40313, 40315), False, 'import Shadow\n'), ((40475, 40486), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (40484, 40486), False, 'import Shadow\n'), ((40646, 40657), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (40655, 40657), False, 'import Shadow\n'), ((40817, 40828), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (40826, 40828), False, 'import Shadow\n'), ((40988, 40999), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (40997, 40999), False, 'import Shadow\n'), ((41159, 41170), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (41168, 41170), False, 'import 
Shadow\n'), ((41330, 41341), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (41339, 41341), False, 'import Shadow\n'), ((41501, 41512), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (41510, 41512), False, 'import Shadow\n'), ((41672, 41683), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (41681, 41683), False, 'import Shadow\n'), ((41843, 41854), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (41852, 41854), False, 'import Shadow\n'), ((42014, 42025), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (42023, 42025), False, 'import Shadow\n'), ((42185, 42196), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (42194, 42196), False, 'import Shadow\n'), ((42356, 42367), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (42365, 42367), False, 'import Shadow\n'), ((42527, 42538), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (42536, 42538), False, 'import Shadow\n'), ((42698, 42709), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (42707, 42709), False, 'import Shadow\n'), ((42869, 42880), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (42878, 42880), False, 'import Shadow\n'), ((43040, 43051), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (43049, 43051), False, 'import Shadow\n'), ((43211, 43222), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (43220, 43222), False, 'import Shadow\n'), ((43382, 43393), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (43391, 43393), False, 'import Shadow\n'), ((43553, 43564), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (43562, 43564), False, 'import Shadow\n'), ((43724, 43735), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (43733, 43735), False, 'import Shadow\n'), ((43895, 43906), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (43904, 43906), False, 'import Shadow\n'), ((44066, 44077), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (44075, 44077), False, 'import Shadow\n'), ((44237, 44248), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (44246, 44248), False, 'import Shadow\n'), ((44408, 44419), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (44417, 44419), False, 'import Shadow\n'), ((44579, 
44590), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (44588, 44590), False, 'import Shadow\n'), ((44750, 44761), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (44759, 44761), False, 'import Shadow\n'), ((44921, 44932), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (44930, 44932), False, 'import Shadow\n'), ((45092, 45103), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (45101, 45103), False, 'import Shadow\n'), ((45263, 45274), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (45272, 45274), False, 'import Shadow\n'), ((45434, 45445), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (45443, 45445), False, 'import Shadow\n'), ((45605, 45616), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (45614, 45616), False, 'import Shadow\n'), ((45776, 45787), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (45785, 45787), False, 'import Shadow\n'), ((45947, 45958), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (45956, 45958), False, 'import Shadow\n'), ((46118, 46129), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (46127, 46129), False, 'import Shadow\n'), ((46289, 46300), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (46298, 46300), False, 'import Shadow\n'), ((46460, 46471), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (46469, 46471), False, 'import Shadow\n'), ((46631, 46642), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (46640, 46642), False, 'import Shadow\n'), ((46802, 46813), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (46811, 46813), False, 'import Shadow\n'), ((46973, 46984), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (46982, 46984), False, 'import Shadow\n'), ((47144, 47155), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (47153, 47155), False, 'import Shadow\n'), ((47315, 47326), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (47324, 47326), False, 'import Shadow\n'), ((47486, 47497), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (47495, 47497), False, 'import Shadow\n'), ((47657, 47668), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (47666, 47668), False, 'import Shadow\n'), ((47828, 47839), 'Shadow.OE', 
'Shadow.OE', ([], {}), '()\n', (47837, 47839), False, 'import Shadow\n'), ((47999, 48010), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (48008, 48010), False, 'import Shadow\n'), ((48170, 48181), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (48179, 48181), False, 'import Shadow\n'), ((48341, 48352), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (48350, 48352), False, 'import Shadow\n'), ((48512, 48523), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (48521, 48523), False, 'import Shadow\n'), ((48683, 48694), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (48692, 48694), False, 'import Shadow\n'), ((48854, 48865), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (48863, 48865), False, 'import Shadow\n'), ((49025, 49036), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (49034, 49036), False, 'import Shadow\n'), ((49196, 49207), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (49205, 49207), False, 'import Shadow\n'), ((49367, 49378), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (49376, 49378), False, 'import Shadow\n'), ((49538, 49549), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (49547, 49549), False, 'import Shadow\n'), ((49709, 49720), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (49718, 49720), False, 'import Shadow\n'), ((49880, 49891), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (49889, 49891), False, 'import Shadow\n'), ((50051, 50062), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (50060, 50062), False, 'import Shadow\n'), ((50222, 50233), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (50231, 50233), False, 'import Shadow\n'), ((50386, 50397), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (50395, 50397), False, 'import Shadow\n'), ((50558, 50569), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (50567, 50569), False, 'import Shadow\n'), ((50730, 50741), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (50739, 50741), False, 'import Shadow\n'), ((50902, 50913), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (50911, 50913), False, 'import Shadow\n'), ((51074, 51085), 'Shadow.OE', 'Shadow.OE', ([], {}), 
'()\n', (51083, 51085), False, 'import Shadow\n'), ((51246, 51257), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (51255, 51257), False, 'import Shadow\n'), ((51418, 51429), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (51427, 51429), False, 'import Shadow\n'), ((51590, 51601), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (51599, 51601), False, 'import Shadow\n'), ((51762, 51773), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (51771, 51773), False, 'import Shadow\n'), ((51934, 51945), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (51943, 51945), False, 'import Shadow\n'), ((52106, 52117), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (52115, 52117), False, 'import Shadow\n'), ((52278, 52289), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (52287, 52289), False, 'import Shadow\n'), ((52450, 52461), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (52459, 52461), False, 'import Shadow\n'), ((52622, 52633), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (52631, 52633), False, 'import Shadow\n'), ((52794, 52805), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (52803, 52805), False, 'import Shadow\n'), ((52966, 52977), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (52975, 52977), False, 'import Shadow\n'), ((53138, 53149), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (53147, 53149), False, 'import Shadow\n'), ((53310, 53321), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (53319, 53321), False, 'import Shadow\n'), ((53482, 53493), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (53491, 53493), False, 'import Shadow\n'), ((53654, 53665), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (53663, 53665), False, 'import Shadow\n'), ((53826, 53837), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (53835, 53837), False, 'import Shadow\n'), ((53998, 54009), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (54007, 54009), False, 'import Shadow\n'), ((54170, 54181), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (54179, 54181), False, 'import Shadow\n'), ((54342, 54353), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (54351, 54353), 
False, 'import Shadow\n'), ((54514, 54525), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (54523, 54525), False, 'import Shadow\n'), ((54686, 54697), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (54695, 54697), False, 'import Shadow\n'), ((54858, 54869), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (54867, 54869), False, 'import Shadow\n'), ((55030, 55041), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (55039, 55041), False, 'import Shadow\n'), ((55202, 55213), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (55211, 55213), False, 'import Shadow\n'), ((55374, 55385), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (55383, 55385), False, 'import Shadow\n'), ((55546, 55557), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (55555, 55557), False, 'import Shadow\n'), ((55718, 55729), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (55727, 55729), False, 'import Shadow\n'), ((55890, 55901), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (55899, 55901), False, 'import Shadow\n'), ((56062, 56073), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (56071, 56073), False, 'import Shadow\n'), ((56234, 56245), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (56243, 56245), False, 'import Shadow\n'), ((56406, 56417), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (56415, 56417), False, 'import Shadow\n'), ((56578, 56589), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (56587, 56589), False, 'import Shadow\n'), ((56750, 56761), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (56759, 56761), False, 'import Shadow\n'), ((56922, 56933), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (56931, 56933), False, 'import Shadow\n'), ((57094, 57105), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (57103, 57105), False, 'import Shadow\n'), ((57266, 57277), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (57275, 57277), False, 'import Shadow\n'), ((57438, 57449), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (57447, 57449), False, 'import Shadow\n'), ((57610, 57621), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (57619, 57621), False, 'import 
Shadow\n'), ((57782, 57793), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (57791, 57793), False, 'import Shadow\n'), ((57954, 57965), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (57963, 57965), False, 'import Shadow\n'), ((58126, 58137), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (58135, 58137), False, 'import Shadow\n'), ((58298, 58309), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (58307, 58309), False, 'import Shadow\n'), ((58470, 58481), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (58479, 58481), False, 'import Shadow\n'), ((58642, 58653), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (58651, 58653), False, 'import Shadow\n'), ((58814, 58825), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (58823, 58825), False, 'import Shadow\n'), ((58986, 58997), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (58995, 58997), False, 'import Shadow\n'), ((59158, 59169), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (59167, 59169), False, 'import Shadow\n'), ((59330, 59341), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (59339, 59341), False, 'import Shadow\n'), ((59502, 59513), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (59511, 59513), False, 'import Shadow\n'), ((59674, 59685), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (59683, 59685), False, 'import Shadow\n'), ((59846, 59857), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (59855, 59857), False, 'import Shadow\n'), ((60018, 60029), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (60027, 60029), False, 'import Shadow\n'), ((60190, 60201), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (60199, 60201), False, 'import Shadow\n'), ((60362, 60373), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (60371, 60373), False, 'import Shadow\n'), ((60534, 60545), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (60543, 60545), False, 'import Shadow\n'), ((60706, 60717), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (60715, 60717), False, 'import Shadow\n'), ((60878, 60889), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (60887, 60889), False, 'import Shadow\n'), ((61050, 
61061), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (61059, 61061), False, 'import Shadow\n'), ((61222, 61233), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (61231, 61233), False, 'import Shadow\n'), ((61394, 61405), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (61403, 61405), False, 'import Shadow\n'), ((61566, 61577), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (61575, 61577), False, 'import Shadow\n'), ((61738, 61749), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (61747, 61749), False, 'import Shadow\n'), ((61910, 61921), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (61919, 61921), False, 'import Shadow\n'), ((62082, 62093), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (62091, 62093), False, 'import Shadow\n'), ((62254, 62265), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (62263, 62265), False, 'import Shadow\n'), ((62426, 62437), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (62435, 62437), False, 'import Shadow\n'), ((62598, 62609), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (62607, 62609), False, 'import Shadow\n'), ((62770, 62781), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (62779, 62781), False, 'import Shadow\n'), ((62942, 62953), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (62951, 62953), False, 'import Shadow\n'), ((63114, 63125), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (63123, 63125), False, 'import Shadow\n'), ((63286, 63297), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (63295, 63297), False, 'import Shadow\n'), ((63458, 63469), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (63467, 63469), False, 'import Shadow\n'), ((63630, 63641), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (63639, 63641), False, 'import Shadow\n'), ((63802, 63813), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (63811, 63813), False, 'import Shadow\n'), ((63974, 63985), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (63983, 63985), False, 'import Shadow\n'), ((64146, 64157), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (64155, 64157), False, 'import Shadow\n'), ((64318, 64329), 'Shadow.OE', 
'Shadow.OE', ([], {}), '()\n', (64327, 64329), False, 'import Shadow\n'), ((64490, 64501), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (64499, 64501), False, 'import Shadow\n'), ((64662, 64673), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (64671, 64673), False, 'import Shadow\n'), ((64834, 64845), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (64843, 64845), False, 'import Shadow\n'), ((65006, 65017), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (65015, 65017), False, 'import Shadow\n'), ((65178, 65189), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (65187, 65189), False, 'import Shadow\n'), ((65350, 65361), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (65359, 65361), False, 'import Shadow\n'), ((65522, 65533), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (65531, 65533), False, 'import Shadow\n'), ((65694, 65705), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (65703, 65705), False, 'import Shadow\n'), ((65866, 65877), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (65875, 65877), False, 'import Shadow\n'), ((66038, 66049), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (66047, 66049), False, 'import Shadow\n'), ((66210, 66221), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (66219, 66221), False, 'import Shadow\n'), ((66382, 66393), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (66391, 66393), False, 'import Shadow\n'), ((66554, 66565), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (66563, 66565), False, 'import Shadow\n'), ((66726, 66737), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (66735, 66737), False, 'import Shadow\n'), ((66898, 66909), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (66907, 66909), False, 'import Shadow\n'), ((67070, 67081), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (67079, 67081), False, 'import Shadow\n'), ((67242, 67253), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (67251, 67253), False, 'import Shadow\n'), ((67414, 67425), 'Shadow.OE', 'Shadow.OE', ([], {}), '()\n', (67423, 67425), False, 'import Shadow\n'), ((67586, 67597), 'Shadow.OE', 'Shadow.OE', ([], {}), 
'()\n', (67595, 67597), False, 'import Shadow\n'), ((67749, 67807), 'pykern.pkjson.dump_pretty', 'pkjson.dump_pretty', (['beam_stats'], {'filename': '"""beam_stats.json"""'}), "(beam_stats, filename='beam_stats.json')\n", (67767, 67807), False, 'from pykern import pkjson\n'), ((67836, 67894), 'Shadow.ShadowTools.plotxy', 'Shadow.ShadowTools.plotxy', (['beam', '(1)', '(3)'], {'nbins': '(100)', 'nolost': '(1)'}), '(beam, 1, 3, nbins=100, nolost=1)\n', (67861, 67894), False, 'import Shadow\n'), ((803, 824), 'numpy.transpose', 'numpy.transpose', (['Tmat'], {}), '(Tmat)\n', (818, 824), False, 'import numpy\n'), ((1282, 1302), 'math.sqrt', 'math.sqrt', (['res[1, 1]'], {}), '(res[1, 1])\n', (1291, 1302), False, 'import math\n'), ((1363, 1383), 'math.sqrt', 'math.sqrt', (['res[3, 3]'], {}), '(res[3, 3])\n', (1372, 1383), False, 'import math\n'), ((1238, 1258), 'math.sqrt', 'math.sqrt', (['res[0, 0]'], {}), '(res[0, 0])\n', (1247, 1258), False, 'import math\n'), ((1319, 1339), 'math.sqrt', 'math.sqrt', (['res[2, 2]'], {}), '(res[2, 2])\n', (1328, 1339), False, 'import math\n')]
import json import logging import os import re from collections import namedtuple from copy import deepcopy from typing import Any, Dict, List, Tuple import numpy as np import pandas as pd import spacy from scirex_utilities.analyse_pwc_entity_results import * from scirex_utilities.entity_utils import * from spacy.tokens import Doc from tqdm import tqdm tqdm.pandas() LabelSpan = namedtuple("Span", ["start", "end", "token_start", "token_end", "entity", "links", "modified"]) logging.basicConfig(level=logging.INFO) class WhitespaceTokenizer(object): def __init__(self, vocab): self.vocab = vocab def __call__(self, text): words = text.split() # All tokens 'own' a subsequent space character in this tokenizer spaces = [True] * len(words) return Doc(self.vocab, words=words, spaces=spaces) nlp = spacy.load("en") nlp.tokenizer = WhitespaceTokenizer(nlp.vocab) def process_folder(folder: str) -> Tuple[dict, str]: span_labels = {} map_T_to_span = {} if not os.path.isdir(folder) or "document.txt" not in os.listdir(folder): print(folder, " have not document") return None doc_text = open(os.path.join(folder, "document.txt")).read() ann_file = open(os.path.join(folder, "document.ann")).read().strip() annotations = [x.split("\t", 1) for x in ann_file.split("\n")] annotations = sorted(annotations, key=lambda x: 0 if x[0] == "T" else 1) for ann_type, ann in annotations: if ann_type[0] == "T": ann, ann_text = ann.split("\t") if ";" in ann: continue else: enttype, span_start, span_end = ann.split() span_start, span_end = int(span_start), int(span_end) if (span_start, span_end) in span_labels: assert "Span already present" else: span_labels[(span_start, span_end)] = {"E": enttype, "A": set(), "T": ann_text} map_T_to_span[ann_type] = (span_start, span_end) if ann_type[0] == "A": ann, ann_T = ann.split() if ann_T in map_T_to_span: span_labels[map_T_to_span[ann_T]]["A"].add(ann) else: print("Attribute before Trigger") return span_labels, doc_text def get_all_document_annotations(brat_folder: 
str) -> Dict[str, Tuple[dict, str]]: map_id_to_ann = {} for f in tqdm(os.listdir(brat_folder)): try: map_id_to_ann[f] = process_folder(os.path.join(brat_folder, f)) except Exception as e: print(f) return map_id_to_ann def process_back_to_dataframe(span_labels: Dict[Tuple[int, int], dict], doc_text: str): sentences = doc_text.split("\n ") assert sentences[-1] == "" sentences = [x + "\n " for x in sentences[:-1]] sentence_limits = np.cumsum([len(x) for x in sentences]) sentence_limits = list(zip([0] + list(sentence_limits)[:-1], sentence_limits)) for s, e in sentence_limits: assert doc_text[e - 2 : e] == "\n " assert doc_text[s] != " " span_labels = list(map(lambda x: [list(x[0]), x[1]], sorted(span_labels.items(), key=lambda x: x[0][0]))) sl_ix = 0 map_sentence_limits_to_spans = {} for ss, se in sentence_limits: map_sentence_limits_to_spans[(ss, se)] = [] while sl_ix < len(span_labels) and span_labels[sl_ix][0][0] >= ss and span_labels[sl_ix][0][1] <= se: map_sentence_limits_to_spans[(ss, se)].append(span_labels[sl_ix]) sl_ix += 1 spans_in_l = 0 for k, v in map_sentence_limits_to_spans.items(): for span, _ in v: assert k[0] <= span[0] and k[1] >= span[1] spans_in_l += 1 assert span[1] < k[1] - 1 assert spans_in_l == len(span_labels) for k, v in map_sentence_limits_to_spans.items(): for span, _ in v: span[0] -= k[0] span[1] -= k[0] df = [] for sent_id, ((ss, se), st) in enumerate(zip(sentence_limits, sentences)): for span, d in map_sentence_limits_to_spans[(ss, se)]: assert st[-2:] == "\n ", st[-2:] assert span[1] < len(st) - 2 assert st[span[0] : span[1]] == d["T"] and len(d["T"]) > 0, (st[span[0] : span[1]], d["T"]) df.append({"sentence": st, "spans": map_sentence_limits_to_spans[(ss, se)], "sentence_id": sent_id}) assert df[4]["sentence"].strip() == "", breakpoint() df = df[5:] df = pd.DataFrame(df) return df def get_dataframe_from_folder(brat_folder): logging.info("Generating DataFrame ...") map_changes = get_all_document_annotations(brat_folder) 
logging.info("Done generating DataFrame") doc_df = [] for k in tqdm(map_changes): if map_changes[k] is None: continue df = process_back_to_dataframe(*map_changes[k]) df["doc_id"] = k doc_df.append(df) doc_df = pd.concat(doc_df) return doc_df def overlap(span_1, span_2): if span_1[0] >= span_2[1] or span_2[0] >= span_1[1]: return False return True def process_cluster(cluster): stats = { "new_spans": len([x for x in cluster if "pre" not in x[1]]), "old_spans": len([x for x in cluster if "pre" in x[1]]), "type_change": 0, "change_attributes": 0, } old_spans = [x for x in cluster if "pre" in x[1]] new_spans = [x for x in cluster if "pre" not in x[1]] old_spans_modified, old_spans_unmodified = [], [] for span, info in old_spans: if [info[k] for k in ["E", "T", "A"]] == [info["pre"][k] for k in ["E", "T", "A"]]: del info["pre"] if any(overlap(span, n_span) for n_span, _ in new_spans): continue old_spans_unmodified.append((span, info)) else: del info["pre"] if any(overlap(span, n_span) for n_span, _ in new_spans): continue old_spans_modified.append((span, info)) assert all((si == sj or not overlap(si[0], sj[0])) for si in new_spans for sj in new_spans), breakpoint() assert len(old_spans_unmodified) == 0 or len(old_spans_modified) == 0, breakpoint() assert all( (not overlap(ospan, nspan)) for ospan, _ in old_spans_modified for nspan, _ in new_spans ), breakpoint() assert all( (not overlap(ospan, nspan)) for ospan, _ in old_spans_unmodified for nspan, _ in new_spans ), breakpoint() if len(old_spans_modified + old_spans_unmodified) > 0 and len(new_spans) > 0: breakpoint() new_spans = [ LabelSpan( start=x[0][0], end=x[0][1], entity=x[1]["E"], links=x[1]["A"], token_start=None, token_end=None, modified=True, )._asdict() for x in new_spans + old_spans_modified ] new_spans += [ LabelSpan( start=x[0][0], end=x[0][1], entity=x[1]["E"], links=x[1]["A"], token_start=None, token_end=None, modified=False, )._asdict() for x in old_spans_unmodified ] stats["spans_kept"] = len(new_spans) 
return new_spans, stats # Cases 1 : Pre entity have labels / post don't -> copy labels / delete pre entity # Cases 2 : Pre entity have labels / post also have labels -> don't copy labels / delete pre entity # Cases 3 : If post entity have different type than pre entity, remove pre entity def normalize_spans(row): span_list_1, span_list_2 = row["spans_old"], row["spans_new"] map_1_span_to_ix = {tuple(k): v for k, v in span_list_1} if len(span_list_2) == 0: return [], None spans = [tuple(x[0]) for x in span_list_2] if len(spans) != len(set(spans)): assert "Duplicate spans", span_list_2 span_list_2 = sorted(span_list_2, key=lambda x: x[0]) stats = [] clusters = [] curr_cluster = [] cstart, cend = -1, -1 for (start, end), span_info in span_list_2: cspan = ((start, end), span_info) if (start, end) in map_1_span_to_ix: span_info["pre"] = map_1_span_to_ix[(start, end)] if cstart == -1: # (Start First Cluster) curr_cluster.append(cspan) cstart, cend = start, end elif start < cend: # Append to current cluster curr_cluster.append(cspan) cend = max(cend, end) else: # Start new cluster curr_cluster, cluster_stats = process_cluster(curr_cluster) stats.append(cluster_stats) clusters.append(curr_cluster) curr_cluster = [cspan] cstart, cend = start, end curr_cluster, cluster_stats = process_cluster(curr_cluster) stats.append(cluster_stats) clusters.append(curr_cluster) clusters = sorted([z for x in clusters for z in x], key=lambda x: (x["start"], x["end"])) for i in range(len(clusters) - 1): if clusters[i]["end"] > clusters[i + 1]["start"]: breakpoint() stats_reduced = {} for s in stats: for k, v in s.items(): if k not in stats_reduced: stats_reduced[k] = v else: stats_reduced[k] += v return clusters, stats_reduced def add_token_index(row): if len(row["cluster"]) == 0: return [] sentence = row["sentence_old"] words = row["words"] word_indices = row["word_indices"] sentence_start = row["sentence_start"] starts, ends = list(zip(*word_indices)) for i, (start, end) in 
enumerate(zip(starts, ends)): assert sentence[start:end] == words[i], breakpoint() new_cluster = [] cluster = row["cluster"] for i, span in enumerate(cluster): assert "start" in span, breakpoint() assert "end" in span, breakpoint() if not (span["start"] in starts): if sentence[span["start"]].strip() == "": span["start"] += 1 else: span["start"] = min( starts, key=lambda x: abs(x - span["start"]) if x < span["start"] else float("inf") ) if not (span["end"] in ends): if sentence[span["end"] - 1].strip() == "": span["end"] -= 1 else: span["end"] = min( ends, key=lambda x: abs(x - span["end"]) if x > span["end"] else float("inf") ) span["token_start"] = starts.index(span["start"]) + sentence_start - len(words) span["token_end"] = ends.index(span["end"]) + 1 + sentence_start - len(words) for cleaned_span in new_cluster: if overlap( (span["token_start"], span["token_end"]), (cleaned_span["token_start"], cleaned_span["token_end"]), ): print(row["doc_id"]) print(" ".join(row["words"])) print("=" * 20) new_cluster.append(span) return new_cluster def generate_token_and_indices(sentence): words = sorted( [(m.group(0), (m.start(), m.end())) for m in re.finditer(r"[^\s\+\-/\(\)&\[\],]+", sentence)] + [(m.group(0), (m.start(), m.end())) for m in re.finditer(r"[\+\-/\(\)&\[\],]+", sentence)] + [(m.group(0), (m.start(), m.end())) for m in re.finditer(r"\s+", sentence)], key=lambda x: x[1], ) if len(words) == 0 or sentence.strip() == "": return [], [] try: words, indices = list(zip(*[(t, i) for t, i in words if t.strip() != ""])) except: breakpoint() return words, indices def compare_brat_annotations(ann_old_df, ann_new_df): df_merged = ann_old_df.merge(ann_new_df, on=["doc_id", "sentence_id"], suffixes=("_old", "_new")) logging.info("Applying Normalize Spans ...") output = df_merged.progress_apply(normalize_spans, axis=1) df_merged["cluster"], df_merged["stats"] = list(zip(*output)) df_merged = df_merged.sort_values(["doc_id", "sentence_id"]).reset_index(drop=True) 
logging.info("Applying Add Token Index ...") df_merged["words"], df_merged["word_indices"] = list( zip(*df_merged["sentence_old"].progress_apply(generate_token_and_indices)) ) df_merged["num_words"] = df_merged["words"].progress_apply(len) df_merged["sentence_start"] = df_merged.groupby("doc_id")["num_words"].cumsum() df_merged["entities"] = df_merged.apply(add_token_index, axis=1) df_merged = ( df_merged.sort_values(["doc_id", "sentence_id"]) .reset_index(drop=True) .drop(columns=["spans_old", "spans_new", "sentence_new", "cluster"]) .rename(columns={"sentence_old": "sentence"}) ) return df_merged def generate_relations_in_pwc_df(pwc_df): pwc_df_keep = pwc_df[["s2_paper_id"] + true_entities + ["score"]].rename( columns=map_true_entity_to_available ) pwc_df_keep = ( pwc_df_keep[(~pwc_df_keep.duplicated()) & (pwc_df_keep.s2_paper_id != "not_found")] .sort_values(["s2_paper_id"] + used_entities + ["score"]) .reset_index(drop=True) ) # pwc_df_keep[used_entities] = pwc_df_keep[used_entities].applymap(lambda x: re.sub(r"[^\w-]", "_", x)) pwc_df_keep = ( pwc_df_keep.groupby("s2_paper_id") .apply(lambda x: list(x[used_entities + ["score"]].itertuples(index=False, name="Relation"))) .reset_index() .rename(columns={0: "Relations"}) ) return pwc_df_keep def combine_brat_to_original_data( pwc_doc_file, pwc_sentence_file, pwc_prediction_file, original_brat_anno_folder, annotated_brat_anno_folder, ): logging.info("Loading pwc docs ... 
") pwc_df = load_pwc_full_text(pwc_doc_file) pwc_grouped = ( pwc_df.groupby("s2_paper_id")[["dataset", "task", "model_name", "metric"]] .aggregate(lambda x: list(set(tuple(x)))) .reset_index() ) pwc_df_relations = generate_relations_in_pwc_df(pwc_df) pwc_df_relations = pwc_df_relations.rename(columns={"s2_paper_id": "doc_id"})[["doc_id", "Relations"]] pwc_df_relations.index = pwc_df_relations.doc_id pwc_df_relations = pwc_df_relations.drop(columns=["doc_id"]) pwc_df_relations: Dict[str, Relation] = pwc_df_relations.to_dict()["Relations"] method_breaks = { d: { clean_name(rel.Method): [(i, clean_name(x)) for i, x in chunk_string(rel.Method)] for rel in relations } for d, relations in pwc_df_relations.items() } pwc_df_relations = { d: [{k: clean_name(x) if k != "score" else x for k, x in rel._asdict().items()} for rel in relations] for d, relations in pwc_df_relations.items() } logging.info("Loading PwC Sentence Predictions ... ") pwc_sentences = load_pwc_sentence_predictions(pwc_sentence_file, pwc_prediction_file) pwc_sentences = pwc_sentences.merge(pwc_grouped, left_on="doc_id", right_on="s2_paper_id") pwc_sentences = pwc_sentences.sort_values( by=["doc_id", "section_id", "para_id", "sentence_id"] ).reset_index(drop=True) pwc_sentences["words"] = pwc_sentences["words"].progress_apply( lambda x: generate_token_and_indices(" ".join(x))[0] ) df_changed = get_dataframe_from_folder(annotated_brat_anno_folder) df_original = get_dataframe_from_folder(original_brat_anno_folder) df_merged = compare_brat_annotations(df_original, df_changed) assert ( pwc_sentences.groupby("doc_id")["words"].agg(lambda words: [x for y in words for x in y]) != df_merged.groupby("doc_id")["words"].agg(lambda words: [x for y in words for x in y]) ).sum() == 0, breakpoint() def add_nums(rows, columns, name): rows[name] = list(rows.groupby(columns).grouper.group_info[0]) return rows pwc_sentences["para_num"] = None pwc_sentences["sentence_num"] = None pwc_sentences = 
pwc_sentences.groupby("doc_id").progress_apply( lambda x: add_nums(x, ["section_id", "para_id"], "para_num") ) pwc_sentences = pwc_sentences.groupby("doc_id").progress_apply( lambda x: add_nums(x, ["section_id", "para_id", "sentence_id"], "sentence_num") ) words: Dict[str, List[str]] = pwc_sentences.groupby("doc_id")["words"].agg( lambda words: [x for y in words for x in y] ).to_dict() pwc_sentences["num_words"] = pwc_sentences["words"].apply(len) sentences = pwc_sentences.groupby(["doc_id", "sentence_num"])["num_words"].agg(sum) sections = pwc_sentences.groupby(["doc_id", "section_id"])["num_words"].agg(sum) sections: Dict[str, Dict[int, int]] = { level: sections.xs(level).to_dict() for level in sections.index.levels[0] } sentences: Dict[str, Dict[int, int]] = { level: sentences.xs(level).to_dict() for level in sentences.index.levels[0] } words_merged = ( df_merged.groupby("doc_id")["words"].agg(lambda words: [x for y in words for x in y]).to_dict() ) entities = ( df_merged.groupby("doc_id")["entities"].agg(lambda ents: [x for y in ents for x in y]).to_dict() ) def compute_start_end(cards): ends = list(np.cumsum(cards)) starts = [0] + ends return list(zip([int(x) for x in starts], [int(x) for x in ends])) combined_information = {} for d in words: assert words[d] == words_merged[d], breakpoint() assert list(sentences[d].keys()) == list(range(max(sentences[d].keys()) + 1)), breakpoint() assert list(sections[d].keys()) == list(range(max(sections[d].keys()) + 1)), breakpoint() sent = compute_start_end([sentences[d][i] for i in range(len(sentences[d]))]) sec = compute_start_end([sections[d][i] for i in range(len(sections[d]))]) for e in entities[d]: del e["start"] del e["end"] combined_information[d] = { "words": words[d], "sentences": sent, "sections": sec, "relations": pwc_df_relations[d], "entities": entities[d], "doc_id": d, "method_subrelations": method_breaks[d], } return combined_information def _annotation_to_dict(dc): # convenience method if isinstance(dc, 
dict): ret = dict() for k, v in dc.items(): k = _annotation_to_dict(k) v = _annotation_to_dict(v) ret[k] = v return ret elif isinstance(dc, str): return dc elif isinstance(dc, (set, frozenset, list, tuple)): ret = [] for x in dc: ret.append(_annotation_to_dict(x)) return tuple(ret) else: return dc def annotations_to_jsonl(annotations, output_file, key="doc_id"): with open(output_file, "w") as of: for ann in sorted(annotations, key=lambda x: x[key]): as_json = _annotation_to_dict(ann) as_str = json.dumps(as_json, sort_keys=True) of.write(as_str) of.write("\n") def propagate_annotations(data_dict: Dict[str, Any]): words = data_dict["words"] entities = data_dict["entities"] entities = {(e["token_start"], e["token_end"]): e for e in entities} assert not any(e != f and overlap(e, f) for e in entities for f in entities), breakpoint() new_entities = {} for (s, e) in entities: if entities[(s, e)]["modified"] == True: span_text = words[s:e] possible_matches = [ (i, i + len(span_text)) for i in range(len(words)) if words[i : i + len(span_text)] == span_text ] for match in possible_matches: add_match = False if match in entities: if entities[match].get("proped", False): continue if entities[match]["modified"] == False: # Propagate the changes for k in ["entity", "links", "modified"]: entities[match][k] = deepcopy(entities[(s, e)][k]) elif entities[match]["entity"] != entities[(s, e)]["entity"]: if match > (s, e): for k in ["entity", "links", "modified"]: entities[match][k] = deepcopy(entities[(s, e)][k]) elif set(entities[match]["links"]) != set( entities[(s, e)]["links"] ): # Two entities with same text have different annotations. BAD !!! 
merged_links = set(entities[match]["links"]) | set(entities[(s, e)]["links"]) entities[match]["links"] = deepcopy(list(merged_links)) entities[(s, e)]["links"] = deepcopy(list(merged_links)) entities[match]["proped"] = True add_match = False else: for span in entities: if overlap(span, match): if entities[span]["modified"] == True: add_match = False if entities[span]["entity"] != entities[(s, e)]["entity"]: break elif set(entities[span]["links"]) != set(entities[(s, e)]["links"]): diff_links = set(entities[(s, e)]["links"]) ^ set(entities[span]["links"]) canon_name = set(["Canonical_Name"]) if ( diff_links != canon_name and set(entities[(s, e)]["links"]) != canon_name and set(entities[span]["links"]) != canon_name ): break else: merged_links = set(entities[(s, e)]["links"]) | set( entities[span]["links"] ) entities[(s, e)]["links"] = deepcopy(list(merged_links)) entities[span]["links"] = deepcopy(list(merged_links)) break break else: add_match = True if match in new_entities: if new_entities[match]["entity"] != entities[(s, e)]["entity"]: breakpoint() elif set(new_entities[match]["links"]) != set( entities[(s, e)]["links"] ): # Two entities with same text have different annotations. BAD !!! 
diff_links = set(new_entities[match]["links"]) & set(entities[(s, e)]["links"]) if ( len(diff_links) == 0 and len(set(new_entities[match]["links"])) > 0 and len(set(entities[(s, e)]["links"])) > 0 ): breakpoint() else: merged_links = set(new_entities[match]["links"] + entities[(s, e)]["links"]) entities[(s, e)]["links"] = deepcopy(list(merged_links)) new_entities[match]["links"] = deepcopy(list(merged_links)) else: add_match = False if add_match: new_entities[match] = { k: deepcopy(entities[(s, e)][k]) for k in ["entity", "links", "modified"] } new_entities[match]["token_start"] = match[0] new_entities[match]["token_end"] = match[1] for match in list(new_entities.keys()): for span in list(entities.keys()): if overlap(match, span): assert entities[span]["modified"] == False or entities[span]["proped"], breakpoint() if entities[span].get("proped", False): if match in new_entities: del new_entities[match] elif not entities[span]["modified"]: del entities[span] new_entities = sorted(list(new_entities.items()), key=lambda x: x[0][1]) disjoint_new_entities = [] for e in new_entities: if len(disjoint_new_entities) == 0: disjoint_new_entities.append(e) else: if e[0][0] >= disjoint_new_entities[-1][0][1]: disjoint_new_entities.append(e) assert not any( e[0] != f[0] and overlap(e[0], f[0]) for e in disjoint_new_entities for f in disjoint_new_entities ) disjoint_new_entities = dict(disjoint_new_entities) assert not any(overlap(e, f) for e in disjoint_new_entities for f in entities), breakpoint() entities.update(disjoint_new_entities) assert not any(e != f and overlap(e, f) for e in entities for f in entities), breakpoint() assert all(v["token_start"] == s and v["token_end"] == e for (s, e), v in entities.items()), breakpoint() data_dict["entities"] = [x for x in entities.values()] import argparse parser = argparse.ArgumentParser() parser.add_argument("--annotator") if __name__ == "__main__": args = parser.parse_args() annotations_dict = combine_brat_to_original_data( 
"data/pwc_s2_cleaned_text_v2.jsonl", "data/pwc_s2_cleaned_text_v2_sentences.jsonl", "outputs/pwc_s2_cleaned_text_v2_sentences_predictions.jsonl.clean", "/home/sarthakj/brat/brat/data/result_extraction/outputs/second_phase_annotations_" + args.annotator + "/", "/home/sarthakj/brat/brat/data/result_extraction/outputs/second_phase_annotations_original/", ) annotations_to_jsonl(list(annotations_dict.values()), "model_data/all_data_" + args.annotator + ".jsonl") data = [json.loads(line) for line in open("model_data/all_data_" + args.annotator + ".jsonl")] for d in tqdm(data): names = [v for rel in d["relations"] for k, v in rel.items() if k != "score"] names += [n for m, subm in d["method_subrelations"].items() for idx, n in subm] names = set(names) propagate_annotations(d) coreference = {n: [] for n in names} ner = [] for e in d["entities"]: e["links"] = set(e["links"]) e["canon"] = "Canonical_Name" in e["links"] if e["canon"]: e["links"].remove("Canonical_Name") if "proped" in e: del e["proped"] del e["modified"] e["links"] = e["links"] & names for l in e["links"]: coreference[l].append([e["token_start"], e["token_end"]]) ner.append((e["token_start"], e["token_end"], e["entity"])) del d["entities"] d["n_ary_relations"] = d["relations"] del d["relations"] d["coref"] = coreference d["ner"] = ner assert d["sentences"][-1][-1] == len(d["words"]), breakpoint() assert d["sections"][-1][-1] == len(d["words"]), breakpoint() annotations_to_jsonl(data, "model_data/all_data_" + args.annotator + "_propagated.jsonl")
[ "logging.basicConfig", "json.loads", "collections.namedtuple", "os.listdir", "argparse.ArgumentParser", "spacy.load", "tqdm.tqdm", "json.dumps", "spacy.tokens.Doc", "os.path.join", "os.path.isdir", "pandas.concat", "re.finditer", "copy.deepcopy", "pandas.DataFrame", "numpy.cumsum", "...
[((357, 370), 'tqdm.tqdm.pandas', 'tqdm.pandas', ([], {}), '()\n', (368, 370), False, 'from tqdm import tqdm\n'), ((383, 482), 'collections.namedtuple', 'namedtuple', (['"""Span"""', "['start', 'end', 'token_start', 'token_end', 'entity', 'links', 'modified']"], {}), "('Span', ['start', 'end', 'token_start', 'token_end', 'entity',\n 'links', 'modified'])\n", (393, 482), False, 'from collections import namedtuple\n'), ((479, 518), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (498, 518), False, 'import logging\n'), ((852, 868), 'spacy.load', 'spacy.load', (['"""en"""'], {}), "('en')\n", (862, 868), False, 'import spacy\n'), ((25462, 25487), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (25485, 25487), False, 'import argparse\n'), ((4482, 4498), 'pandas.DataFrame', 'pd.DataFrame', (['df'], {}), '(df)\n', (4494, 4498), True, 'import pandas as pd\n'), ((4564, 4604), 'logging.info', 'logging.info', (['"""Generating DataFrame ..."""'], {}), "('Generating DataFrame ...')\n", (4576, 4604), False, 'import logging\n'), ((4670, 4711), 'logging.info', 'logging.info', (['"""Done generating DataFrame"""'], {}), "('Done generating DataFrame')\n", (4682, 4711), False, 'import logging\n'), ((4741, 4758), 'tqdm.tqdm', 'tqdm', (['map_changes'], {}), '(map_changes)\n', (4745, 4758), False, 'from tqdm import tqdm\n'), ((4937, 4954), 'pandas.concat', 'pd.concat', (['doc_df'], {}), '(doc_df)\n', (4946, 4954), True, 'import pandas as pd\n'), ((11806, 11850), 'logging.info', 'logging.info', (['"""Applying Normalize Spans ..."""'], {}), "('Applying Normalize Spans ...')\n", (11818, 11850), False, 'import logging\n'), ((12074, 12118), 'logging.info', 'logging.info', (['"""Applying Add Token Index ..."""'], {}), "('Applying Add Token Index ...')\n", (12086, 12118), False, 'import logging\n'), ((13686, 13723), 'logging.info', 'logging.info', (['"""Loading pwc docs ... """'], {}), "('Loading pwc docs ... 
')\n", (13698, 13723), False, 'import logging\n'), ((14754, 14807), 'logging.info', 'logging.info', (['"""Loading PwC Sentence Predictions ... """'], {}), "('Loading PwC Sentence Predictions ... ')\n", (14766, 14807), False, 'import logging\n'), ((26277, 26287), 'tqdm.tqdm', 'tqdm', (['data'], {}), '(data)\n', (26281, 26287), False, 'from tqdm import tqdm\n'), ((800, 843), 'spacy.tokens.Doc', 'Doc', (['self.vocab'], {'words': 'words', 'spaces': 'spaces'}), '(self.vocab, words=words, spaces=spaces)\n', (803, 843), False, 'from spacy.tokens import Doc\n'), ((2426, 2449), 'os.listdir', 'os.listdir', (['brat_folder'], {}), '(brat_folder)\n', (2436, 2449), False, 'import os\n'), ((26177, 26193), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (26187, 26193), False, 'import json\n'), ((1026, 1047), 'os.path.isdir', 'os.path.isdir', (['folder'], {}), '(folder)\n', (1039, 1047), False, 'import os\n'), ((1073, 1091), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (1083, 1091), False, 'import os\n'), ((17206, 17222), 'numpy.cumsum', 'np.cumsum', (['cards'], {}), '(cards)\n', (17215, 17222), True, 'import numpy as np\n'), ((18956, 18991), 'json.dumps', 'json.dumps', (['as_json'], {'sort_keys': '(True)'}), '(as_json, sort_keys=True)\n', (18966, 18991), False, 'import json\n'), ((1177, 1213), 'os.path.join', 'os.path.join', (['folder', '"""document.txt"""'], {}), "(folder, 'document.txt')\n", (1189, 1213), False, 'import os\n'), ((2511, 2539), 'os.path.join', 'os.path.join', (['brat_folder', 'f'], {}), '(brat_folder, f)\n', (2523, 2539), False, 'import os\n'), ((11352, 11381), 're.finditer', 're.finditer', (['"""\\\\s+"""', 'sentence'], {}), "('\\\\s+', sentence)\n", (11363, 11381), False, 'import re\n'), ((1242, 1278), 'os.path.join', 'os.path.join', (['folder', '"""document.ann"""'], {}), "(folder, 'document.ann')\n", (1254, 1278), False, 'import os\n'), ((11147, 11200), 're.finditer', 're.finditer', (['"""[^\\\\s\\\\+\\\\-/\\\\(\\\\)&\\\\[\\\\],]+"""', 
'sentence'], {}), "('[^\\\\s\\\\+\\\\-/\\\\(\\\\)&\\\\[\\\\],]+', sentence)\n", (11158, 11200), False, 'import re\n'), ((11251, 11300), 're.finditer', 're.finditer', (['"""[\\\\+\\\\-/\\\\(\\\\)&\\\\[\\\\],]+"""', 'sentence'], {}), "('[\\\\+\\\\-/\\\\(\\\\)&\\\\[\\\\],]+', sentence)\n", (11262, 11300), False, 'import re\n'), ((23801, 23828), 'copy.deepcopy', 'deepcopy', (['entities[s, e][k]'], {}), '(entities[s, e][k])\n', (23809, 23828), False, 'from copy import deepcopy\n'), ((20079, 20106), 'copy.deepcopy', 'deepcopy', (['entities[s, e][k]'], {}), '(entities[s, e][k])\n', (20087, 20106), False, 'from copy import deepcopy\n'), ((20357, 20384), 'copy.deepcopy', 'deepcopy', (['entities[s, e][k]'], {}), '(entities[s, e][k])\n', (20365, 20384), False, 'from copy import deepcopy\n')]
# Copyright (C) 2018 Innoviz Technologies
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD 3-Clause license. See the LICENSE file for details.
import pandas as pd
import os
import numpy as np
from utilities.math_utils import RotationTranslationData
from visualizations.vis import pcshow
from utilities import data_utils

if __name__ == '__main__':
    # Replay the bundled test video: keep a rolling window of labelled
    # point clouds in the world frame and render each frame from the
    # current sensor pose.
    base_dir = os.path.dirname(os.getcwd())
    video_dir = os.path.join(base_dir, 'data_examples', 'test_video')

    frame_buffer = []        # rolling window of world-frame labelled clouds
    max_frames_to_keep = 10  # window length, in frames
    min_idx = 0              # skip frames before this index
    decimate = 1             # keep every `decimate`-th frame
    max_dist = 100           # clip displayed points beyond this range

    for idx in data_utils.enumerate_frames(video_dir):
        skip_frame = idx < min_idx or idx % decimate != 0
        if skip_frame:
            continue

        pc_file = data_utils.frame_to_filename(video_dir, idx, 'pointcloud')
        pc, ego, label = data_utils.read_all_data(video_dir, idx)

        # Transform the raw cloud into the world frame using the ego pose,
        # then re-attach intensity and labels as extra columns.
        ego_rt = RotationTranslationData(vecs=(ego[:3], ego[3:]))
        world_xyz = ego_rt.apply_transform(pc[:, :3])
        world_pc = np.concatenate((world_xyz, pc[:, 3:4]), -1)
        labeled_pc = np.concatenate((world_pc, label), -1)

        frame_buffer.append(labeled_pc)
        if len(frame_buffer) > max_frames_to_keep:
            frame_buffer = frame_buffer[1:]

        # Map the aggregated window back into the current sensor frame
        # for display, and drop far-away points.
        agg_cloud = np.concatenate(frame_buffer, 0)
        disp = ego_rt.inverse().apply_transform(agg_cloud[:, :3])
        disp = np.concatenate((disp, agg_cloud[:, 3:]), -1)
        disp = disp[np.linalg.norm(disp[:, :3], axis=1) < max_dist]
        pcshow(disp, on_screen_text=pc_file, max_points=32000 * max_frames_to_keep)
[ "utilities.data_utils.enumerate_frames", "os.path.join", "os.getcwd", "utilities.math_utils.RotationTranslationData", "numpy.concatenate", "numpy.linalg.norm", "utilities.data_utils.read_all_data", "utilities.data_utils.frame_to_filename", "visualizations.vis.pcshow" ]
[((463, 516), 'os.path.join', 'os.path.join', (['base_dir', '"""data_examples"""', '"""test_video"""'], {}), "(base_dir, 'data_examples', 'test_video')\n", (475, 516), False, 'import os\n'), ((642, 680), 'utilities.data_utils.enumerate_frames', 'data_utils.enumerate_frames', (['video_dir'], {}), '(video_dir)\n', (669, 680), False, 'from utilities import data_utils\n'), ((434, 445), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (443, 445), False, 'import os\n'), ((770, 828), 'utilities.data_utils.frame_to_filename', 'data_utils.frame_to_filename', (['video_dir', 'idx', '"""pointcloud"""'], {}), "(video_dir, idx, 'pointcloud')\n", (798, 828), False, 'from utilities import data_utils\n'), ((854, 894), 'utilities.data_utils.read_all_data', 'data_utils.read_all_data', (['video_dir', 'idx'], {}), '(video_dir, idx)\n', (878, 894), False, 'from utilities import data_utils\n'), ((912, 960), 'utilities.math_utils.RotationTranslationData', 'RotationTranslationData', ([], {'vecs': '(ego[:3], ego[3:])'}), '(vecs=(ego[:3], ego[3:]))\n', (935, 960), False, 'from utilities.math_utils import RotationTranslationData\n'), ((1029, 1069), 'numpy.concatenate', 'np.concatenate', (['(ego_pc, pc[:, 3:4])', '(-1)'], {}), '((ego_pc, pc[:, 3:4]), -1)\n', (1043, 1069), True, 'import numpy as np\n'), ((1092, 1127), 'numpy.concatenate', 'np.concatenate', (['(ego_pc, label)', '(-1)'], {}), '((ego_pc, label), -1)\n', (1106, 1127), True, 'import numpy as np\n'), ((1321, 1360), 'numpy.concatenate', 'np.concatenate', (['agg_point_cloud_list', '(0)'], {}), '(agg_point_cloud_list, 0)\n', (1335, 1360), True, 'import numpy as np\n'), ((1454, 1507), 'numpy.concatenate', 'np.concatenate', (['(pc2disp, agg_point_cloud[:, 3:])', '(-1)'], {}), '((pc2disp, agg_point_cloud[:, 3:]), -1)\n', (1468, 1507), True, 'import numpy as np\n'), ((1593, 1671), 'visualizations.vis.pcshow', 'pcshow', (['pc2disp'], {'on_screen_text': 'pc_file', 'max_points': '(32000 * max_frames_to_keep)'}), '(pc2disp, on_screen_text=pc_file, 
max_points=32000 * max_frames_to_keep)\n', (1599, 1671), False, 'from visualizations.vis import pcshow\n'), ((1534, 1572), 'numpy.linalg.norm', 'np.linalg.norm', (['pc2disp[:, :3]'], {'axis': '(1)'}), '(pc2disp[:, :3], axis=1)\n', (1548, 1572), True, 'import numpy as np\n')]
from typing import Sequence, Optional
import pandas as pd
import numpy as np


def formatted_corr_df(df: pd.DataFrame, cols: Optional[Sequence[str]] = None) -> pd.DataFrame:
    """
    Calculates correlations on a DataFrame and displays only the lower
    triangular (including the diagonal) of the resulting correlation
    DataFrame, formatted to two decimal places.

    :param df: input DataFrame
    :param cols: subset of column names on which to calculate correlations
    :return: DataFrame of strings; entries above the diagonal are blank
    """
    if not cols:
        use_cols = list(df.columns)
    else:
        use_cols = list(cols)
    corr_df = _lower_triangular_of_df(df[use_cols].corr())
    # Entries blanked by the helper are already strings; format the rest.
    return corr_df.applymap(lambda x: f'{x:.2f}' if not isinstance(x, str) else x)


def _lower_triangular_of_df(df: pd.DataFrame) -> pd.DataFrame:
    """Blank out the strict upper triangle of a square DataFrame.

    Uses an explicit boolean mask instead of ``np.tril(df).replace(0, '')``,
    which would also (incorrectly) blank genuine correlations of exactly 0
    located in the lower triangle.
    """
    upper = np.triu(np.ones(df.shape, dtype=bool), k=1)
    return df.mask(upper, '')
[ "numpy.tril" ]
[((738, 749), 'numpy.tril', 'np.tril', (['df'], {}), '(df)\n', (745, 749), True, 'import numpy as np\n')]
# -*- coding: utf-8 -*-
__doc__="返回选择物体的类型"
import rpw
from rpw import revit, DB, UI, db, doc

# Collapse the datum extent of every selected grid from 3D to 2D
# (view-specific) at both ends, inside a single Revit transaction.
selection = rpw.ui.Selection()
Grid = selection
endpoint = DB.DatumEnds.End0
startpoint = DB.DatumEnds.End1
CurrentView = doc.ActiveView
DatumExtentType2D = DB.DatumExtentType.ViewSpecific


@rpw.db.Transaction.ensure('Hide_Grid_Bubble')
def DatumExtentType3DTo2D(_Grid, points, CurrentView):
    """Set the given datum ends of `_Grid` to view-specific (2D) extent."""
    for datum_end in points:
        _Grid.SetDatumExtentType(datum_end, CurrentView, DatumExtentType2D)


for grid_elem in Grid:
    DatumExtentType3DTo2D(grid_elem, [endpoint, startpoint], CurrentView)
print("Good")
[ "rpw.ui.Selection", "rpw.db.Transaction.ensure" ]
[((465, 483), 'rpw.ui.Selection', 'rpw.ui.Selection', ([], {}), '()\n', (481, 483), False, 'import rpw\n'), ((635, 680), 'rpw.db.Transaction.ensure', 'rpw.db.Transaction.ensure', (['"""Hide_Grid_Bubble"""'], {}), "('Hide_Grid_Bubble')\n", (660, 680), False, 'import rpw\n')]
import json
import urllib.request
import urllib.parse
import csv
import login


class Action:
    """Run a Microsoft Defender for Endpoint machine action for every
    machine id listed in a column of a CSV file."""

    def __init__(self, tenantId, appId, appSecret, body, url, filename, column):
        self.tenantId = tenantId
        self.appId = appId
        self.appSecret = appSecret
        self.body = body          # JSON payload sent with each request
        self.url = url            # action endpoint suffix appended to the machine URL
        self.filename = filename  # CSV file listing target machines
        self.column = column      # CSV column holding the machine id

    def action(self):
        """Authenticate once, then POST the action for each machine id.

        Failures for individual machines are reported and skipped
        (best-effort), so one bad row does not abort the run.
        """
        login_MDEcli = login.Login(self.tenantId, self.appId, self.appSecret)
        aadToken = login_MDEcli.login()
        # The payload and headers are identical for every machine;
        # build them once instead of once per CSV row.
        payload = json.dumps(self.body).encode("utf-8")
        headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'Authorization': "Bearer " + aadToken,
        }
        with open(self.filename, newline='') as csvfile:
            for row in csv.DictReader(csvfile):
                try:
                    machine_id = row[self.column].strip()
                    request_url = (
                        f"https://api.securitycenter.microsoft.com"
                        f"/api/machines/{machine_id}/{self.url}"
                    )
                    req = urllib.request.Request(url=request_url, data=payload, headers=headers)
                    response = urllib.request.urlopen(req)
                    jsonResponse = json.loads(response.read())
                    computerDnsName = jsonResponse['computerDnsName']
                    print(f'{self.url} started on {computerDnsName}')
                except Exception as e:
                    # Best-effort: report and continue with the remaining rows.
                    print(f'Something went wrong:{e}')
[ "login.Login", "csv.DictReader", "json.dumps" ]
[((426, 480), 'login.Login', 'login.Login', (['self.tenantId', 'self.appId', 'self.appSecret'], {}), '(self.tenantId, self.appId, self.appSecret)\n', (437, 480), False, 'import login\n'), ((598, 621), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {}), '(csvfile)\n', (612, 621), False, 'import csv\n'), ((1061, 1082), 'json.dumps', 'json.dumps', (['self.body'], {}), '(self.body)\n', (1071, 1082), False, 'import json\n')]
from flask import Flask
from flask_login import LoginManager
from flask_wtf import CsrfProtect

# Shared login manager, initialized per-app inside the factory.
login_manager = LoginManager()


def create_app():
    """Application factory: build, configure and wire up the Flask app."""
    app = Flask(__name__)
    app.config.from_pyfile("config/setting.py")

    # CSRF protection for form submissions.
    csrf_protect = CsrfProtect()
    csrf_protect.init_app(app)

    register_blueprint(app)
    create_db(app)

    # Unauthenticated visitors are redirected to the auth login view.
    login_manager.init_app(app)
    login_manager.login_view = "auth.login"
    login_manager.login_message = "Please login first!"
    return app


def register_blueprint(app):
    """Attach the web and auth blueprints to the app."""
    from .manager.blueprint import auth, web
    app.register_blueprint(web)
    app.register_blueprint(auth)


def create_db(app):
    """Bind the SQLAlchemy instance to the app and create all tables."""
    from .models.base import db
    db.init_app(app)
    db.create_all(app=app)
[ "flask_login.LoginManager", "flask_wtf.CsrfProtect", "flask.Flask" ]
[((112, 126), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (124, 126), False, 'from flask_login import LoginManager\n'), ((157, 172), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (162, 172), False, 'from flask import Flask\n'), ((232, 245), 'flask_wtf.CsrfProtect', 'CsrfProtect', ([], {}), '()\n', (243, 245), False, 'from flask_wtf import CsrfProtect\n')]
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
parse_bib_file.py: Convert raw bibtex file to yml format and md pages.
"""
import os
import yaml
import bibtexparser as bb


def get_link(entry):
    """Return the best available URL for an entry (url, then doi, then none)."""
    if 'url' in entry.keys():
        return entry['url']
    if 'doi' in entry.keys():
        return f"https://doi.org/{entry['doi']}"
    print('Warning: no link in', entry['ID'])
    return ''


def get_journal(entry):
    """Return the venue name for an article or conference entry."""
    entry_type = entry.get('ENTRYTYPE')
    if entry_type == 'article':
        return entry.get('journal')
    if entry_type == 'inproceedings':
        return entry.get('booktitle')
    print('Warning: unknown entry type in', entry)
    return ''


def clean(list_of_string):
    """Strip bibtex braces (and one known umlaut escape) from each string."""
    replacements = [
        ['\\"{u}}', 'ü'],
        ['{', ''],
        ['}', ''],
    ]
    if not isinstance(list_of_string, list):
        list_of_string = [list_of_string]
    cleaned = []
    for text in list_of_string:
        for old, new in replacements:
            text = text.replace(old, new)
        cleaned.append(text)
    return cleaned


def check_for_overwrite(fname):
    """Ask the user before overwriting an existing file; default is no."""
    if not os.path.exists(fname):
        return True
    answer = ""
    while answer not in ["y", "n"]:
        answer = input(f"{fname} exists, overwrite? (y/[n])") or "n"
    return answer != "n"


if __name__ == "__main__":
    import sys

    in_name_bib = "_data/mendeley-export.bib"
    out_name_data = "_data/mendeley-export.yml"
    out_folder_pages = "_publications"

    with open(in_name_bib, 'r') as f:
        bib_db = bb.load(f)

    general_dict = {
        'layout': 'publication',
        'ref-code': '',
        'ref-link': '',
        'ref-video': ''
    }
    categories = {'inproceedings': 'Conference', 'article': 'Journal'}

    all_dicts = []
    for entry in bib_db.entries:
        # "Last, First and Last, First ..." -> ["I. Last", ...]
        author_tuples = [a.split(', ') for a in entry['author'].split(' and ')]
        author_names = clean([f"{a[1].strip()[0]}. {a[0]}" for a in author_tuples])

        dict_to_write = {
            'ref-authors': author_names,
            'title': clean(entry['title'])[0],
            'ref-year': int(entry['year']),
            'ref-journal': get_journal(entry),
            'ref-link': get_link(entry),
            'categories': categories.get(entry['ENTRYTYPE'], 'Other'),
        }
        all_dicts.append(dict_to_write)

        # One markdown page per entry, with YAML front matter + abstract.
        fname = os.path.join(out_folder_pages, entry['ID'] + '.md')
        if not check_for_overwrite(fname):
            continue
        with open(fname, 'w') as f:
            f.write('---\n')
            f.write('layout: publication\n')
            f.write(yaml.dump(dict_to_write))
            f.write('---\n\n\n')
            f.write(entry.get('abstract', ''))
            f.write('\n')
        print('wrote', fname)

    if not check_for_overwrite(out_name_data):
        sys.exit(0)  # successful termination
    with open(out_name_data, 'w') as f:
        f.write(yaml.dump(all_dicts))
    print('wrote', out_name_data)
[ "os.path.exists", "yaml.dump", "os.path.join", "sys.exit", "bibtexparser.load" ]
[((1113, 1134), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (1127, 1134), False, 'import os\n'), ((1545, 1555), 'bibtexparser.load', 'bb.load', (['f'], {}), '(f)\n', (1552, 1555), True, 'import bibtexparser as bb\n'), ((2510, 2561), 'os.path.join', 'os.path.join', (['out_folder_pages', "(entry['ID'] + '.md')"], {}), "(out_folder_pages, entry['ID'] + '.md')\n", (2522, 2561), False, 'import os\n'), ((2977, 2988), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2985, 2988), False, 'import sys\n'), ((3071, 3091), 'yaml.dump', 'yaml.dump', (['all_dicts'], {}), '(all_dicts)\n', (3080, 3091), False, 'import yaml\n'), ((2758, 2782), 'yaml.dump', 'yaml.dump', (['dict_to_write'], {}), '(dict_to_write)\n', (2767, 2782), False, 'import yaml\n')]
# Generated by Django 2.2.13 on 2020-07-22 12:47

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add an ISO-639 language-code field to the TextType model."""

    dependencies = [
        ('apis_vocabularies', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='texttype',
            name='lang',
            field=models.CharField(
                blank=True,
                default='deu',
                help_text="The ISO 639-3 (or 2) code for the label's language.",
                max_length=3,
                null=True,
                verbose_name='ISO Code',
            ),
        ),
    ]
[ "django.db.models.CharField" ]
[((334, 501), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""deu"""', 'help_text': '"""The ISO 639-3 (or 2) code for the label\'s language."""', 'max_length': '(3)', 'null': '(True)', 'verbose_name': '"""ISO Code"""'}), '(blank=True, default=\'deu\', help_text=\n "The ISO 639-3 (or 2) code for the label\'s language.", max_length=3,\n null=True, verbose_name=\'ISO Code\')\n', (350, 501), False, 'from django.db import migrations, models\n')]
import torch
import get_data
import numpy as np
import torchaudio


def number_of_correct(pred, target):
    """Count predictions that match the target labels."""
    return pred.squeeze().eq(target).sum().item()


def get_likely_index(tensor):
    """Return the index of the highest score along the last dimension."""
    return tensor.argmax(dim=-1)


def compute_accuracy(model, data_loader, device):
    """Return the fraction of samples in `data_loader` that `model`
    classifies correctly."""
    model.eval()
    correct = 0
    # Inference only: disable autograd so no graphs are built and memory
    # is not wasted while iterating the loader.
    with torch.no_grad():
        for data, target in data_loader:
            data = data.to(device)
            target = target.to(device)
            pred = get_likely_index(model(data))
            correct += number_of_correct(pred, target)
    return correct / len(data_loader.dataset)


def apply_to_wav(model, waveform: torch.Tensor, sample_rate: float, device: str):
    """Classify a waveform and return (label, probability) pairs sorted by
    descending probability."""
    model.eval()
    mel_spec = get_data.prepare_wav(waveform, sample_rate)
    mel_spec = torch.unsqueeze(mel_spec, dim=0).to(device)
    with torch.no_grad():
        res = model(mel_spec)
    # NOTE(review): the argsort below assumes the softmax output indexes
    # directly over classes (effectively 1-D) — confirm the model's output
    # shape.
    probs = torch.softmax(res, dim=-1).cpu().detach().numpy()
    predictions = []
    for idx in np.argsort(-probs):
        label = get_data.idx_to_label(idx)
        predictions.append((label, probs[idx]))
    return predictions


def apply_to_file(model, wav_file: str, device: str):
    """Load a wav file from disk and classify it via `apply_to_wav`."""
    waveform, sample_rate = torchaudio.load(wav_file)
    return apply_to_wav(model, waveform, sample_rate, device)
[ "torch.nn.Softmax", "get_data.prepare_wav", "torchaudio.load", "torch.unsqueeze", "get_data.idx_to_label", "numpy.argsort" ]
[((707, 750), 'get_data.prepare_wav', 'get_data.prepare_wav', (['waveform', 'sample_rate'], {}), '(waveform, sample_rate)\n', (727, 750), False, 'import get_data\n'), ((938, 956), 'numpy.argsort', 'np.argsort', (['(-probs)'], {}), '(-probs)\n', (948, 956), True, 'import numpy as np\n'), ((1156, 1181), 'torchaudio.load', 'torchaudio.load', (['wav_file'], {}), '(wav_file)\n', (1171, 1181), False, 'import torchaudio\n'), ((974, 1000), 'get_data.idx_to_label', 'get_data.idx_to_label', (['idx'], {}), '(idx)\n', (995, 1000), False, 'import get_data\n'), ((766, 798), 'torch.unsqueeze', 'torch.unsqueeze', (['mel_spec'], {'dim': '(0)'}), '(mel_spec, dim=0)\n', (781, 798), False, 'import torch\n'), ((849, 873), 'torch.nn.Softmax', 'torch.nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (865, 873), False, 'import torch\n')]
import os
import sys
import numpy as np
from sklearn import linear_model
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from scipy.sparse import csr_matrix
import csv


def ReadCsv(fileName, mode):
    """Read a tab-separated CSV file.

    :param fileName: path of the file to read
    :param mode: 'input' -> return every row as a list of lists;
                 'output' -> return column 1 of every row as a list of ints
    :raises ValueError: for any other mode
    """
    # Context manager closes the file even on error (the original
    # leaked the handle).
    with open(fileName) as in_file:
        reader = csv.reader(in_file, delimiter='\t', quotechar='"')
        if mode == 'input':
            data = list(reader)
        elif mode == 'output':
            column = 1
            data = [int(row[column]) for row in reader]
        else:
            # Previously an unknown mode fell through to a NameError.
            raise ValueError(f"unknown mode: {mode!r}")
    return data


# in_filepath = sys.argv[1]
in_filepath = "/home/rostunov/workspace/neuro/maltparser/malt/data/x.txt"
X = np.asarray(ReadCsv(in_filepath, 'input'), 'int')
# A = csr_matrix(X)
# print (A)
[ "csv.reader" ]
[((287, 337), 'csv.reader', 'csv.reader', (['in_file'], {'delimiter': '"""\t"""', 'quotechar': '"""\\""""'}), '(in_file, delimiter=\'\\t\', quotechar=\'"\')\n', (297, 337), False, 'import csv\n')]
import string
from src import config


def password_generator():
    """Yield every candidate password over [0-9a-z] in counting order.

    Starts at the configured minimal length; once the last password of a
    length ('z' * length) has been yielded, the length grows by one and
    counting restarts.
    """
    alphabet = string.digits + string.ascii_lowercase
    base = len(alphabet)
    length = int(config.get('minimal_password_length'))
    counter = 0
    while True:
        # Render `counter` as a numeral over the alphabet (base 36).
        digits = []
        value = counter
        while value > 0:
            value, remainder = divmod(value, base)
            digits.append(alphabet[remainder])
        candidate = ''.join(reversed(digits))
        # Left-pad with the first symbol up to the current length.
        candidate = candidate.rjust(length, alphabet[0])
        yield candidate
        if candidate == alphabet[-1] * length:
            length += 1
            counter = 0
        else:
            counter += 1
[ "src.config.get" ]
[((191, 228), 'src.config.get', 'config.get', (['"""minimal_password_length"""'], {}), "('minimal_password_length')\n", (201, 228), False, 'from src import config\n')]
from django.db import models
from django.contrib.auth import get_user_model


class TextAnswer(models.Model):
    """A free-text answer given by a user to a question."""

    # Answers are deleted together with the owning user or question.
    user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
    question = models.ForeignKey('Question', on_delete=models.CASCADE)
    answer = models.CharField('Answer', max_length=100)

    class Meta:
        """Admin display names for TextAnswer."""
        verbose_name = 'Text Answer'
        verbose_name_plural = 'Text Answers'

    def __str__(self):
        """Human-readable representation: the answer text itself."""
        return f'{self.answer}'
[ "django.contrib.auth.get_user_model", "django.db.models.CharField", "django.db.models.ForeignKey" ]
[((242, 297), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Question"""'], {'on_delete': 'models.CASCADE'}), "('Question', on_delete=models.CASCADE)\n", (259, 297), False, 'from django.db import models\n'), ((311, 353), 'django.db.models.CharField', 'models.CharField', (['"""Answer"""'], {'max_length': '(100)'}), "('Answer', max_length=100)\n", (327, 353), False, 'from django.db import models\n'), ((183, 199), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (197, 199), False, 'from django.contrib.auth import get_user_model\n')]
from django.conf.urls import patterns, url
from qs import views

# URL routes for the qs app: index page, question submission, and listing.
urlpatterns = patterns(
    '',
    url(r'^$', views.index, name='index'),
    url(r'^sendqs/$', views.sendqs, name='sendqs'),
    url(r'^show/$', views.showall, name='showall'),
)
[ "django.conf.urls.url" ]
[((97, 133), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.index'], {'name': '"""index"""'}), "('^$', views.index, name='index')\n", (100, 133), False, 'from django.conf.urls import patterns, url\n'), ((140, 185), 'django.conf.urls.url', 'url', (['"""^sendqs/$"""', 'views.sendqs'], {'name': '"""sendqs"""'}), "('^sendqs/$', views.sendqs, name='sendqs')\n", (143, 185), False, 'from django.conf.urls import patterns, url\n'), ((192, 237), 'django.conf.urls.url', 'url', (['"""^show/$"""', 'views.showall'], {'name': '"""showall"""'}), "('^show/$', views.showall, name='showall')\n", (195, 237), False, 'from django.conf.urls import patterns, url\n')]
import cv2
import numpy as np
from plantcv.plantcv.transform import nonuniform_illumination


def test_nonuniform_illumination_rgb(transform_test_data):
    """Correcting an RGB image should reduce its mean brightness."""
    rgb_img = cv2.imread(transform_test_data.small_rgb_img)
    corrected = nonuniform_illumination(img=rgb_img, ksize=11)
    assert corrected.mean() < rgb_img.mean()


def test_nonuniform_illumination_gray(transform_test_data):
    """Correcting a grayscale image should preserve its shape."""
    gray_img = cv2.imread(transform_test_data.small_gray_img, -1)
    corrected = nonuniform_illumination(img=gray_img, ksize=11)
    assert corrected.shape == gray_img.shape
[ "numpy.mean", "plantcv.plantcv.transform.nonuniform_illumination", "cv2.imread" ]
[((216, 261), 'cv2.imread', 'cv2.imread', (['transform_test_data.small_rgb_img'], {}), '(transform_test_data.small_rgb_img)\n', (226, 261), False, 'import cv2\n'), ((278, 324), 'plantcv.plantcv.transform.nonuniform_illumination', 'nonuniform_illumination', ([], {'img': 'rgb_img', 'ksize': '(11)'}), '(img=rgb_img, ksize=11)\n', (301, 324), False, 'from plantcv.plantcv.transform import nonuniform_illumination\n'), ((500, 550), 'cv2.imread', 'cv2.imread', (['transform_test_data.small_gray_img', '(-1)'], {}), '(transform_test_data.small_gray_img, -1)\n', (510, 550), False, 'import cv2\n'), ((567, 614), 'plantcv.plantcv.transform.nonuniform_illumination', 'nonuniform_illumination', ([], {'img': 'gray_img', 'ksize': '(11)'}), '(img=gray_img, ksize=11)\n', (590, 614), False, 'from plantcv.plantcv.transform import nonuniform_illumination\n'), ((336, 354), 'numpy.mean', 'np.mean', (['corrected'], {}), '(corrected)\n', (343, 354), True, 'import numpy as np\n'), ((357, 373), 'numpy.mean', 'np.mean', (['rgb_img'], {}), '(rgb_img)\n', (364, 373), True, 'import numpy as np\n')]
def main(): import numpy as np import matplotlib.pyplot as plt import torchvision from torch.autograd import Variable import torch.nn as nn import pickle from random import randint, randrange import sys from tqdm import tqdm import cv2 print("CUDA available: {}".format(torch.cuda.is_available())) location = "ncc" # Import model architectures from models.DSCLRCN_OldContext import DSCLRCN from models.CoSADUV import CoSADUV from models.CoSADUV_NoTemporal import CoSADUV_NoTemporal # Prepare settings and get the datasets from util.data_utils import get_SALICON_datasets, get_video_datasets ### Data options ### dataset_root_dir = "Dataset/UAV123" # Dataset/[SALICON, UAV123, UAV123_LIKE_MISC] mean_image_name = ( "mean_image.npy" ) # Must be located at dataset_root_dir/mean_image_name img_size = ( 480, 640, ) # height, width - original: 480, 640, reimplementation: 96, 128 duration = ( 300 ) # Length of sequences loaded from each video, if a video dataset is used from util import loss_functions from util.solver import Solver ### Testing options ### # Minibatchsize: Determines how many images are processed at a time on the GPU minibatchsize = 2 # Recommended: 4 for 480x640 for >12GB mem, 2 for <12GB mem. 
########## PREPARE DATASETS ########## ### Prepare datasets and loaders ### if "SALICON" in dataset_root_dir: train_data, val_data, test_data, mean_image = get_SALICON_datasets( dataset_root_dir, mean_image_name, img_size ) train_loader = [ torch.utils.data.DataLoader( train_data, batch_size=minibatchsize, shuffle=True, num_workers=8, pin_memory=True, ) ] val_loader = [ torch.utils.data.DataLoader( val_data, batch_size=minibatchsize, shuffle=True, num_workers=8, pin_memory=True, ) ] # Load test loader using val_data as SALICON does not give GT for its test set test_loader = [ torch.utils.data.DataLoader( val_data, batch_size=minibatchsize, shuffle=True, num_workers=8, pin_memory=True, ) ] elif "UAV123" in dataset_root_dir: train_loader, val_loader, test_loader, mean_image = get_video_datasets( dataset_root_dir, mean_image_name, duration=duration, img_size=img_size, shuffle=False, loader_settings={ "batch_size": minibatchsize, "num_workers": 8, "pin_memory": False, }, ) ########## LOADING MODELS ########## # Loading a model from the saved state that produced # the lowest validation loss during training: # Requires the model classes be loaded # Assumes the model uses models.CoSADUV_NoTemporal architecture. 
# If not, this method will fail def load_model_from_checkpoint(model_name): filename = "trained_models/" + model_name + ".pth" if torch.cuda.is_available(): checkpoint = torch.load(filename) else: # Load GPU model on CPU checkpoint = torch.load(filename, map_location="cpu") start_epoch = checkpoint["epoch"] best_accuracy = checkpoint["best_accuracy"] if "DSCLRCN" in model_name: model = DSCLRCN(input_dim=img_size, local_feats_net="Seg") elif "CoSADUV_NoTemporal" in model_name: model = CoSADUV_NoTemporal(input_dim=img_size, local_feats_net="Seg") elif "CoSADUV" in model_name: model = CoSADUV(input_dim=img_size, local_feats_net="Seg") else: tqdm.write("Error: no model name found in filename: {}".format(model_name)) return # Ignore extra parameters ('.num_batches_tracked' # that are added on NCC due to different pytorch version) model.load_state_dict(checkpoint["state_dict"], strict=False) tqdm.write( "=> loaded model checkpoint '{}' (trained for {} epochs)\n with architecture {}".format( model_name, checkpoint["epoch"], type(model).__name__ ) ) if torch.cuda.is_available(): model = model.cuda() tqdm.write(" loaded to cuda") model.eval() return model def load_model(model_name): model = torch.load("trained_models/" + model_name, map_location="cpu") print("=> loaded model '{}'".format(model_name)) if torch.cuda.is_available(): model = model.cuda() print(" loaded to cuda") model.eval() return model ########## LOAD THE MODELS ########## models = [] model_names = [] # Loading some pretrained models to test them on the images: # DSCLRCN models ## Trained on SALICON ### NSS_loss # model_names.append("DSCLRCN/SALICON NSS -1.62NSS val best and last/best_model_DSCLRCN_NSS_loss_batch20_epoch5") ## Trained on UAV123 ### NSS_alt loss func # model_names.append("DSCLRCN/UAV123 NSS_alt 1.38last 3.15best testing/best_model_DSCLRCN_NSS_alt_batch20_epoch5") # CoSADUV_NoTemporal models ## Trained on UAV123 ### DoM loss func # model_names.append( # "CoSADUV_NoTemporal/DoM SGD 0.01lr - 3.16 
NSS_alt/best_model_CoSADUV_NoTemporal_DoM_batch20_epoch6" # ) ### NSS_alt loss func # model_names.append("CoSADUV_NoTemporal/NSS_alt Adam lr 1e-4 - 1.36/best_model_CoSADUV_NoTemporal_NSS_alt_batch20_epoch5") ### CE_MAE loss func # model_names.append("CoSADUV_NoTemporal/best_model_CoSADUV_NoTemporal_CE_MAE_loss_batch20_epoch10") # CoSADUV models (CoSADUV2) ## Trained on UAV123 ### NSS_alt loss func #### 1 Frame backpropagation #### Kernel size 1 # model_names.append("CoSADUV/NSS_alt Adam 0.001lr 1frame backprop size1 kernel -2train -0.7val 1epoch/best_model_CoSADUV_NSS_alt_batch20_epoch5") #### Kernel size 3 model_names.append( "CoSADUV/NSS_alt Adam 0.01lr 1frame backprop size3 kernel/best_model_CoSADUV_NSS_alt_batch20_epoch5" ) #### 2 Frame backpropagation #### Kernel size 3 #model_names.append( # "CoSADUV/NSS_alt Adam 0.01lr 2frame backprop size3 kernel - 6.56 NSS_alt val/best_model_CoSADUV_NSS_alt_batch20_epoch5" #) ### DoM loss func # Only very poor results achieved ### CE_MAE loss func # Only very poor results achieved max_name_len = max([len(name) for name in model_names]) # Load the models specified above iterable = model_names # for i, name in enumerate(iterable): # if "best_model" in name: # models.append(load_model_from_checkpoint(name)) # else: # models.append(load_model(name)) print() print("Loaded all specified models") ########## TEST THE MODEL ########## # Define a function for testing a model # Output is resized to the size of the data_source def test_model(model, data_loader, loss_fns=[loss_functions.MAE_loss]): loss_sums = [] loss_counts = [] for i, loss_fn in enumerate(loss_fns): if loss_fn != loss_functions.NSS_alt: loss_sums.append(0) loss_counts.append(0) else: loss_sums.append([0, 0]) loss_counts.append([0, 0]) loop1 = data_loader if location != "ncc": loop1 = tqdm(loop1) for video_loader in loop1: # Reset temporal state if model is temporal if model.temporal: model.clear_temporal_state() loop2 = video_loader if location != "ncc": loop2 = 
tqdm(loop2) for data in loop2: inputs, labels = data if torch.cuda.is_available(): inputs = inputs.cuda() labels = labels.cuda() # Produce the output outputs = model(inputs).squeeze(1) # if model is temporal detach its state if model.temporal: model.detach_temporal_state() # Move the output to the CPU so we can process it using numpy outputs = outputs.cpu().data.numpy() # Threshold output if model is temporal if model.temporal: outputs[outputs >= 0.50001] = 1 outputs[outputs < 0.50001] = 0 # If outputs contains a single image, insert # a singleton batchsize dimension at index 0 if len(outputs.shape) == 2: outputs = np.expand_dims(outputs, 0) # Resize the images to input size outputs = np.array( [ cv2.resize(output, (labels.shape[2], labels.shape[1])) for output in outputs ] ) outputs = torch.from_numpy(outputs) if torch.cuda.is_available(): outputs = outputs.cuda() labels = labels.cuda() # Apply each loss function, add results to corresponding entry in loss_sums and loss_counts for i, loss_fn in enumerate(loss_fns): # If loss fn is NSS_alt, manually add std_dev() if the target is all-0 if loss_fn == loss_functions.NSS_alt: for i in range(len(labels)): if labels[i].sum() == 0: loss_sums[i][1] += outputs[i].std().item() loss_counts[i][1] += 1 else: loss_sums[i][0] += loss_fn(outputs[i], labels[i]).item() loss_counts[i][0] += 1 else: loss_sums[i] += loss_fn(outputs, labels).item() loss_counts[i] += 1 return loss_sums, loss_counts # Obtaining loss values on the test set for different models: loop3 = model_names if location != "ncc": loop3 = tqdm(loop3) for i, model_name in enumerate(loop3): if location != "ncc": tqdm.write("model name: {}".format(model_name)) else: print("model name: {}".format(model_name)) if "best_model" in model_name: model = load_model_from_checkpoint(model_name) else: model = load_model(model_name) loss_fns = [ loss_functions.NSS_alt, loss_functions.CE_loss, loss_functions.MAE_loss, loss_functions.DoM, ] test_losses, test_counts = 
test_model(model, test_loader, loss_fns=loss_fns) # Print out the result tqdm.write("[{}] Model: ".format(i, model_names[i])) for i, func in enumerate(loss_fns): if func == loss_functions.NSS_alt: tqdm.write( ("{:25} : {:6f}").format( "NSS_alt (+ve imgs)", test_losses[i][0] / max(test_counts[i][0], 1) ) ) tqdm.write( ("{:25} : {:6f}").format( "NSS_alt (-ve imgs)", test_losses[i][1] / max(test_counts[i][1], 1) ) ) else: tqdm.write( ("{:25} : {:6f}").format( func.__name__, test_losses[i] / test_counts[i] ) ) del model if __name__ == "__main__": import torch torch.multiprocessing.set_start_method("forkserver") # spawn, forkserver, or fork # Use CuDNN with benchmarking for performance improvement: # from 1.05 batch20/s to 1.55 batch20/s on Quadro P4000 torch.backends.cudnn.enabled = True torch.backends.cudnn.benchmark = True main()
[ "models.DSCLRCN_OldContext.DSCLRCN", "cv2.resize", "tqdm.tqdm.write", "torch.load", "util.data_utils.get_SALICON_datasets", "tqdm.tqdm", "models.CoSADUV_NoTemporal.CoSADUV_NoTemporal", "util.data_utils.get_video_datasets", "torch.from_numpy", "models.CoSADUV.CoSADUV", "torch.cuda.is_available", ...
[((12055, 12107), 'torch.multiprocessing.set_start_method', 'torch.multiprocessing.set_start_method', (['"""forkserver"""'], {}), "('forkserver')\n", (12093, 12107), False, 'import torch\n'), ((1562, 1627), 'util.data_utils.get_SALICON_datasets', 'get_SALICON_datasets', (['dataset_root_dir', 'mean_image_name', 'img_size'], {}), '(dataset_root_dir, mean_image_name, img_size)\n', (1582, 1627), False, 'from util.data_utils import get_SALICON_datasets, get_video_datasets\n'), ((3348, 3373), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3371, 3373), False, 'import torch\n'), ((4524, 4549), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4547, 4549), False, 'import torch\n'), ((4719, 4781), 'torch.load', 'torch.load', (["('trained_models/' + model_name)"], {'map_location': '"""cpu"""'}), "('trained_models/' + model_name, map_location='cpu')\n", (4729, 4781), False, 'import torch\n'), ((4850, 4875), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4873, 4875), False, 'import torch\n'), ((10555, 10566), 'tqdm.tqdm', 'tqdm', (['loop3'], {}), '(loop3)\n', (10559, 10566), False, 'from tqdm import tqdm\n'), ((315, 340), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (338, 340), False, 'import torch\n'), ((1687, 1803), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_data'], {'batch_size': 'minibatchsize', 'shuffle': '(True)', 'num_workers': '(8)', 'pin_memory': '(True)'}), '(train_data, batch_size=minibatchsize, shuffle=\n True, num_workers=8, pin_memory=True)\n', (1714, 1803), False, 'import torch\n'), ((1939, 2053), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['val_data'], {'batch_size': 'minibatchsize', 'shuffle': '(True)', 'num_workers': '(8)', 'pin_memory': '(True)'}), '(val_data, batch_size=minibatchsize, shuffle=\n True, num_workers=8, pin_memory=True)\n', (1966, 2053), False, 'import torch\n'), ((2277, 2391), 
'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['val_data'], {'batch_size': 'minibatchsize', 'shuffle': '(True)', 'num_workers': '(8)', 'pin_memory': '(True)'}), '(val_data, batch_size=minibatchsize, shuffle=\n True, num_workers=8, pin_memory=True)\n', (2304, 2391), False, 'import torch\n'), ((2591, 2791), 'util.data_utils.get_video_datasets', 'get_video_datasets', (['dataset_root_dir', 'mean_image_name'], {'duration': 'duration', 'img_size': 'img_size', 'shuffle': '(False)', 'loader_settings': "{'batch_size': minibatchsize, 'num_workers': 8, 'pin_memory': False}"}), "(dataset_root_dir, mean_image_name, duration=duration,\n img_size=img_size, shuffle=False, loader_settings={'batch_size':\n minibatchsize, 'num_workers': 8, 'pin_memory': False})\n", (2609, 2791), False, 'from util.data_utils import get_SALICON_datasets, get_video_datasets\n'), ((3400, 3420), 'torch.load', 'torch.load', (['filename'], {}), '(filename)\n', (3410, 3420), False, 'import torch\n'), ((3496, 3536), 'torch.load', 'torch.load', (['filename'], {'map_location': '"""cpu"""'}), "(filename, map_location='cpu')\n", (3506, 3536), False, 'import torch\n'), ((3688, 3738), 'models.DSCLRCN_OldContext.DSCLRCN', 'DSCLRCN', ([], {'input_dim': 'img_size', 'local_feats_net': '"""Seg"""'}), "(input_dim=img_size, local_feats_net='Seg')\n", (3695, 3738), False, 'from models.DSCLRCN_OldContext import DSCLRCN\n'), ((4596, 4627), 'tqdm.tqdm.write', 'tqdm.write', (['""" loaded to cuda"""'], {}), "(' loaded to cuda')\n", (4606, 4627), False, 'from tqdm import tqdm\n'), ((7787, 7798), 'tqdm.tqdm', 'tqdm', (['loop1'], {}), '(loop1)\n', (7791, 7798), False, 'from tqdm import tqdm\n'), ((3808, 3869), 'models.CoSADUV_NoTemporal.CoSADUV_NoTemporal', 'CoSADUV_NoTemporal', ([], {'input_dim': 'img_size', 'local_feats_net': '"""Seg"""'}), "(input_dim=img_size, local_feats_net='Seg')\n", (3826, 3869), False, 'from models.CoSADUV_NoTemporal import CoSADUV_NoTemporal\n'), ((8059, 8070), 'tqdm.tqdm', 'tqdm', 
(['loop2'], {}), '(loop2)\n', (8063, 8070), False, 'from tqdm import tqdm\n'), ((8160, 8185), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (8183, 8185), False, 'import torch\n'), ((9354, 9379), 'torch.from_numpy', 'torch.from_numpy', (['outputs'], {}), '(outputs)\n', (9370, 9379), False, 'import torch\n'), ((9400, 9425), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (9423, 9425), False, 'import torch\n'), ((3928, 3978), 'models.CoSADUV.CoSADUV', 'CoSADUV', ([], {'input_dim': 'img_size', 'local_feats_net': '"""Seg"""'}), "(input_dim=img_size, local_feats_net='Seg')\n", (3935, 3978), False, 'from models.CoSADUV import CoSADUV\n'), ((9026, 9052), 'numpy.expand_dims', 'np.expand_dims', (['outputs', '(0)'], {}), '(outputs, 0)\n', (9040, 9052), True, 'import numpy as np\n'), ((9186, 9240), 'cv2.resize', 'cv2.resize', (['output', '(labels.shape[2], labels.shape[1])'], {}), '(output, (labels.shape[2], labels.shape[1]))\n', (9196, 9240), False, 'import cv2\n')]
#!/usr/bin/env python3 from pyglet.gl import * from pyglet.window import NoSuchConfigException from rubiks import CubeController, CubeView if __name__ == '__main__': controller = CubeController() platform = pyglet.window.get_platform() display = platform.get_default_display() screen = display.get_default_screen() try: template = pyglet.gl.Config(depth_size=24, sample_buffers=1, samples=4) config = screen.get_best_config(template) except NoSuchConfigException: config = screen.get_best_config() window = pyglet.window.Window(width=1024, height=768, caption="The Rubik's Cube", resizable=True, config=config) window.push_handlers(controller.on_key_press) view = CubeView(controller, window) pyglet.app.run()
[ "rubiks.CubeController", "rubiks.CubeView" ]
[((186, 202), 'rubiks.CubeController', 'CubeController', ([], {}), '()\n', (200, 202), False, 'from rubiks import CubeController, CubeView\n'), ((729, 757), 'rubiks.CubeView', 'CubeView', (['controller', 'window'], {}), '(controller, window)\n', (737, 757), False, 'from rubiks import CubeController, CubeView\n')]
import sys import os from PyQt5.QtWidgets import (QTabWidget, QMessageBox) from codeeditor import CodeEditor from widgets import MessageBox class TabWidget(QTabWidget): def __init__(self, parent=None): super().__init__() self.mainWindow = parent self.setStyleSheet( ''' background-color: #2c2c2c; color: white; alternate-background-color: #FFFFFF; selection-background-color: #3b5784; ''') self.setStyleSheet(''' QTabBar::tab:selected {background: darkgreen;} ''') self.setMovable(True) self.setTabsClosable(True) # signals self.tabCloseRequested.connect(self.closeTab) self.currentChanged.connect(self.changeTab) self.textPad = None self.codeView = None def newTab(self, editor=None, codeView=None): if not editor: editor = CodeEditor(parent=self.mainWindow) self.addTab(editor, "noname") editor.filename = None if self.mainWindow: self.codeView = self.mainWindow.codeView else: if editor.filename == None: self.addTab(editor, "noname") else: self.addTab(editor, os.path.basename(editor.filename)) x = self.count() - 1 self.setTabToolTip(x, editor.filename) self.codeView = self.mainWindow.codeView def closeTab(self, index): x = self.currentIndex() if x != index: self.setCurrentIndex(index) tabText = self.tabText(index) if '*' in tabText: q = MessageBox(QMessageBox.Warning, 'Warning', 'File not saved\n\nSave now ?', QMessageBox.Yes | QMessageBox.No) if (q.exec_() == QMessageBox.Yes): self.mainWindow.save() self.removeTab(index) else: self.removeTab(index) else: self.removeTab(index) x = self.currentIndex() self.setCurrentIndex(x) if x == -1: self.refreshCodeView('') self.mainWindow.setWindowTitle('CrossCobra - Python IDE') def changeTab(self, index): x = self.count() y = x - 1 if y >= 0: self.setCurrentIndex(index) textPad = self.currentWidget() self.textPad = textPad text = self.textPad.text() if self.codeView: self.refreshCodeView(text) else: self.codeView = self.mainWindow.codeView self.refreshCodeView(text) if self.textPad: self.mainWindow.refresh(self.textPad) def 
refreshCodeView(self, text=None): text = text codeViewDict = self.codeView.makeDictForCodeView(text) self.codeView.updateCodeView(codeViewDict) def getCurrentTextPad(self): textPad = self.currentWidget() return textPad
[ "os.path.basename", "codeeditor.CodeEditor", "widgets.MessageBox" ]
[((997, 1031), 'codeeditor.CodeEditor', 'CodeEditor', ([], {'parent': 'self.mainWindow'}), '(parent=self.mainWindow)\n', (1007, 1031), False, 'from codeeditor import CodeEditor\n'), ((1811, 1925), 'widgets.MessageBox', 'MessageBox', (['QMessageBox.Warning', '"""Warning"""', '"""File not saved\n\nSave now ?"""', '(QMessageBox.Yes | QMessageBox.No)'], {}), '(QMessageBox.Warning, \'Warning\', """File not saved\n\nSave now ?""",\n QMessageBox.Yes | QMessageBox.No)\n', (1821, 1925), False, 'from widgets import MessageBox\n'), ((1391, 1424), 'os.path.basename', 'os.path.basename', (['editor.filename'], {}), '(editor.filename)\n', (1407, 1424), False, 'import os\n')]
#!/usr/bin/python3 #Title: headers.py #Author: ApexPredator #License: MIT #Github: https://github.com/ApexPredator-InfoSec/header_check #Description: This script take a URL or list or URLs as arguments and tests for the headers: 'Strict-Transport-Security', 'Content-Security-Policy', 'X-Frame-Options', and 'Server' import requests import argparse import socket from urllib3.exceptions import InsecureRequestWarning parser = argparse.ArgumentParser(prog='headers.py', usage='python3 -t <target> -f <file contianing target list> -d\npython3 headers.py -t https://securityheaders.com -d\npython3 headers.py -f urls.txt') #build argument list parser.add_argument('-t', '--target', help='Target URL', required=False) parser.add_argument('-f', '--file', help='File Containing Target URLs', required=False) parser.add_argument('-d','--debug', help='Debug with proxy', required=False, action = 'store_const', const = True) args = parser.parse_args() s = requests.session() requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) #disable SSL verification warning to cleanup output http_proxy = 'http://127.0.0.1:8080' #define proxy address to enable using BURP or ZAP proxyDict = { #define proxy dictionary to enable using BURP or ZAP "http" : http_proxy, "https" : http_proxy } if args.debug: proxy = proxyDict #enable proxy if -d or --debug is present else: proxy = False #disable proxy is -d or --debug is not present def test_url(target): print("[+] Sending get request to target and checking header....") res = s.get(target, verify=False, proxies=proxy) #perform get request on url, disble SSL verification to prevent error for sites with invalid certs, proxy if proxy is enabled if 'https://' in target: #test for https url test_headers(target, res, 'Strict-Transport-Security') #test for Strict-Transport-Security header test_headers(target, res, 'Content-Security-Policy') #test for Content-Security-Policy test_headers(target, res, 'X-Frame-Options') #test for X-Frame-Options 
header test_headers(target, res, 'Server') #test for Server header elif 'http://' in target: #test for http url test_header(target, res, 'Content-Security-Policy') #test for Content-Security-Policy test_header(target, res, 'X-Frame-Options') #test for X-Frame-Options header test_header(target, res, 'Server') #test for Server header else: print("%s is an invalid URL" %target) #print invalid url if not https or http def test_headers(target, res, header): print("[+]Testing headers for %s" %target + ' IP: ' + socket.gethostbyname(target[8:])) #print URL being tested and its IP if header in res.headers: #test if the header passed to test_headers is in the get response headers print("[+] %s is enabled" %header) #print header is enabled print("[+] Value of %s header is: %s" %(header,res.headers[header])) #print value of header else: print("[-] !!!! %s is not enabled on %s" %(header,target) + ' IP: ' + socket.gethostbyname(target[8:])) #print header is not enabled if it is not present, display URL and IP def test_header(target, res, header): print("[+]Testing headers for %s" %target + ' IP: ' + socket.gethostbyname(target[7:])) #print URL being tested and its IP if header in res.headers: #test if header passed to test_header is in the get response headers print("[+] %s is enabled" %header) #print the header is enabled print("[+] Value of %s header is: %s" %(header,res.headers[header])) #print the vlaue of the header else: print("[-] !!!! 
%s is not enabled on %s" %(header,target) + ' IP: ' + socket.gethostbyname(target[7:])) #print header is not enabled if header is not found in get response header, display URL and IP def main(): if args.target: #test it -t or --target were passed and set target with value passed target = args.target test_url(target) elif args.file: #test if -f or --file were passed and set target with file named passed file = args.file with open(file, 'r') as target_list: #open file passed for line in target_list.readlines(): #read the lines in target = line.strip() #set target print("\n[+]Fetching URL from file.......\n") test_url(target) #test the url currently set in target else: print("[-]Either -t or -f arguments are required\nusage: python3 headers.py -t <target> -f <file contianing target list> -d\npython3 headers.py -t https://securityheaders.com -d\npython3 headers.py -f urls.txt") #print help message if neither -t or -f is passed if __name__ == '__main__': main()
[ "socket.gethostbyname", "requests.session", "requests.packages.urllib3.disable_warnings", "argparse.ArgumentParser" ]
[((427, 632), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""headers.py"""', 'usage': '"""python3 -t <target> -f <file contianing target list> -d\npython3 headers.py -t https://securityheaders.com -d\npython3 headers.py -f urls.txt"""'}), '(prog=\'headers.py\', usage=\n """python3 -t <target> -f <file contianing target list> -d\npython3 headers.py -t https://securityheaders.com -d\npython3 headers.py -f urls.txt"""\n )\n', (450, 632), False, 'import argparse\n'), ((950, 968), 'requests.session', 'requests.session', ([], {}), '()\n', (966, 968), False, 'import requests\n'), ((969, 1044), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {'category': 'InsecureRequestWarning'}), '(category=InsecureRequestWarning)\n', (1011, 1044), False, 'import requests\n'), ((2638, 2670), 'socket.gethostbyname', 'socket.gethostbyname', (['target[8:]'], {}), '(target[8:])\n', (2658, 2670), False, 'import socket\n'), ((3269, 3301), 'socket.gethostbyname', 'socket.gethostbyname', (['target[7:]'], {}), '(target[7:])\n', (3289, 3301), False, 'import socket\n'), ((3068, 3100), 'socket.gethostbyname', 'socket.gethostbyname', (['target[8:]'], {}), '(target[8:])\n', (3088, 3100), False, 'import socket\n'), ((3706, 3738), 'socket.gethostbyname', 'socket.gethostbyname', (['target[7:]'], {}), '(target[7:])\n', (3726, 3738), False, 'import socket\n')]
# Generated by Django 2.1 on 2018-08-06 02:11 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('sites', '0002_alter_domain_unique'), ] operations = [ migrations.CreateModel( name='Feature', fields=[ ('created', models.DateTimeField(auto_now_add=True)), ('last_modified', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=32)), ('identifier', models.SlugField(blank=True, primary_key=True, serialize=False, unique=True)), ], options={ 'ordering': ('created', 'last_modified'), 'abstract': False, }, ), migrations.CreateModel( name='FeatureUsage', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(auto_now_add=True)), ('last_modified', models.DateTimeField(auto_now=True)), ('feature', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='usage', to='features.Feature')), ('sites', models.ManyToManyField(to='sites.Site')), ], options={ 'ordering': ('created', 'last_modified'), 'abstract': False, }, ), migrations.CreateModel( name='FeatureValue', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(auto_now_add=True)), ('last_modified', models.DateTimeField(auto_now=True)), ], options={ 'ordering': ('created', 'last_modified'), 'abstract': False, }, ), migrations.CreateModel( name='BooleanFeatureValue', fields=[ ('featurevalue_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='features.FeatureValue')), ('value', models.BooleanField()), ], options={ 'ordering': ('created', 'last_modified'), 'abstract': False, }, bases=('features.featurevalue',), ), migrations.CreateModel( name='NumericFeatureValue', fields=[ ('featurevalue_ptr', 
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='features.FeatureValue')), ('value', models.DecimalField(decimal_places=3, max_digits=11)), ], options={ 'ordering': ('created', 'last_modified'), 'abstract': False, }, bases=('features.featurevalue',), ), migrations.CreateModel( name='StringFeatureValue', fields=[ ('featurevalue_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='features.FeatureValue')), ('value', models.TextField()), ], options={ 'ordering': ('created', 'last_modified'), 'abstract': False, }, bases=('features.featurevalue',), ), migrations.AddField( model_name='featureusage', name='value', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='usage', to='features.FeatureValue'), ), ]
[ "django.db.models.OneToOneField", "django.db.models.TextField", "django.db.models.ForeignKey", "django.db.models.ManyToManyField", "django.db.models.BooleanField", "django.db.models.SlugField", "django.db.models.AutoField", "django.db.models.DateTimeField", "django.db.models.DecimalField", "django...
[((3818, 3935), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""usage"""', 'to': '"""features.FeatureValue"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='usage', to='features.FeatureValue')\n", (3835, 3935), False, 'from django.db import migrations, models\n'), ((386, 425), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (406, 425), False, 'from django.db import migrations, models\n'), ((462, 497), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (482, 497), False, 'from django.db import migrations, models\n'), ((525, 556), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)'}), '(max_length=32)\n', (541, 556), False, 'from django.db import migrations, models\n'), ((590, 666), 'django.db.models.SlugField', 'models.SlugField', ([], {'blank': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'unique': '(True)'}), '(blank=True, primary_key=True, serialize=False, unique=True)\n', (606, 666), False, 'from django.db import migrations, models\n'), ((934, 1027), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (950, 1027), False, 'from django.db import migrations, models\n'), ((1054, 1093), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1074, 1093), False, 'from django.db import migrations, models\n'), ((1130, 1165), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1150, 1165), False, 'from django.db import migrations, models\n'), ((1196, 1308), 'django.db.models.ForeignKey', 
'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""usage"""', 'to': '"""features.Feature"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='usage', to='features.Feature')\n", (1213, 1308), False, 'from django.db import migrations, models\n'), ((1332, 1371), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""sites.Site"""'}), "(to='sites.Site')\n", (1354, 1371), False, 'from django.db import migrations, models\n'), ((1639, 1732), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1655, 1732), False, 'from django.db import migrations, models\n'), ((1759, 1798), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1779, 1798), False, 'from django.db import migrations, models\n'), ((1835, 1870), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1855, 1870), False, 'from django.db import migrations, models\n'), ((2159, 2334), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""features.FeatureValue"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'features.FeatureValue')\n", (2179, 2334), False, 'from django.db import migrations, models\n'), ((2353, 2374), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (2372, 2374), False, 'from django.db import migrations, models\n'), ((2709, 2884), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 
'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""features.FeatureValue"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'features.FeatureValue')\n", (2729, 2884), False, 'from django.db import migrations, models\n'), ((2903, 2955), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(3)', 'max_digits': '(11)'}), '(decimal_places=3, max_digits=11)\n', (2922, 2955), False, 'from django.db import migrations, models\n'), ((3289, 3464), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""features.FeatureValue"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'features.FeatureValue')\n", (3309, 3464), False, 'from django.db import migrations, models\n'), ((3483, 3501), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (3499, 3501), False, 'from django.db import migrations, models\n')]
import argparse import os from os.path import join import sys import joblib import pandas as pd import matplotlib.pyplot as plt import matplotlib matplotlib.use('TkAgg') sys.path.append('.') from project.models.common import get_errors, get_model_details_for_algorithm, get_color, init_scale_from_train_set from project.models.details import get_model_filepath, ModelDetails from project.models.scale import transform_x, inverse_transform_y from project.utils.app_ids import app_name_to_id from project.utils.logger import logger from project.definitions import ROOT_DIR from project.models.data import ( get_data_frame, DataFrameColumns, ) parser = argparse.ArgumentParser(description='Model training and validation.') parser.add_argument('--app_name', required=True, type=str, help='app name') parser.add_argument('--alg', required=True, type=str, help='algorithm') if __name__ == "__main__": args = parser.parse_args() logger.info(args) app_id = app_name_to_id.get(args.app_name, None) if app_id is None: raise ValueError(f'missing app "{args.app_name}" from app map={str(app_name_to_id)}') results_filepath = join(ROOT_DIR, '..', 'execution_results/results.csv') results_test_filepath = os.path.join(ROOT_DIR, '..', 'execution_results/results_test.csv') results_train_filepath = os.path.join(ROOT_DIR, '..', 'execution_results/results_train.csv') df, df_err = get_data_frame(results_filepath, app_id) df_test, df_test_err = get_data_frame(results_test_filepath, app_id) df_train, df_train_err = get_data_frame(results_train_filepath, app_id) if df_err is not None or df_test_err is not None or df_train_err is not None: raise ValueError(f'data frame load err') x_origin = df.loc[:, df.columns != DataFrameColumns.EXECUTION_TIME] x_test = df_test.loc[:, df_test.columns != DataFrameColumns.EXECUTION_TIME] x_train = df_train.loc[:, df_train.columns != DataFrameColumns.EXECUTION_TIME] y = df.loc[:, df.columns == DataFrameColumns.EXECUTION_TIME] y_test = df_test.loc[:, df_test.columns == 
DataFrameColumns.EXECUTION_TIME] y_train = df_train.loc[:, df_train.columns == DataFrameColumns.EXECUTION_TIME] x_plot_train = x_train[DataFrameColumns.OVERALL_SIZE] y_plot_train = x_train[DataFrameColumns.CPUS] z_plot_train = y_train[DataFrameColumns.EXECUTION_TIME] x_plot_test = x_test[DataFrameColumns.OVERALL_SIZE] y_plot_test = x_test[DataFrameColumns.CPUS] z_plot_test = y_test[DataFrameColumns.EXECUTION_TIME] # plot data points ax = plt.axes(projection='3d') ax.set_xlabel('over', linespacing=0.1, labelpad=-12) ax.set_ylabel('cpus', linespacing=0.1, labelpad=-12) ax.set_zlabel('t', linespacing=0.1, labelpad=-15) ax.tick_params( axis='both', # changes apply to the x-axis which='both', # both major and minor ticks are affected bottom=False, # ticks along the bottom edge are off top=False, left=False, # ticks along the bottom edge are off right=False, labelbottom=False, labeltop=False, labelright=False, labelleft=False ) ax.dist = 8 ax.scatter(x_plot_train, y_plot_train, z_plot_train, c='#2ca02c', alpha=1, label='training points') ax.scatter(x_plot_test, y_plot_test, z_plot_test, label='test points', c='#cc0000', alpha=1) # Load model details model_details = get_model_details_for_algorithm(args.app_name, args.alg) if model_details.scale: init_scale_from_train_set(model_details, app_id) x_test = pd.DataFrame(transform_x(x_test), columns=x_test.columns) x = pd.DataFrame(transform_x(x_origin), columns=x_origin.columns) # Load model model_filepath, err = get_model_filepath(args.alg, model_details) if err is not None: raise ValueError(err) model = joblib.load(model_filepath) z_all = model.predict(x) # Efficiency z_test = model.predict(x_test) z_test_inverse = inverse_transform_y(z_test) y_test_list = list(y_test[DataFrameColumns.EXECUTION_TIME]) y_train_list = list(y_train[DataFrameColumns.EXECUTION_TIME]) errors, errors_rel = get_errors(y_test_list, z_test_inverse) logger.info('############### SUMMARY ##################') logger.info('avg time [s] = %s' % 
str(sum(y_test_list) / len(y_test_list))) logger.info('avg error [s] = %s' % str(sum(errors) / len(errors))) logger.info('avg error relative [percentage] = %s' % str(sum(errors_rel) / len(errors_rel))) logger.info(f'best params: {str(model.get_params())}') # Plot prediction surface z_inverse = inverse_transform_y(z_all) x_plot = x_origin[DataFrameColumns.OVERALL_SIZE].to_numpy() y_plot = x_origin[DataFrameColumns.CPUS].to_numpy() ax.plot_trisurf(x_plot, y_plot, z_inverse, alpha=0.5, color=get_color(args.alg)) fake_legend_point = matplotlib.lines.Line2D([0], [0], linestyle="solid", c=get_color(args.alg)) plt.margins() plt.gcf().autofmt_xdate() handles, labels = ax.get_legend_handles_labels() handles.append(fake_legend_point) labels.append(args.alg) ax.legend(handles, labels, loc='upper left') ax.view_init(elev=20., azim=140) model_scheme = ModelDetails(args.app_name, 1.0, True, False) fig_path = os.path.join(ROOT_DIR, 'models', 'figures', '_'.join([args.alg, args.app_name, 'surf.png'])) plt.savefig(fig_path, bbox_inches='tight', pad_inches=0)
[ "project.utils.logger.logger.info", "project.models.common.init_scale_from_train_set", "project.models.details.ModelDetails", "project.models.details.get_model_filepath", "sys.path.append", "matplotlib.pyplot.margins", "project.models.common.get_errors", "argparse.ArgumentParser", "project.models.co...
[((148, 171), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (162, 171), False, 'import matplotlib\n'), ((173, 193), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (188, 193), False, 'import sys\n'), ((663, 732), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Model training and validation."""'}), "(description='Model training and validation.')\n", (686, 732), False, 'import argparse\n'), ((945, 962), 'project.utils.logger.logger.info', 'logger.info', (['args'], {}), '(args)\n', (956, 962), False, 'from project.utils.logger import logger\n'), ((976, 1015), 'project.utils.app_ids.app_name_to_id.get', 'app_name_to_id.get', (['args.app_name', 'None'], {}), '(args.app_name, None)\n', (994, 1015), False, 'from project.utils.app_ids import app_name_to_id\n'), ((1158, 1211), 'os.path.join', 'join', (['ROOT_DIR', '""".."""', '"""execution_results/results.csv"""'], {}), "(ROOT_DIR, '..', 'execution_results/results.csv')\n", (1162, 1211), False, 'from os.path import join\n'), ((1240, 1306), 'os.path.join', 'os.path.join', (['ROOT_DIR', '""".."""', '"""execution_results/results_test.csv"""'], {}), "(ROOT_DIR, '..', 'execution_results/results_test.csv')\n", (1252, 1306), False, 'import os\n'), ((1336, 1403), 'os.path.join', 'os.path.join', (['ROOT_DIR', '""".."""', '"""execution_results/results_train.csv"""'], {}), "(ROOT_DIR, '..', 'execution_results/results_train.csv')\n", (1348, 1403), False, 'import os\n'), ((1421, 1461), 'project.models.data.get_data_frame', 'get_data_frame', (['results_filepath', 'app_id'], {}), '(results_filepath, app_id)\n', (1435, 1461), False, 'from project.models.data import get_data_frame, DataFrameColumns\n'), ((1489, 1534), 'project.models.data.get_data_frame', 'get_data_frame', (['results_test_filepath', 'app_id'], {}), '(results_test_filepath, app_id)\n', (1503, 1534), False, 'from project.models.data import get_data_frame, DataFrameColumns\n'), ((1564, 1610), 
'project.models.data.get_data_frame', 'get_data_frame', (['results_train_filepath', 'app_id'], {}), '(results_train_filepath, app_id)\n', (1578, 1610), False, 'from project.models.data import get_data_frame, DataFrameColumns\n'), ((2569, 2594), 'matplotlib.pyplot.axes', 'plt.axes', ([], {'projection': '"""3d"""'}), "(projection='3d')\n", (2577, 2594), True, 'import matplotlib.pyplot as plt\n'), ((3429, 3485), 'project.models.common.get_model_details_for_algorithm', 'get_model_details_for_algorithm', (['args.app_name', 'args.alg'], {}), '(args.app_name, args.alg)\n', (3460, 3485), False, 'from project.models.common import get_errors, get_model_details_for_algorithm, get_color, init_scale_from_train_set\n'), ((3757, 3800), 'project.models.details.get_model_filepath', 'get_model_filepath', (['args.alg', 'model_details'], {}), '(args.alg, model_details)\n', (3775, 3800), False, 'from project.models.details import get_model_filepath, ModelDetails\n'), ((3869, 3896), 'joblib.load', 'joblib.load', (['model_filepath'], {}), '(model_filepath)\n', (3880, 3896), False, 'import joblib\n'), ((3999, 4026), 'project.models.scale.inverse_transform_y', 'inverse_transform_y', (['z_test'], {}), '(z_test)\n', (4018, 4026), False, 'from project.models.scale import transform_x, inverse_transform_y\n'), ((4182, 4221), 'project.models.common.get_errors', 'get_errors', (['y_test_list', 'z_test_inverse'], {}), '(y_test_list, z_test_inverse)\n', (4192, 4221), False, 'from project.models.common import get_errors, get_model_details_for_algorithm, get_color, init_scale_from_train_set\n'), ((4226, 4283), 'project.utils.logger.logger.info', 'logger.info', (['"""############### SUMMARY ##################"""'], {}), "('############### SUMMARY ##################')\n", (4237, 4283), False, 'from project.utils.logger import logger\n'), ((4637, 4663), 'project.models.scale.inverse_transform_y', 'inverse_transform_y', (['z_all'], {}), '(z_all)\n', (4656, 4663), False, 'from project.models.scale import 
transform_x, inverse_transform_y\n'), ((4974, 4987), 'matplotlib.pyplot.margins', 'plt.margins', ([], {}), '()\n', (4985, 4987), True, 'import matplotlib.pyplot as plt\n'), ((5242, 5287), 'project.models.details.ModelDetails', 'ModelDetails', (['args.app_name', '(1.0)', '(True)', '(False)'], {}), '(args.app_name, 1.0, True, False)\n', (5254, 5287), False, 'from project.models.details import get_model_filepath, ModelDetails\n'), ((5400, 5456), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fig_path'], {'bbox_inches': '"""tight"""', 'pad_inches': '(0)'}), "(fig_path, bbox_inches='tight', pad_inches=0)\n", (5411, 5456), True, 'import matplotlib.pyplot as plt\n'), ((3523, 3571), 'project.models.common.init_scale_from_train_set', 'init_scale_from_train_set', (['model_details', 'app_id'], {}), '(model_details, app_id)\n', (3548, 3571), False, 'from project.models.common import get_errors, get_model_details_for_algorithm, get_color, init_scale_from_train_set\n'), ((3599, 3618), 'project.models.scale.transform_x', 'transform_x', (['x_test'], {}), '(x_test)\n', (3610, 3618), False, 'from project.models.scale import transform_x, inverse_transform_y\n'), ((3665, 3686), 'project.models.scale.transform_x', 'transform_x', (['x_origin'], {}), '(x_origin)\n', (3676, 3686), False, 'from project.models.scale import transform_x, inverse_transform_y\n'), ((4848, 4867), 'project.models.common.get_color', 'get_color', (['args.alg'], {}), '(args.alg)\n', (4857, 4867), False, 'from project.models.common import get_errors, get_model_details_for_algorithm, get_color, init_scale_from_train_set\n'), ((4948, 4967), 'project.models.common.get_color', 'get_color', (['args.alg'], {}), '(args.alg)\n', (4957, 4967), False, 'from project.models.common import get_errors, get_model_details_for_algorithm, get_color, init_scale_from_train_set\n'), ((4992, 5001), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (4999, 5001), True, 'import matplotlib.pyplot as plt\n')]
import copy as _copy import math as _math import os as _os import cv2 as _cv2 import numpy as _np from PIL import Image as _IMG from easytorch.utils.logger import * """ ################################################################################################## Very useful image related utilities ################################################################################################## """ def _same_file(x): return x class Image: def __init__(self, dtype=_np.uint8): self.dir = None self.file = None self.array = None self.mask = None self.ground_truth = None self.extras = {} self.dtype = dtype def load(self, dir, file): try: self.dir = dir self.file = file self.array = _np.array(_IMG.open(self.path), dtype=self.dtype) except Exception as e: error('Fail to load file: ' + self.file + ': ' + str(e)) def load_mask(self, mask_dir=None, fget_mask=_same_file): if fget_mask is None: fget_mask = _same_file try: mask_file = fget_mask(self.file) self.mask = _np.array(_IMG.open(_os.path.join(mask_dir, mask_file)), dtype=self.dtype) except Exception as e: error('Fail to load mask: ' + str(e)) def load_ground_truth(self, gt_dir=None, fget_ground_truth=_same_file): if fget_ground_truth is None: fget_ground_truth = _same_file try: gt_file = fget_ground_truth(self.file) self.ground_truth = _np.array(_IMG.open(_os.path.join(gt_dir, gt_file)), dtype=self.dtype) except Exception as e: error('Fail to load ground truth: ' + str(e)) def get_array(self, dir='', getter=_same_file, file=None): if getter is None: getter = _same_file if not file: file = self.file arr = _np.array(_IMG.open(_os.path.join(dir, getter(file))), dtype=self.dtype) return arr def apply_mask(self): if self.mask is not None: self.array[self.mask == 0] = 0 def apply_clahe(self, clip_limit=2.0, tile_shape=(8, 8)): enhancer = _cv2.createCLAHE(clipLimit=clip_limit, tileGridSize=tile_shape) if len(self.array.shape) == 2: self.array = enhancer.apply(self.array) elif len(self.array.shape) == 3: self.array[:, :, 
0] = enhancer.apply(self.array[:, :, 0]) self.array[:, :, 1] = enhancer.apply(self.array[:, :, 1]) self.array[:, :, 2] = enhancer.apply(self.array[:, :, 2]) else: error('More than three channels') def __copy__(self): copy_obj = Image() copy_obj.file = _copy.copy(self.file) copy_obj.array = _copy.copy(self.array) copy_obj.mask = _copy.copy(self.mask) copy_obj.ground_truth = _copy.copy(self.ground_truth) copy_obj.extras = _copy.deepcopy(self.extras) copy_obj.dtype = _copy.deepcopy(self.dtype) return copy_obj @property def path(self): return _os.path.join(self.dir, self.file) def get_rgb_scores(arr_2d=None, truth=None): """ Returns a rgb image of pixelwise separation between ground truth and arr_2d (predicted image) with different color codes Easy when needed to inspect segmentation result against ground truth. :param arr_2d: :param truth: :return: """ arr_rgb = _np.zeros([arr_2d.shape[0], arr_2d.shape[1], 3], dtype=_np.uint8) x = arr_2d.copy() y = truth.copy() x[x == 255] = 1 y[y == 255] = 1 xy = x + (y * 2) arr_rgb[xy == 3] = [255, 255, 255] arr_rgb[xy == 1] = [0, 255, 0] arr_rgb[xy == 2] = [255, 0, 0] arr_rgb[xy == 0] = [0, 0, 0] return arr_rgb def get_praf1(arr_2d=None, truth=None): """ Returns precision, recall, f1 and accuracy score between two binary arrays upto five precision. 
:param arr_2d: :param truth: :return: """ x = arr_2d.copy() y = truth.copy() x[x == 255] = 1 y[y == 255] = 1 xy = x + (y * 2) tp = xy[xy == 3].shape[0] fp = xy[xy == 1].shape[0] tn = xy[xy == 0].shape[0] fn = xy[xy == 2].shape[0] try: p = tp / (tp + fp) except ZeroDivisionError: p = 0 try: r = tp / (tp + fn) except ZeroDivisionError: r = 0 try: a = (tp + tn) / (tp + fp + fn + tn) except ZeroDivisionError: a = 0 try: f1 = 2 * p * r / (p + r) except ZeroDivisionError: f1 = 0 return { 'Precision': round(p, 5), 'Recall': round(r, 5), 'Accuracy': round(a, 5), 'F1': round(f1, 5) } def rescale2d(arr): m = _np.max(arr) n = _np.min(arr) return (arr - n) / (m - n) def rescale3d(arrays): return list(rescale2d(arr) for arr in arrays) def get_signed_diff_int8(image_arr1=None, image_arr2=None): signed_diff = _np.array(image_arr1 - image_arr2, dtype=_np.int8) fx = _np.array(signed_diff - _np.min(signed_diff), _np.uint8) fx = rescale2d(fx) return _np.array(fx * 255, _np.uint8) def whiten_image2d(img_arr2d=None): img_arr2d = img_arr2d.copy() img_arr2d = (img_arr2d - img_arr2d.mean()) / img_arr2d.std() return _np.array(rescale2d(img_arr2d) * 255, dtype=_np.uint8) def get_chunk_indexes(img_shape=(0, 0), chunk_shape=(0, 0), offset_row_col=None): """ Returns a generator for four corners of each patch within image as specified. 
:param img_shape: Shape of the original image :param chunk_shape: Shape of desired patch :param offset_row_col: Offset for each patch on both x, y directions :return: """ img_rows, img_cols = img_shape chunk_row, chunk_col = chunk_shape offset_row, offset_col = offset_row_col row_end = False for i in range(0, img_rows, offset_row): if row_end: continue row_from, row_to = i, i + chunk_row if row_to > img_rows: row_to = img_rows row_from = img_rows - chunk_row row_end = True col_end = False for j in range(0, img_cols, offset_col): if col_end: continue col_from, col_to = j, j + chunk_col if col_to > img_cols: col_to = img_cols col_from = img_cols - chunk_col col_end = True yield [int(row_from), int(row_to), int(col_from), int(col_to)] def get_chunk_indices_by_index(img_shape=(0, 0), chunk_shape=(0, 0), indices=None): x, y = chunk_shape ix = [] for (c1, c2) in indices: w, h = img_shape p, q, r, s = c1 - x // 2, c1 + x // 2, c2 - y // 2, c2 + y // 2 if p < 0: p, q = 0, x if q > w: p, q = w - x, w if r < 0: r, s = 0, y if s > h: r, s = h - y, h ix.append([int(p), int(q), int(r), int(s)]) return ix def merge_patches(patches=None, image_size=(0, 0), patch_size=(0, 0), offset_row_col=None): """ Merge different pieces of image to form a full image. Overlapped regions are averaged. :param patches: List of all patches to merge in order (left to right). :param image_size: Full image size :param patch_size: A patch size(Patches must be uniform in size to be able to merge) :param offset_row_col: Offset used to chunk the patches. 
:return: """ padded_sum = _np.zeros([image_size[0], image_size[1]]) non_zero_count = _np.zeros_like(padded_sum) for i, chunk_ix in enumerate(get_chunk_indexes(image_size, patch_size, offset_row_col)): row_from, row_to, col_from, col_to = chunk_ix patch = _np.array(patches[i, :, :]).squeeze() padded = _np.pad(patch, [(row_from, image_size[0] - row_to), (col_from, image_size[1] - col_to)], 'constant') padded_sum = padded + padded_sum non_zero_count = non_zero_count + _np.array(padded > 0).astype(int) non_zero_count[non_zero_count == 0] = 1 return _np.array(padded_sum / non_zero_count, dtype=_np.uint8) def expand_and_mirror_patch(full_img_shape=None, orig_patch_indices=None, expand_by=None): """ Given a patch within an image, this function select a speciified region around it if present, else mirros it. It is useful in neuralnetworks like u-net which look for wide range of area than the actual input image. :param full_img_shape: Full image shape :param orig_patch_indices: Four cornets of the actual patch :param expand_by: Expand by (x, y ) in each dimension :return: """ i, j = int(expand_by[0] / 2), int(expand_by[1] / 2) p, q, r, s = orig_patch_indices a, b, c, d = p - i, q + i, r - j, s + j pad_a, pad_b, pad_c, pad_d = [0] * 4 if a < 0: pad_a = i - p a = 0 if b > full_img_shape[0]: pad_b = b - full_img_shape[0] b = full_img_shape[0] if c < 0: pad_c = j - r c = 0 if d > full_img_shape[1]: pad_d = d - full_img_shape[1] d = full_img_shape[1] return a, b, c, d, [(pad_a, pad_b), (pad_c, pad_d)] def largest_cc(binary_arr=None): from skimage.measure import label labels = label(binary_arr) if labels.max() != 0: # assume at least 1 CC largest = labels == _np.argmax(_np.bincount(labels.flat)[1:]) + 1 return largest def map_img_to_img2d(map_to, img): arr = map_to.copy() rgb = arr.copy() if len(arr.shape) == 2: rgb = _np.zeros((arr.shape[0], arr.shape[1], 3), dtype=_np.uint8) rgb[:, :, 0], rgb[:, :, 1], rgb[:, :, 2] = arr, arr, arr rgb[:, :, 0][img == 255] = 255 rgb[:, :, 1][img 
== 255] = 0 rgb[:, :, 2][img == 255] = 0 return rgb def remove_connected_comp(segmented_img, connected_comp_diam_limit=20): """ Remove connected components of a binary image that are less than smaller than specified diameter. :param segmented_img: Binary image. :param connected_comp_diam_limit: Diameter limit :return: """ from scipy.ndimage.measurements import label img = segmented_img.copy() structure = _np.ones((3, 3), dtype=_np.int) labeled, n_components = label(img, structure) for i in range(n_components): ixy = _np.array(list(zip(*_np.where(labeled == i)))) x1, y1 = ixy[0] x2, y2 = ixy[-1] dst = _math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) if dst < connected_comp_diam_limit: for u, v in ixy: img[u, v] = 0 return img def get_pix_neigh(i, j, eight=False): """ Get four/ eight neighbors of an image. :param i: x position of pixel :param j: y position of pixel :param eight: Eight neighbors? Else four :return: """ n1 = (i - 1, j - 1) n2 = (i - 1, j) n3 = (i - 1, j + 1) n4 = (i, j - 1) n5 = (i, j + 1) n6 = (i + 1, j - 1) n7 = (i + 1, j) n8 = (i + 1, j + 1) if eight: return [n1, n2, n3, n4, n5, n6, n7, n8] else: return [n2, n5, n7, n4]
[ "PIL.Image.open", "copy.deepcopy", "numpy.ones", "numpy.where", "scipy.ndimage.measurements.label", "os.path.join", "math.sqrt", "numpy.max", "cv2.createCLAHE", "numpy.array", "numpy.zeros", "numpy.pad", "numpy.bincount", "numpy.min", "copy.copy", "numpy.zeros_like" ]
[((3470, 3535), 'numpy.zeros', '_np.zeros', (['[arr_2d.shape[0], arr_2d.shape[1], 3]'], {'dtype': '_np.uint8'}), '([arr_2d.shape[0], arr_2d.shape[1], 3], dtype=_np.uint8)\n', (3479, 3535), True, 'import numpy as _np\n'), ((4755, 4767), 'numpy.max', '_np.max', (['arr'], {}), '(arr)\n', (4762, 4767), True, 'import numpy as _np\n'), ((4776, 4788), 'numpy.min', '_np.min', (['arr'], {}), '(arr)\n', (4783, 4788), True, 'import numpy as _np\n'), ((4975, 5025), 'numpy.array', '_np.array', (['(image_arr1 - image_arr2)'], {'dtype': '_np.int8'}), '(image_arr1 - image_arr2, dtype=_np.int8)\n', (4984, 5025), True, 'import numpy as _np\n'), ((5126, 5156), 'numpy.array', '_np.array', (['(fx * 255)', '_np.uint8'], {}), '(fx * 255, _np.uint8)\n', (5135, 5156), True, 'import numpy as _np\n'), ((7501, 7542), 'numpy.zeros', '_np.zeros', (['[image_size[0], image_size[1]]'], {}), '([image_size[0], image_size[1]])\n', (7510, 7542), True, 'import numpy as _np\n'), ((7564, 7590), 'numpy.zeros_like', '_np.zeros_like', (['padded_sum'], {}), '(padded_sum)\n', (7578, 7590), True, 'import numpy as _np\n'), ((8109, 8164), 'numpy.array', '_np.array', (['(padded_sum / non_zero_count)'], {'dtype': '_np.uint8'}), '(padded_sum / non_zero_count, dtype=_np.uint8)\n', (8118, 8164), True, 'import numpy as _np\n'), ((9292, 9309), 'scipy.ndimage.measurements.label', 'label', (['binary_arr'], {}), '(binary_arr)\n', (9297, 9309), False, 'from scipy.ndimage.measurements import label\n'), ((10220, 10251), 'numpy.ones', '_np.ones', (['(3, 3)'], {'dtype': '_np.int'}), '((3, 3), dtype=_np.int)\n', (10228, 10251), True, 'import numpy as _np\n'), ((10280, 10301), 'scipy.ndimage.measurements.label', 'label', (['img', 'structure'], {}), '(img, structure)\n', (10285, 10301), False, 'from scipy.ndimage.measurements import label\n'), ((2205, 2268), 'cv2.createCLAHE', '_cv2.createCLAHE', ([], {'clipLimit': 'clip_limit', 'tileGridSize': 'tile_shape'}), '(clipLimit=clip_limit, tileGridSize=tile_shape)\n', (2221, 2268), 
True, 'import cv2 as _cv2\n'), ((2747, 2768), 'copy.copy', '_copy.copy', (['self.file'], {}), '(self.file)\n', (2757, 2768), True, 'import copy as _copy\n'), ((2794, 2816), 'copy.copy', '_copy.copy', (['self.array'], {}), '(self.array)\n', (2804, 2816), True, 'import copy as _copy\n'), ((2841, 2862), 'copy.copy', '_copy.copy', (['self.mask'], {}), '(self.mask)\n', (2851, 2862), True, 'import copy as _copy\n'), ((2895, 2924), 'copy.copy', '_copy.copy', (['self.ground_truth'], {}), '(self.ground_truth)\n', (2905, 2924), True, 'import copy as _copy\n'), ((2951, 2978), 'copy.deepcopy', '_copy.deepcopy', (['self.extras'], {}), '(self.extras)\n', (2965, 2978), True, 'import copy as _copy\n'), ((3004, 3030), 'copy.deepcopy', '_copy.deepcopy', (['self.dtype'], {}), '(self.dtype)\n', (3018, 3030), True, 'import copy as _copy\n'), ((3105, 3139), 'os.path.join', '_os.path.join', (['self.dir', 'self.file'], {}), '(self.dir, self.file)\n', (3118, 3139), True, 'import os as _os\n'), ((7811, 7916), 'numpy.pad', '_np.pad', (['patch', '[(row_from, image_size[0] - row_to), (col_from, image_size[1] - col_to)]', '"""constant"""'], {}), "(patch, [(row_from, image_size[0] - row_to), (col_from, image_size[1\n ] - col_to)], 'constant')\n", (7818, 7916), True, 'import numpy as _np\n'), ((9582, 9641), 'numpy.zeros', '_np.zeros', (['(arr.shape[0], arr.shape[1], 3)'], {'dtype': '_np.uint8'}), '((arr.shape[0], arr.shape[1], 3), dtype=_np.uint8)\n', (9591, 9641), True, 'import numpy as _np\n'), ((10460, 10503), 'math.sqrt', '_math.sqrt', (['((x2 - x1) ** 2 + (y2 - y1) ** 2)'], {}), '((x2 - x1) ** 2 + (y2 - y1) ** 2)\n', (10470, 10503), True, 'import math as _math\n'), ((5059, 5079), 'numpy.min', '_np.min', (['signed_diff'], {}), '(signed_diff)\n', (5066, 5079), True, 'import numpy as _np\n'), ((820, 840), 'PIL.Image.open', '_IMG.open', (['self.path'], {}), '(self.path)\n', (829, 840), True, 'from PIL import Image as _IMG\n'), ((7755, 7782), 'numpy.array', '_np.array', (['patches[i, :, :]'], 
{}), '(patches[i, :, :])\n', (7764, 7782), True, 'import numpy as _np\n'), ((1190, 1224), 'os.path.join', '_os.path.join', (['mask_dir', 'mask_file'], {}), '(mask_dir, mask_file)\n', (1203, 1224), True, 'import os as _os\n'), ((1600, 1630), 'os.path.join', '_os.path.join', (['gt_dir', 'gt_file'], {}), '(gt_dir, gt_file)\n', (1613, 1630), True, 'import os as _os\n'), ((8020, 8041), 'numpy.array', '_np.array', (['(padded > 0)'], {}), '(padded > 0)\n', (8029, 8041), True, 'import numpy as _np\n'), ((9399, 9424), 'numpy.bincount', '_np.bincount', (['labels.flat'], {}), '(labels.flat)\n', (9411, 9424), True, 'import numpy as _np\n'), ((10370, 10393), 'numpy.where', '_np.where', (['(labeled == i)'], {}), '(labeled == i)\n', (10379, 10393), True, 'import numpy as _np\n')]
from django.test import TestCase from django.test import TransactionTestCase from main import tasks from celery.contrib.testing.worker import start_worker from main.models import User from src.celery import app class UserModelTests(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_user('<EMAIL>', '<PASSWORD>') def test_user_str(self): email = str(self.user) self.assertEqual(email, '<EMAIL>')
[ "main.models.User.objects.create_user" ]
[((309, 358), 'main.models.User.objects.create_user', 'User.objects.create_user', (['"""<EMAIL>"""', '"""<PASSWORD>"""'], {}), "('<EMAIL>', '<PASSWORD>')\n", (333, 358), False, 'from main.models import User\n')]
from pexpect import pxssh s = pxssh.pxssh() s.force_password=True if not s.login ('192.168.127.12', 'root', '<PASSWORD>!'): print ("SSH session failed on login.") else: print ("SSH session login successful") s.sendline ('cp /root/some.txt /root/some2.txt') s.prompt() # match the prompt print(s.before.decode('utf-8')) # print everything before the prompt. s.logout()
[ "pexpect.pxssh.pxssh" ]
[((30, 43), 'pexpect.pxssh.pxssh', 'pxssh.pxssh', ([], {}), '()\n', (41, 43), False, 'from pexpect import pxssh\n')]
from zeus.config import db from zeus.db.mixins import ApiTokenMixin, RepositoryMixin, StandardAttributes from zeus.db.utils import model_repr class RepositoryApiToken(StandardAttributes, RepositoryMixin, ApiTokenMixin, db.Model): """ An API token associated to a repository. """ __tablename__ = "repository_api_token" __repr__ = model_repr("repository_id", "key") def get_token_key(self): return "r"
[ "zeus.db.utils.model_repr" ]
[((352, 386), 'zeus.db.utils.model_repr', 'model_repr', (['"""repository_id"""', '"""key"""'], {}), "('repository_id', 'key')\n", (362, 386), False, 'from zeus.db.utils import model_repr\n')]
import numpy as np from PySide import QtGui, QtCore import sharppy.sharptab as tab from sharppy.sharptab.constants import * ## Written by <NAME> - OU School of Meteorology ## and <NAME> - CIMMS __all__ = ['backgroundWatch', 'plotWatch'] class backgroundWatch(QtGui.QFrame): ''' Draw the background frame and lines for the watch plot frame ''' def __init__(self): super(backgroundWatch, self).__init__() self.initUI() def initUI(self): ## window configuration settings, ## sich as padding, width, height, and ## min/max plot axes self.lpad = 0; self.rpad = 0 self.tpad = 0; self.bpad = 20 self.wid = self.size().width() - self.rpad self.hgt = self.size().height() - self.bpad self.tlx = self.rpad; self.tly = self.tpad self.brx = self.wid; self.bry = self.hgt if self.physicalDpiX() > 75: fsize = 10 else: fsize = 12 self.title_font = QtGui.QFont('Helvetica', fsize) self.plot_font = QtGui.QFont('Helvetica', fsize) self.title_metrics = QtGui.QFontMetrics( self.title_font ) self.plot_metrics = QtGui.QFontMetrics( self.plot_font ) self.title_height = self.title_metrics.height() self.plot_height = self.plot_metrics.height() self.plotBitMap = QtGui.QPixmap(self.width(), self.height()) self.plotBitMap.fill(QtCore.Qt.black) self.plotBackground() def resizeEvent(self, e): ''' Handles the event the window is resized ''' self.initUI() def draw_frame(self, qp): ''' Draw the background frame. qp: QtGui.QPainter object ''' ## set a new pen to draw with pen = QtGui.QPen(QtCore.Qt.white, 2, QtCore.Qt.SolidLine) qp.setPen(pen) qp.setFont(self.title_font) ## draw the borders in white qp.drawLine(self.tlx, self.tly, self.brx, self.tly) qp.drawLine(self.brx, self.tly, self.brx, self.bry) qp.drawLine(self.brx, self.bry, self.tlx, self.bry) qp.drawLine(self.tlx, self.bry, self.tlx, self.tly) y1 = self.bry / 13. pad = self.bry / 100. rect0 = QtCore.QRect(0, pad*4, self.brx, self.title_height) qp.drawText(rect0, QtCore.Qt.TextDontClip | QtCore.Qt.AlignCenter, 'Psbl Haz. 
Type') pen = QtGui.QPen(QtCore.Qt.white, 1, QtCore.Qt.SolidLine) qp.setPen(pen) qp.drawLine(0, pad*4 + (self.title_height + 3), self.brx, pad*4 + (self.title_height + 3)) def plotBackground(self): qp = QtGui.QPainter() qp.begin(self.plotBitMap) #qp.setRenderHint(qp.Antialiasing) #qp.setRenderHint(qp.TextAntialiasing) ## draw the frame self.draw_frame(qp) qp.end() class plotWatch(backgroundWatch): ''' Plot the data on the frame. Inherits the background class that plots the frame. ''' def __init__(self): super(plotWatch, self).__init__() self.prof = None def setProf(self, prof): self.prof = prof self.watch_type = self.prof.watch_type self.watch_type_color = self.prof.watch_type_color self.clearData() self.plotBackground() self.plotData() self.update() def resizeEvent(self, e): ''' Handles when the window is resized ''' super(plotWatch, self).resizeEvent(e) self.plotData() def paintEvent(self, e): ''' Handles painting on the frame ''' ## this function handles painting the plot super(plotWatch, self).paintEvent(e) ## create a new painter obkect qp = QtGui.QPainter() qp.begin(self) ## end the painter qp.drawPixmap(0,0,self.plotBitMap) qp.end() def clearData(self): ''' Handles the clearing of the pixmap in the frame. ''' self.plotBitMap = QtGui.QPixmap(self.width(), self.height()) self.plotBitMap.fill(QtCore.Qt.black) def plotData(self): if self.prof is None: return qp = QtGui.QPainter() qp.begin(self.plotBitMap) qp.setRenderHint(qp.Antialiasing) qp.setRenderHint(qp.TextAntialiasing) pen = QtGui.QPen(QtGui.QColor(self.watch_type_color), 1, QtCore.Qt.SolidLine) qp.setPen(pen) qp.setFont(self.plot_font) centery = self.bry / 2. rect0 = QtCore.QRect(0, centery, self.brx, self.title_height) qp.drawText(rect0, QtCore.Qt.TextDontClip | QtCore.Qt.AlignCenter, self.watch_type) qp.end()
[ "PySide.QtGui.QFont", "PySide.QtGui.QFontMetrics", "PySide.QtGui.QColor", "PySide.QtGui.QPen", "PySide.QtGui.QPainter", "PySide.QtCore.QRect" ]
[((996, 1027), 'PySide.QtGui.QFont', 'QtGui.QFont', (['"""Helvetica"""', 'fsize'], {}), "('Helvetica', fsize)\n", (1007, 1027), False, 'from PySide import QtGui, QtCore\n'), ((1053, 1084), 'PySide.QtGui.QFont', 'QtGui.QFont', (['"""Helvetica"""', 'fsize'], {}), "('Helvetica', fsize)\n", (1064, 1084), False, 'from PySide import QtGui, QtCore\n'), ((1114, 1149), 'PySide.QtGui.QFontMetrics', 'QtGui.QFontMetrics', (['self.title_font'], {}), '(self.title_font)\n', (1132, 1149), False, 'from PySide import QtGui, QtCore\n'), ((1180, 1214), 'PySide.QtGui.QFontMetrics', 'QtGui.QFontMetrics', (['self.plot_font'], {}), '(self.plot_font)\n', (1198, 1214), False, 'from PySide import QtGui, QtCore\n'), ((1775, 1826), 'PySide.QtGui.QPen', 'QtGui.QPen', (['QtCore.Qt.white', '(2)', 'QtCore.Qt.SolidLine'], {}), '(QtCore.Qt.white, 2, QtCore.Qt.SolidLine)\n', (1785, 1826), False, 'from PySide import QtGui, QtCore\n'), ((2247, 2300), 'PySide.QtCore.QRect', 'QtCore.QRect', (['(0)', '(pad * 4)', 'self.brx', 'self.title_height'], {}), '(0, pad * 4, self.brx, self.title_height)\n', (2259, 2300), False, 'from PySide import QtGui, QtCore\n'), ((2406, 2457), 'PySide.QtGui.QPen', 'QtGui.QPen', (['QtCore.Qt.white', '(1)', 'QtCore.Qt.SolidLine'], {}), '(QtCore.Qt.white, 1, QtCore.Qt.SolidLine)\n', (2416, 2457), False, 'from PySide import QtGui, QtCore\n'), ((2628, 2644), 'PySide.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (2642, 2644), False, 'from PySide import QtGui, QtCore\n'), ((3746, 3762), 'PySide.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (3760, 3762), False, 'from PySide import QtGui, QtCore\n'), ((4191, 4207), 'PySide.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (4205, 4207), False, 'from PySide import QtGui, QtCore\n'), ((4530, 4583), 'PySide.QtCore.QRect', 'QtCore.QRect', (['(0)', 'centery', 'self.brx', 'self.title_height'], {}), '(0, centery, self.brx, self.title_height)\n', (4542, 4583), False, 'from PySide import QtGui, QtCore\n'), ((4355, 4390), 
'PySide.QtGui.QColor', 'QtGui.QColor', (['self.watch_type_color'], {}), '(self.watch_type_color)\n', (4367, 4390), False, 'from PySide import QtGui, QtCore\n')]
#!/usr/bin/python # Copyright (c) 2011 GeometryFactory Sarl (France) # # $URL$ # $Id$ # SPDX-License-Identifier: LGPL-3.0-or-later OR LicenseRef-Commercial # # Author(s) : <NAME> import sys import os import gdb sys.path.insert(0, os.getcwd() + '/python') import CGAL.printers
[ "os.getcwd" ]
[((239, 250), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (248, 250), False, 'import os\n')]
import os import dill from copy import deepcopy from itertools import tee from abc import ABCMeta, abstractmethod from malemba import ModelBase from malemba.ds_tools import ArrayHandler, group_array class ModelDualStage(ModelBase): class Concatenator(object, metaclass=ABCMeta): @abstractmethod def concatenate(self, X, Y): return def __init__(self, model1, model2, concatenator, params1=None, params2=None, **kwargs): """ :param model1: Boosting model object inherited from malemba.ArrayModelBase that is the first to be applied :param model2: Boosting model object inherited from malemba.ArrayModelBase that is the to be applied over model1 :param concatenator: the object inherited from Concatenator class to be used to concatinate X and model1 prediction for model2 input :param params1: parameters dict for model1 :param params2: parameters dict for model2 :param kwargs: """ self.model1 = model1 self.model2 = model2 assert isinstance(self.model1, ModelBase), \ "Error: a value for model1 argument must be an object inherited from malemba.ModelBase" assert isinstance(self.model2, ModelBase), \ "Error: a value for model2 argument must be an object inherited from malemba.ModelBase" self._model1_class = self.model1.__class__ self._model2_class = self.model2.__class__ self.concatenator = concatenator assert isinstance(concatenator, ModelDualStage.Concatenator), \ "Error: a value for 'concatenator' argument must be an object inherited from ModelDualStage.Concatenator" self.params1 = params1 self.params2 = params2 super(ModelDualStage, self).__init__(params=None, **kwargs) self.aggr_level1 = kwargs.get("aggr_level1", 0) self.aggr_level2 = kwargs.get("aggr_level2", 0) self._data_l = 0 self._model1_fract = 0.0 def fit(self, X, Y, model1_fract=0.0, aggr_level1=None, aggr_level2=None, dump_scheme_path=None, **kwargs): """ :param X: features data :param Y: labels data :param model1_fract: the fraction of data is to be used for model1 training :param aggr_level1: number of aggregation 
steps to turn the input data into appropriate for model1 input :param aggr_level2: number of aggregation steps to turn the input data into appropriate for model2 input :param dump_scheme_path: The path for models dump :param kwargs: :return: """ super(ModelDualStage, self).fit(X=X, Y=Y, **kwargs) if aggr_level1 is not None: self.aggr_level1 = aggr_level1 if aggr_level2 is not None: self.aggr_level2 = aggr_level2 if model1_fract > 0.0: self._model1_fract = model1_fract self._data_l = kwargs.get("data_l", 0) if self._data_l <= 0: Y, Y_l = tee(Y) for y in Y_l: self._data_l += 1 X_1 = ModelDualStage._data_part(X, int(float(self._data_l) * self._model1_fract)) Y_1 = ModelDualStage._data_part(Y, int(float(self._data_l) * self._model1_fract)) if self.aggr_level1 > 0: data1_handler = ArrayHandler() Y_1 = data1_handler.aggregate(Y_1, aggr_level=self.aggr_level1) data1_handler.group_lims = None X_1 = data1_handler.aggregate(X_1, aggr_level=self.aggr_level1) self.model1.fit(X=X_1, Y=Y_1) if dump_scheme_path is not None: try: self.model1.dump(scheme_path=os.path.join(dump_scheme_path, "model1")) except: print("WARNING: model1 dump failed") if self._model1_fract >= 1.0: print("WARNING: model1 fraction >= 1.0 - model2 is model1") self.model2 = self.model1 else: X = self._get_model2_data(X=X, aggr_level=self.aggr_level1) if self.aggr_level2 > 0: data2_handler= ArrayHandler() Y = data2_handler.aggregate(Y, aggr_level=self.aggr_level2) data2_handler.group_lims = None X = data2_handler.aggregate(X, aggr_level=self.aggr_level2) self.model2.fit(X=X, Y=Y) if dump_scheme_path is not None: try: self.model2.dump(scheme_path=os.path.join(dump_scheme_path, "model2")) except: print("WARNING: model2 dump failed") self.dump(scheme_path=dump_scheme_path, only_meta=True) @staticmethod def _data_part(data, part_l): l = 0 for d in data: if l >= part_l: break l += 1 yield d def predict(self, X, aggr_level1=None, aggr_level2=None, **kwargs): """ :param X: features data :param concat_method: the 
function to be used to concatinate X and model1 prediction for model2 input :param aggr_level1: number of aggregation steps to turn the input data into one dimensional array :param kwargs: :return: """ if aggr_level1 is not None: self.aggr_level1 = aggr_level1 if aggr_level2 is not None: self.aggr_level2 = aggr_level2 X = self._get_model2_data(X=X, aggr_level=self.aggr_level1) if self.aggr_level2 > 0: X_handler = ArrayHandler() X = X_handler.aggregate(X, aggr_level=self.aggr_level2) return group_array(self.model2.predict(X=X), group_lims=X_handler.group_lims) else: return self.model2.predict(X=X) def _get_model2_data(self, X, aggr_level=0): X, X_1 = tee(X) if aggr_level > 0: X_1_handler = ArrayHandler() X_1 = X_1_handler.aggregate(X_1, aggr_level=aggr_level) model1_pred = group_array(self.model1.predict(X=X_1), group_lims=X_1_handler.group_lims) else: model1_pred = self.model1.predict(X=X_1) return self.concatenator.concatenate(X, model1_pred) @staticmethod def _convert_str_to_factors(): return False def str_to_factors(self, X): pass def validate(self, X_test, Y_test, labels_to_remove=None, aggr_level1=None, aggr_level2=None): if aggr_level1 is not None: self.aggr_level1 = aggr_level1 if aggr_level2 is not None: self.aggr_level2 = aggr_level2 X_test = self._get_model2_data(X=X_test, aggr_level=self.aggr_level1) if self.aggr_level1 > 0: data_handler = ArrayHandler() Y_test = data_handler.aggregate(Y_test, aggr_level=self.aggr_level2) data_handler.group_lims = None X_test = data_handler.aggregate(X_test, aggr_level=self.aggr_level2) return self.model2.validate(X_test=X_test, Y_test=Y_test, labels_to_remove=labels_to_remove) def validate_model1(self, X_test, Y_test, labels_to_remove=None, aggr_level=None): if aggr_level is not None: self.aggr_level1 = aggr_level if self.aggr_level1 > 0: data_handler = ArrayHandler() Y_test = data_handler.aggregate(Y_test, aggr_level=self.aggr_level1) data_handler.group_lims = None X_test = data_handler.aggregate(X_test, 
aggr_level=self.aggr_level1) return self.model1.validate(X_test=X_test, Y_test=Y_test, labels_to_remove=labels_to_remove) def validate_model2(self, X_test, Y_test, labels_to_remove=None, aggr_level1=None, aggr_level2=None): return self.validate(X_test=X_test, Y_test=Y_test, labels_to_remove=labels_to_remove, aggr_level1=aggr_level1, aggr_level2=aggr_level2) def dump(self, scheme_path, **kwargs): if not os.path.exists(scheme_path): os.makedirs(scheme_path) model1 = self.__dict__.pop("model1") model2 = self.__dict__.pop("model2") meta_f = open(os.path.join(scheme_path, "meta.m"), "wb") dill.dump(self.__dict__, meta_f) meta_f.close() self.model1 = model1 self.model2 = model2 if kwargs.get("only_meta", False): return self.model1.dump(scheme_path=os.path.join(scheme_path, "model1"), **kwargs) self.model2.dump(scheme_path=os.path.join(scheme_path, "model2"), **kwargs) @classmethod def load(cls, scheme_path, params1=None, params2=None, **kwargs): with open(os.path.join(scheme_path, "meta.m"), "rb") as meta_f: meta_dict = dill.load(meta_f) if params1 is not None: if meta_dict["params1"] is not None: meta_dict["params1"].update(params1) else: meta_dict["params1"] = params1 if params2 is not None: if meta_dict["params2"] is not None: meta_dict["params2"].update(params2) else: meta_dict["params2"] = params2 model1 = meta_dict["_model1_class"].load(scheme_path=os.path.join(scheme_path, "model1"), params=meta_dict["params1"], **kwargs) model2 = meta_dict["_model2_class"].load(scheme_path=os.path.join(scheme_path, "model2"), params=meta_dict["params2"], **kwargs) model_dual_stage = cls(model1=model1, model2=model2, concatenator=meta_dict["concatenator"], params1=params1, params2=params2) model_dual_stage.__dict__.update(meta_dict) return model_dual_stage @property def num_threads(self): return self.model2.num_threads def get_features(self, X): pass @property def features(self): return self.model1_features @property def model1_features(self): return self.model1.features @property 
def model2_features(self): return self.model2.features @property def feature_types(self): return self.model1_feature_types @property def model1_feature_types(self): return self.model1.feature_types @property def model2_feature_types(self): return self.model2.feature_types @property def labels(self): return self.model1.labels @property def label_freqs(self): if not self._label_freqs: for label in self.model1.label_freqs: self._label_freqs[label] = self.model1.label_freqs[label] * self._model1_fract for label in self.model2.label_freqs: self._label_freqs[label] += self.model2.label_freqs[label] * (1.0-self._model1_fract) return self._label_freqs
[ "os.path.exists", "os.makedirs", "os.path.join", "malemba.ds_tools.ArrayHandler", "itertools.tee", "dill.dump", "dill.load" ]
[((5840, 5846), 'itertools.tee', 'tee', (['X'], {}), '(X)\n', (5843, 5846), False, 'from itertools import tee\n'), ((8255, 8287), 'dill.dump', 'dill.dump', (['self.__dict__', 'meta_f'], {}), '(self.__dict__, meta_f)\n', (8264, 8287), False, 'import dill\n'), ((5542, 5556), 'malemba.ds_tools.ArrayHandler', 'ArrayHandler', ([], {}), '()\n', (5554, 5556), False, 'from malemba.ds_tools import ArrayHandler, group_array\n'), ((5900, 5914), 'malemba.ds_tools.ArrayHandler', 'ArrayHandler', ([], {}), '()\n', (5912, 5914), False, 'from malemba.ds_tools import ArrayHandler, group_array\n'), ((6732, 6746), 'malemba.ds_tools.ArrayHandler', 'ArrayHandler', ([], {}), '()\n', (6744, 6746), False, 'from malemba.ds_tools import ArrayHandler, group_array\n'), ((7279, 7293), 'malemba.ds_tools.ArrayHandler', 'ArrayHandler', ([], {}), '()\n', (7291, 7293), False, 'from malemba.ds_tools import ArrayHandler, group_array\n'), ((8026, 8053), 'os.path.exists', 'os.path.exists', (['scheme_path'], {}), '(scheme_path)\n', (8040, 8053), False, 'import os\n'), ((8067, 8091), 'os.makedirs', 'os.makedirs', (['scheme_path'], {}), '(scheme_path)\n', (8078, 8091), False, 'import os\n'), ((8204, 8239), 'os.path.join', 'os.path.join', (['scheme_path', '"""meta.m"""'], {}), "(scheme_path, 'meta.m')\n", (8216, 8239), False, 'import os\n'), ((8784, 8801), 'dill.load', 'dill.load', (['meta_f'], {}), '(meta_f)\n', (8793, 8801), False, 'import dill\n'), ((3014, 3020), 'itertools.tee', 'tee', (['Y'], {}), '(Y)\n', (3017, 3020), False, 'from itertools import tee\n'), ((3347, 3361), 'malemba.ds_tools.ArrayHandler', 'ArrayHandler', ([], {}), '()\n', (3359, 3361), False, 'from malemba.ds_tools import ArrayHandler, group_array\n'), ((4153, 4167), 'malemba.ds_tools.ArrayHandler', 'ArrayHandler', ([], {}), '()\n', (4165, 4167), False, 'from malemba.ds_tools import ArrayHandler, group_array\n'), ((8469, 8504), 'os.path.join', 'os.path.join', (['scheme_path', '"""model1"""'], {}), "(scheme_path, 'model1')\n", (8481, 
8504), False, 'import os\n'), ((8553, 8588), 'os.path.join', 'os.path.join', (['scheme_path', '"""model2"""'], {}), "(scheme_path, 'model2')\n", (8565, 8588), False, 'import os\n'), ((8706, 8741), 'os.path.join', 'os.path.join', (['scheme_path', '"""meta.m"""'], {}), "(scheme_path, 'meta.m')\n", (8718, 8741), False, 'import os\n'), ((9261, 9296), 'os.path.join', 'os.path.join', (['scheme_path', '"""model1"""'], {}), "(scheme_path, 'model1')\n", (9273, 9296), False, 'import os\n'), ((9496, 9531), 'os.path.join', 'os.path.join', (['scheme_path', '"""model2"""'], {}), "(scheme_path, 'model2')\n", (9508, 9531), False, 'import os\n'), ((4510, 4550), 'os.path.join', 'os.path.join', (['dump_scheme_path', '"""model2"""'], {}), "(dump_scheme_path, 'model2')\n", (4522, 4550), False, 'import os\n'), ((3727, 3767), 'os.path.join', 'os.path.join', (['dump_scheme_path', '"""model1"""'], {}), "(dump_scheme_path, 'model1')\n", (3739, 3767), False, 'import os\n')]
import xml.etree.ElementTree as ET from typing import NewType MethodCall = NewType("MethodCall", ET.Element) ConfigurableMethodTerminal = NewType("ConfigurableMethodTerminal", ET.Element) MethodTerminal = NewType("MethodTerminal", ET.Element) WhileLoop = NewType("WhileLoop", ET.Element) Terminal = NewType("Terminal", ET.Element) PairedConfigurableMethodCall = NewType("PairedConfigurableMethodCall",ET.Element) SequenceNode = NewType("PairedConfigurableMethodCall",ET.Element)
[ "typing.NewType" ]
[((76, 109), 'typing.NewType', 'NewType', (['"""MethodCall"""', 'ET.Element'], {}), "('MethodCall', ET.Element)\n", (83, 109), False, 'from typing import NewType\n'), ((139, 188), 'typing.NewType', 'NewType', (['"""ConfigurableMethodTerminal"""', 'ET.Element'], {}), "('ConfigurableMethodTerminal', ET.Element)\n", (146, 188), False, 'from typing import NewType\n'), ((206, 243), 'typing.NewType', 'NewType', (['"""MethodTerminal"""', 'ET.Element'], {}), "('MethodTerminal', ET.Element)\n", (213, 243), False, 'from typing import NewType\n'), ((256, 288), 'typing.NewType', 'NewType', (['"""WhileLoop"""', 'ET.Element'], {}), "('WhileLoop', ET.Element)\n", (263, 288), False, 'from typing import NewType\n'), ((300, 331), 'typing.NewType', 'NewType', (['"""Terminal"""', 'ET.Element'], {}), "('Terminal', ET.Element)\n", (307, 331), False, 'from typing import NewType\n'), ((363, 414), 'typing.NewType', 'NewType', (['"""PairedConfigurableMethodCall"""', 'ET.Element'], {}), "('PairedConfigurableMethodCall', ET.Element)\n", (370, 414), False, 'from typing import NewType\n'), ((429, 480), 'typing.NewType', 'NewType', (['"""PairedConfigurableMethodCall"""', 'ET.Element'], {}), "('PairedConfigurableMethodCall', ET.Element)\n", (436, 480), False, 'from typing import NewType\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from pylookyloo import Lookyloo import json # lookyloo_url = "https://lookyloo.circl.lu/" lookyloo_url = "http://0.0.0.0:5100" lookyloo = Lookyloo(lookyloo_url) lookyloo.init_apikey(username='admin', password='<PASSWORD>') event = lookyloo.misp_push('6ae2afdc-4d90-41ce-9cae-510daf1e6577') print(json.dumps(event, indent=2))
[ "json.dumps", "pylookyloo.Lookyloo" ]
[((188, 210), 'pylookyloo.Lookyloo', 'Lookyloo', (['lookyloo_url'], {}), '(lookyloo_url)\n', (196, 210), False, 'from pylookyloo import Lookyloo\n'), ((347, 374), 'json.dumps', 'json.dumps', (['event'], {'indent': '(2)'}), '(event, indent=2)\n', (357, 374), False, 'import json\n')]
# Copyright (C) 2015-2019 <NAME> # SPDX-License-Identifier: Apache-2.0 # Check for presence of FEniCS etc from .verify_environment import verify_env verify_env() __version__ = '2019.0.2' # This should potentially be made local to the mesh creation routines import dolfin dolfin.parameters['ghost_mode'] = 'shared_vertex' del dolfin def get_version(): """ Return the version number of Ocellaris """ return __version__ def get_detailed_version(): """ Return the version number of Ocellaris including source control commit revision information """ import os import subprocess this_dir = os.path.dirname(os.path.abspath(__file__)) proj_dir = os.path.abspath(os.path.join(this_dir, '..')) if os.path.isdir(os.path.join(proj_dir, '.git')): cmd = ['git', 'describe', '--always'] version = subprocess.check_output(cmd, cwd=proj_dir) local_version = '+git.' + version.decode('utf8').strip() else: local_version = '' return get_version() + local_version # Convenience imports for scripting from .simulation import Simulation from .run import setup_simulation, run_simulation
[ "os.path.abspath", "subprocess.check_output", "os.path.join" ]
[((656, 681), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (671, 681), False, 'import os\n'), ((714, 742), 'os.path.join', 'os.path.join', (['this_dir', '""".."""'], {}), "(this_dir, '..')\n", (726, 742), False, 'import os\n'), ((765, 795), 'os.path.join', 'os.path.join', (['proj_dir', '""".git"""'], {}), "(proj_dir, '.git')\n", (777, 795), False, 'import os\n'), ((862, 904), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'cwd': 'proj_dir'}), '(cmd, cwd=proj_dir)\n', (885, 904), False, 'import subprocess\n')]
''' Created on July 7, 2019 @author: Terry @email:<EMAIL> ''' print(__doc__) import numpy as np import matplotlib.pyplot as plt from sklearn.datasets import load_iris from sklearn.tree import DecisionTreeClassifier, plot_tree # Parameter n_classes = 3 plot_colors = "ryb" plot_step = 0.02 # Load data iris = load_iris() for pairidx, pair in enumerate([[0,1],[0,2],[0,3], [1,2],[1,3],[2,3]]): # We only take two corresponding features X = iris.data[:,pair] y = iris.target # Train clf = DecisionTreeClassifier().fit(X,y) # Plot the descision boundary plt.subplot(2,3,pairidx + 1) x_min, x_max = X[:,0].min() - 1, X[:,0].max() - 1 y_min, y_max = X[:,1].min() - 1, X[:,1].max() - 1 xx, yy = np.meshgrid(np.arange(x_min,x_max,plot_step), np.arange(y_min,y_max,plot_step)) plt.tight_layout(h_pad=0.5,w_pad=0.5,pad=2.5) Z = clf.predict(np.c_[xx.ravel(),yy.ravel()]) Z = Z.reshape(xx.shape) cs = plt.contourf(xx, yy, Z, cmap=plt.cm.RdYlBu) plt.xlabel(iris.feature_names[pair[0]]) plt.ylabel(iris.feature_names[pair[1]]) # Plot the training points for i, color in zip(range(n_classes),plot_colors): idx = np.where(y == i) plt.scatter(X[idx,0],X[idx,1],c=color,label=iris.target_names[i], cmap=plt.cm.RdYlBu,edgecolors='black',s=15) plt.suptitle("Decision surface of a decision tree using paired features") plt.legend(loc='lower right', borderpad=0, handletextpad=0) plt.axis("tight") plt.figure() clf = DecisionTreeClassifier().fit(iris.data, iris.target) plot_tree(clf, filled=True) plt.show()
[ "sklearn.datasets.load_iris", "matplotlib.pyplot.contourf", "matplotlib.pyplot.ylabel", "numpy.arange", "numpy.where", "matplotlib.pyplot.xlabel", "sklearn.tree.DecisionTreeClassifier", "matplotlib.pyplot.figure", "sklearn.tree.plot_tree", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.sca...
[((315, 326), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (324, 326), False, 'from sklearn.datasets import load_iris\n'), ((1404, 1477), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['"""Decision surface of a decision tree using paired features"""'], {}), "('Decision surface of a decision tree using paired features')\n", (1416, 1477), True, 'import matplotlib.pyplot as plt\n'), ((1478, 1537), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""', 'borderpad': '(0)', 'handletextpad': '(0)'}), "(loc='lower right', borderpad=0, handletextpad=0)\n", (1488, 1537), True, 'import matplotlib.pyplot as plt\n'), ((1538, 1555), 'matplotlib.pyplot.axis', 'plt.axis', (['"""tight"""'], {}), "('tight')\n", (1546, 1555), True, 'import matplotlib.pyplot as plt\n'), ((1557, 1569), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1567, 1569), True, 'import matplotlib.pyplot as plt\n'), ((1629, 1656), 'sklearn.tree.plot_tree', 'plot_tree', (['clf'], {'filled': '(True)'}), '(clf, filled=True)\n', (1638, 1656), False, 'from sklearn.tree import DecisionTreeClassifier, plot_tree\n'), ((1657, 1667), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1665, 1667), True, 'import matplotlib.pyplot as plt\n'), ((620, 650), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(3)', '(pairidx + 1)'], {}), '(2, 3, pairidx + 1)\n', (631, 650), True, 'import matplotlib.pyplot as plt\n'), ((878, 925), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {'h_pad': '(0.5)', 'w_pad': '(0.5)', 'pad': '(2.5)'}), '(h_pad=0.5, w_pad=0.5, pad=2.5)\n', (894, 925), True, 'import matplotlib.pyplot as plt\n'), ((1012, 1055), 'matplotlib.pyplot.contourf', 'plt.contourf', (['xx', 'yy', 'Z'], {'cmap': 'plt.cm.RdYlBu'}), '(xx, yy, Z, cmap=plt.cm.RdYlBu)\n', (1024, 1055), True, 'import matplotlib.pyplot as plt\n'), ((1060, 1099), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['iris.feature_names[pair[0]]'], {}), '(iris.feature_names[pair[0]])\n', (1070, 
1099), True, 'import matplotlib.pyplot as plt\n'), ((1104, 1143), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['iris.feature_names[pair[1]]'], {}), '(iris.feature_names[pair[1]])\n', (1114, 1143), True, 'import matplotlib.pyplot as plt\n'), ((782, 816), 'numpy.arange', 'np.arange', (['x_min', 'x_max', 'plot_step'], {}), '(x_min, x_max, plot_step)\n', (791, 816), True, 'import numpy as np\n'), ((840, 874), 'numpy.arange', 'np.arange', (['y_min', 'y_max', 'plot_step'], {}), '(y_min, y_max, plot_step)\n', (849, 874), True, 'import numpy as np\n'), ((1249, 1265), 'numpy.where', 'np.where', (['(y == i)'], {}), '(y == i)\n', (1257, 1265), True, 'import numpy as np\n'), ((1274, 1395), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[idx, 0]', 'X[idx, 1]'], {'c': 'color', 'label': 'iris.target_names[i]', 'cmap': 'plt.cm.RdYlBu', 'edgecolors': '"""black"""', 's': '(15)'}), "(X[idx, 0], X[idx, 1], c=color, label=iris.target_names[i], cmap\n =plt.cm.RdYlBu, edgecolors='black', s=15)\n", (1285, 1395), True, 'import matplotlib.pyplot as plt\n'), ((1576, 1600), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (1598, 1600), False, 'from sklearn.tree import DecisionTreeClassifier, plot_tree\n'), ((547, 571), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (569, 571), False, 'from sklearn.tree import DecisionTreeClassifier, plot_tree\n')]
# encoding: utf-8 from application import static_manager from static_bundle import (JsBundle, CssBundle) css1 = CssBundle("css") css1.add_file("example1.less") css1.add_file("example2.css") js1 = JsBundle("js") js1.add_file("vendors/example1.js") js1.add_file("vendors/example2.js") js2 = JsBundle("js/include") # modules depends on app.js js2.add_file("app.js") js2.add_directory("modules") builder = static_manager.create_builder() builder.create_asset("Styles").add_bundle(css1) builder.create_asset("Vendors", minify=True).add_bundle(js1) builder.create_asset("Application", minify=True).add_bundle(js2)
[ "application.static_manager.create_builder", "static_bundle.CssBundle", "static_bundle.JsBundle" ]
[((141, 157), 'static_bundle.CssBundle', 'CssBundle', (['"""css"""'], {}), "('css')\n", (150, 157), False, 'from static_bundle import JsBundle, CssBundle\n'), ((226, 240), 'static_bundle.JsBundle', 'JsBundle', (['"""js"""'], {}), "('js')\n", (234, 240), False, 'from static_bundle import JsBundle, CssBundle\n'), ((320, 342), 'static_bundle.JsBundle', 'JsBundle', (['"""js/include"""'], {}), "('js/include')\n", (328, 342), False, 'from static_bundle import JsBundle, CssBundle\n'), ((434, 465), 'application.static_manager.create_builder', 'static_manager.create_builder', ([], {}), '()\n', (463, 465), False, 'from application import static_manager\n')]
""" Intermediate Factors @author: <NAME> This module computes the interpolated features between the principal vectors -- the one linking source to target following the geodesics on the Grassmannian. We use the equivalent formulation derived in [1] and represent this geodesics for each pair of principal components. Example ------- Examples are given in the vignettes. Notes ------- Examples are given in the vignette References ------- [1] <NAME>., <NAME>., <NAME>., "TO CHANGE" """ import numpy as np import pandas as pd from pathlib import Path from joblib import Parallel, delayed from precise.principal_vectors import PVComputation class IntermediateFactors: """ Handle the intermediate representations between Attributes ------- source_components_ : numpy.ndarray, shape (n_components, n_features) Loadings of the source factors, be them already aligned to target or not. target_components : numpy.ndarray, shape (n_components, n_features) Loadings of the target factors, be them already aligned to source or not. intermediate_factors_ : numpy.ndarray, shape (n_representations, n_components, n_features) Loadings of intermediate factors along the geodesic path. Components are ordered by similarity, i.e. first components correspond to path between first PVs, etc. n_representations: int Number of representations along the geodesic path. If -1, means that the Geodesic Flow Kernel has been used instead. geodesic_matrix_: numpy.ndarray, shape (n_features, n_features) Geodesic Matrix for geodesic flow kernel. geodesic_flow_: method float:numpy.array Method that computes geodesic flow at a certain position. """ def __init__(self, n_representations, n_jobs=1): """ Parameters ------- n_representations : int Number of representations to pick between source and target. n_jobs: int (optional, default to 1) Number of jobs for computation. 
""" self.n_representations = n_representations self.intermediate_factors_ = None self.source_components_ = None self.target_components_ = None self.n_jobs = 1 def _compute_principal_vectors(self): n_pv = np.min([self.source_components_.shape[0], self.target_components_.shape[0]]) n_factors = { 'source': self.source_components_.shape[0], 'target': self.target_components_.shape[0] } self.principal_vectors_ = PVComputation(n_factors, n_pv) self.principal_vectors_.compute_principal_vectors(self.source_components_, self.target_components_) def _compute_flow_time(t, principal_vectors): Pi = np.sin( (1-t) * principal_vectors.angles_)\ /np.sin(principal_vectors.angles_) Pi[np.isnan(Pi)] = 1-t # Asymptotic value of sin/sin in 0 Xi = np.sin( t * principal_vectors.angles_)\ / np.sin(principal_vectors.angles_) Xi[np.isnan(Xi)] = t # Asymptotic value of sin/sin in 0 return (principal_vectors.source_components_.T*Pi \ + principal_vectors.target_components_.T*Xi).T def sample_flow(self, source_components, target_components, already_aligned=False): """ Sample intermediate subspaces (i.e. set of factors) uniformely along the geodesic flow. IMPORTANT: Same genes have to be given for source and target, and in same order Parameters ------- source_components : np.ndarray, shape (n_components, n_features) Source factors target_components : np.ndarray, shape (n_components, n_features) Target factors already_aligned : boolean (optional, default to False) Whether the components are already aligned (i.e. are they PV or not). Return values ------- Intermediate subspace, numpy.ndarray of shape (n_representations + 1, n_components, n_features). 
""" self.source_components_ = source_components self.target_components_ = target_components # Compute the principal vectors if not already_aligned: self._compute_principal_vectors() else: self.principal_vectors_.source_components_ = self.source_components_ self.principal_vectors_.target_components = self.target_components_ # Sample at different uniformly distributed time points if self.n_representations == -1: t_sample = np.array([1]) else: t_sample = np.linspace(0, 1, self.n_representations + 1) if self.n_jobs >= 2: return np.array( Parallel(n_jobs=self.n_jobs)\ (delayed(IntermediateFactors._compute_flow_time)(t, self.principal_vectors_)\ for t in t_sample) ) else: return np.array([IntermediateFactors._compute_flow_time(t, self.principal_vectors_) for t in t_sample]) def compute_geodesic_matrix(self, source_components, target_components): """ Return method for computing the domain-invariant kernel of Geodesic Flow Kernel. Parameters ------- source_components : np.ndarray, shape (n_components, n_features) Source factors target_components : np.ndarray, shape (n_components, n_features) Target factors Return values ------- Method that takes two p-dimensional vector and returns their domain-invariant scalar product. """ self.source_components_ = source_components self.target_components_ = target_components self._compute_principal_vectors() diag_term = (self.principal_vectors_.angles_ - np.cos(self.principal_vectors_.angles_)*np.sin(self.principal_vectors_.angles_)) \ / 2 / self.principal_vectors_.angles_ / np.power(np.sin(self.principal_vectors_.angles_), 2) off_diag_term = (np.sin(self.principal_vectors_.angles_) - np.cos(self.principal_vectors_.angles_)*self.principal_vectors_.angles_) \ / 2 / np.power(np.sin(self.principal_vectors_.angles_),2) / self.principal_vectors_.angles_ # Correct for extreme case when theta = 0 diag_term[np.isnan(diag_term)] = 1./3. diag_term[np.isinf(diag_term)] = 1./3. off_diag_term[np.isnan(off_diag_term)] = 1./6. 
off_diag_term[np.isinf(off_diag_term)] = 1./6. diag_term = np.diag(diag_term) off_diag_term = np.diag(off_diag_term) self.G_matrix = np.block([ [diag_term, off_diag_term], [off_diag_term, diag_term] ]) self.projection = np.block([self.principal_vectors_.source_components_.transpose(), self.principal_vectors_.target_components_.transpose()]) return self.G_matrix #return lambda x,y: IntermediateFactors._compute_domain_invariant_scalar_product(x, y, self.projection, self.G_matrix) def _compute_domain_invariant_scalar_product(x, y, projection, G_matrix): x_p = x.dot(projection) y_p = y.dot(projection) return x_p.dot(G_matrix).dot(y_p.transpose())
[ "numpy.block", "numpy.diag", "joblib.Parallel", "numpy.array", "numpy.linspace", "numpy.isnan", "numpy.cos", "precise.principal_vectors.PVComputation", "numpy.min", "numpy.sin", "joblib.delayed", "numpy.isinf" ]
[((2327, 2403), 'numpy.min', 'np.min', (['[self.source_components_.shape[0], self.target_components_.shape[0]]'], {}), '([self.source_components_.shape[0], self.target_components_.shape[0]])\n', (2333, 2403), True, 'import numpy as np\n'), ((2602, 2632), 'precise.principal_vectors.PVComputation', 'PVComputation', (['n_factors', 'n_pv'], {}), '(n_factors, n_pv)\n', (2615, 2632), False, 'from precise.principal_vectors import PVComputation\n'), ((6663, 6681), 'numpy.diag', 'np.diag', (['diag_term'], {}), '(diag_term)\n', (6670, 6681), True, 'import numpy as np\n'), ((6706, 6728), 'numpy.diag', 'np.diag', (['off_diag_term'], {}), '(off_diag_term)\n', (6713, 6728), True, 'import numpy as np\n'), ((6754, 6820), 'numpy.block', 'np.block', (['[[diag_term, off_diag_term], [off_diag_term, diag_term]]'], {}), '([[diag_term, off_diag_term], [off_diag_term, diag_term]])\n', (6762, 6820), True, 'import numpy as np\n'), ((2861, 2904), 'numpy.sin', 'np.sin', (['((1 - t) * principal_vectors.angles_)'], {}), '((1 - t) * principal_vectors.angles_)\n', (2867, 2904), True, 'import numpy as np\n'), ((2918, 2951), 'numpy.sin', 'np.sin', (['principal_vectors.angles_'], {}), '(principal_vectors.angles_)\n', (2924, 2951), True, 'import numpy as np\n'), ((2963, 2975), 'numpy.isnan', 'np.isnan', (['Pi'], {}), '(Pi)\n', (2971, 2975), True, 'import numpy as np\n'), ((3032, 3069), 'numpy.sin', 'np.sin', (['(t * principal_vectors.angles_)'], {}), '(t * principal_vectors.angles_)\n', (3038, 3069), True, 'import numpy as np\n'), ((3086, 3119), 'numpy.sin', 'np.sin', (['principal_vectors.angles_'], {}), '(principal_vectors.angles_)\n', (3092, 3119), True, 'import numpy as np\n'), ((3131, 3143), 'numpy.isnan', 'np.isnan', (['Xi'], {}), '(Xi)\n', (3139, 3143), True, 'import numpy as np\n'), ((4680, 4693), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (4688, 4693), True, 'import numpy as np\n'), ((4731, 4776), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(self.n_representations + 1)'], {}), 
'(0, 1, self.n_representations + 1)\n', (4742, 4776), True, 'import numpy as np\n'), ((6457, 6476), 'numpy.isnan', 'np.isnan', (['diag_term'], {}), '(diag_term)\n', (6465, 6476), True, 'import numpy as np\n'), ((6504, 6523), 'numpy.isinf', 'np.isinf', (['diag_term'], {}), '(diag_term)\n', (6512, 6523), True, 'import numpy as np\n'), ((6555, 6578), 'numpy.isnan', 'np.isnan', (['off_diag_term'], {}), '(off_diag_term)\n', (6563, 6578), True, 'import numpy as np\n'), ((6610, 6633), 'numpy.isinf', 'np.isinf', (['off_diag_term'], {}), '(off_diag_term)\n', (6618, 6633), True, 'import numpy as np\n'), ((6099, 6138), 'numpy.sin', 'np.sin', (['self.principal_vectors_.angles_'], {}), '(self.principal_vectors_.angles_)\n', (6105, 6138), True, 'import numpy as np\n'), ((4852, 4880), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'self.n_jobs'}), '(n_jobs=self.n_jobs)\n', (4860, 4880), False, 'from joblib import Parallel, delayed\n'), ((6312, 6351), 'numpy.sin', 'np.sin', (['self.principal_vectors_.angles_'], {}), '(self.principal_vectors_.angles_)\n', (6318, 6351), True, 'import numpy as np\n'), ((6168, 6207), 'numpy.sin', 'np.sin', (['self.principal_vectors_.angles_'], {}), '(self.principal_vectors_.angles_)\n', (6174, 6207), True, 'import numpy as np\n'), ((4907, 4954), 'joblib.delayed', 'delayed', (['IntermediateFactors._compute_flow_time'], {}), '(IntermediateFactors._compute_flow_time)\n', (4914, 4954), False, 'from joblib import Parallel, delayed\n'), ((5955, 5994), 'numpy.cos', 'np.cos', (['self.principal_vectors_.angles_'], {}), '(self.principal_vectors_.angles_)\n', (5961, 5994), True, 'import numpy as np\n'), ((5995, 6034), 'numpy.sin', 'np.sin', (['self.principal_vectors_.angles_'], {}), '(self.principal_vectors_.angles_)\n', (6001, 6034), True, 'import numpy as np\n'), ((6210, 6249), 'numpy.cos', 'np.cos', (['self.principal_vectors_.angles_'], {}), '(self.principal_vectors_.angles_)\n', (6216, 6249), True, 'import numpy as np\n')]
import logging import os import hydra import pytorch_lightning as pl from hydra.utils import instantiate from omegaconf import DictConfig, OmegaConf from deep_learning_template import BaseDataModule, BaseTask from deep_learning_template.core import initialization as init from deep_learning_template.utils.config import freeze_config os.environ["HYDRA_FULL_ERROR"] = "1" log = logging.getLogger(__name__) @hydra.main(config_path="conf", config_name="config") def main(cfg: DictConfig) -> None: """Hydra application's main function, that builds the model and trains/tests it on the dataset. Args: cfg: Application-wide configuration generated by Hydra. """ freeze_config(cfg) log.info(f"Hydra app running with config: \n{OmegaConf.to_yaml(cfg)}") init.validate_cfg(cfg) trainer: pl.Trainer = instantiate(cfg.trainer.trainer, logger=init.initialize_loggers(cfg)) data_module: BaseDataModule = instantiate(cfg.datamodule.datamodule) task: BaseTask = init.initialize_task(cfg, data_module) if cfg.trainer.train: trainer.fit(task, datamodule=data_module) log.info("Training complete.") if cfg.trainer.test: trainer.test(task, datamodule=data_module) log.info("Testing complete.") if __name__ == "__main__": main()
[ "logging.getLogger", "deep_learning_template.core.initialization.initialize_loggers", "hydra.main", "hydra.utils.instantiate", "omegaconf.OmegaConf.to_yaml", "deep_learning_template.core.initialization.initialize_task", "deep_learning_template.core.initialization.validate_cfg", "deep_learning_template...
[((381, 408), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (398, 408), False, 'import logging\n'), ((412, 464), 'hydra.main', 'hydra.main', ([], {'config_path': '"""conf"""', 'config_name': '"""config"""'}), "(config_path='conf', config_name='config')\n", (422, 464), False, 'import hydra\n'), ((687, 705), 'deep_learning_template.utils.config.freeze_config', 'freeze_config', (['cfg'], {}), '(cfg)\n', (700, 705), False, 'from deep_learning_template.utils.config import freeze_config\n'), ((785, 807), 'deep_learning_template.core.initialization.validate_cfg', 'init.validate_cfg', (['cfg'], {}), '(cfg)\n', (802, 807), True, 'from deep_learning_template.core import initialization as init\n'), ((939, 977), 'hydra.utils.instantiate', 'instantiate', (['cfg.datamodule.datamodule'], {}), '(cfg.datamodule.datamodule)\n', (950, 977), False, 'from hydra.utils import instantiate\n'), ((999, 1037), 'deep_learning_template.core.initialization.initialize_task', 'init.initialize_task', (['cfg', 'data_module'], {}), '(cfg, data_module)\n', (1019, 1037), True, 'from deep_learning_template.core import initialization as init\n'), ((875, 903), 'deep_learning_template.core.initialization.initialize_loggers', 'init.initialize_loggers', (['cfg'], {}), '(cfg)\n', (898, 903), True, 'from deep_learning_template.core import initialization as init\n'), ((755, 777), 'omegaconf.OmegaConf.to_yaml', 'OmegaConf.to_yaml', (['cfg'], {}), '(cfg)\n', (772, 777), False, 'from omegaconf import DictConfig, OmegaConf\n')]
import numpy as np from sklearn.preprocessing import MinMaxScaler, StandardScaler from sklearn.cross_validation import train_test_split import theanets import climate climate.enable_default_logging() X_orig = np.load('/Users/bzamecnik/Documents/music-processing/music-processing-experiments/c-scale-piano_spectrogram_2048_hamming.npy') sample_count, feature_count = X_orig.shape X = MinMaxScaler().fit_transform(X_orig) X = X.astype(np.float32) X_train, X_test = train_test_split(X, test_size=0.4, random_state=42) X_val, X_test = train_test_split(X_test, test_size=0.5, random_state=42) # (np.maximum(0, 44100/512*np.arange(13)-2)).astype('int') #blocks = [0, 84, 170, 256, 342, 428, 514, 600, 687, 773, 859, 945, 1031, 1205] blocks = [0, 48, 98, 148, 198, 248, 298, 348, 398, 448, 498, 548, 598, 700] def make_labels(blocks): label_count = len(blocks) - 1 labels = np.zeros(blocks[-1]) for i in range(label_count): labels[blocks[i]:blocks[i+1]] = i return labels y = make_labels(blocks) def score(exp, Xs): X_train, X_val, X_test = Xs def sc(exp, X): return r2_score(X, exp.network.predict(X)) print("training: ", sc(exp, X_train)) # NOTE: only optimize to validation dataset's score! 
print("validation:", sc(exp, X_val)) print("test: ", sc(exp, X_test)) exp1 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1) exp1.train(X_train, X_val, optimize='nag', learning_rate=1e-3, momentum=0.9) exp2 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1) exp2.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) # gives quite nice prediction, trains slow exp3 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1, hidden_activation='relu') exp3.train(X_train, X_val, optimize='nag', learning_rate=1e-3, momentum=0.9) exp4 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1, input_dropout=0.3) exp4.train(X_train, X_val, optimize='nag', learning_rate=1e-3, momentum=0.9) # rmsprop - converges faster in this case than nag exp5 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1) exp5.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # tied weighs - work good, much lower loss function values # r2: 0.75037549551862703 exp6 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1, tied_weights=True) exp6.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # higher hidden L1 penalty - worse exp7 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.7, tied_weights=True) exp7.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # hidden L2 penalty - a bit worse exp8 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), hidden_l1=0.1, hidden_l2=0.1, tied_weights=True) exp8.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # no regularization - in this case better # r2: 
0.82211329411744094 exp10 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), tied_weights=True) exp10.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # layerwise autoencoder training exp11 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 500, feature_count), tied_weights=True) exp11.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) # wow - this actually is able to to a 2D visualization exp12 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 100, 10, 2, 10, 100, feature_count), tied_weights=True) exp12.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) def compute_middle_layer(X, model): X_pred_ff = model.feed_forward(X) middle = int(len(X_pred_ff)/2) X_middle = X_pred_ff[middle] return X_middle def visualize_2d(X, y=None): colors = y/max(y) if y is not None else np.linspace(0,1,len(X)) scatter(X[:,0], X[:,1], c=colors, alpha=0.2, edgecolors='none', cmap='rainbow') # same visualization, a little bit better r2 exp13 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 256, 64, 16, 2, 16, 64, 256, feature_count), tied_weights=True) exp13.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) # contractive - better than without # r2: 0.82820148664941162 exp14 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), tied_weights=True, contractive=0.8) exp14.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # tanh - bad exp15 = theanets.Experiment( theanets.Autoencoder, layers=(feature_count, 500, feature_count), tied_weights=True, hidden_activation='tanh') exp15.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # relu, contractive exp16 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 128, 16, 2, 16, 128, feature_count), tied_weights=True, hidden_activation='relu', 
contractive=0.5) exp16.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) exp17 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 128, 16, 2, 16, 128, feature_count), tied_weights=True, contractive=0.8) exp17.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) exp18 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.8) exp18.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) # r2: 0.83371355062803953 exp19 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.8, hidden_dropout=0.8) exp19.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) exp20 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.9, hidden_dropout=0.9) exp20.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) # ----------------- # animate the 2D point movement import matplotlib.animation as animation def export_animation(X_2d, y, filename): fig = plt.figure() # 854x480 px (480p) in inches, note that 8.54 gives 853px width :/ fig.set_size_inches(8.545, 4.80) plt.axis('equal') # plt.tight_layout() # plt.xlim(-0.1, 1.1) # plt.ylim(-0.1, 1.1) images = [] im1 = scatter(X_2d[:, 0], X_2d[:, 1], c=y/max(y), cmap='rainbow', alpha=0.2) for i in range(len(X_2d)): im2 = scatter(X_2d[i, 0], X_2d[i, 1], c=y[i]/max(y), cmap='rainbow') images.append([im1, im2]) ani = animation.ArtistAnimation(fig, images, interval=20, blit=False, repeat=False) writer = animation.writers['ffmpeg'](fps=50, bitrate=5000) ani.save(filename, writer=writer, dpi=100) export_animation(X_tsne, y, 'piano-tsne.mp4') #---------------------- exp21 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=0.5, 
batch_size=len(X_train)) exp21.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) exp22 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=0.5) exp22.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) exp23 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, 256, 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, 256, 512, feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=0.5) exp23.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) exp24 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=0.5, hidden_activation='linear') exp24.train(X_train, X_val, optimize='rmsprop', learning_rate=1e-3, momentum=0.9) # r2: 0.833454635805 exp25 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp25.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.9) # r2: 0.731835366439 exp26 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp26.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.1) # r2: 0.854741515141 (*) exp27 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp27.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # r2: 0.84260338122 exp28 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp28.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.7) exp29 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp29.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp30 = 
theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, input_dropout=0.9) exp30.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp31 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 100, feature_count), tied_weights=True) exp31.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp32 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 200, 20, 2, 20, 200, feature_count), tied_weights=True, input_dropout=0.5, hidden_dropout=0.5) exp32.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # bad - makes a single curve exp33 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 200, 20, 2, 20, 200, feature_count), tied_weights=True, hidden_l1=0.1) exp33.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # bad - makes a non-discriminative curve exp34 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 200, 20, 2, 20, 200, feature_count), tied_weights=True, input_dropout=0.5) exp34.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp35 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 200, 20, 2, 20, 200, feature_count), tied_weights=True, hidden_dropout=0.5) exp35.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp36 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 200, 20, 2, 20, 200, feature_count), tied_weights=True) exp36.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp33 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, 256, 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, 256, 512, feature_count), tied_weights=True) exp33.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) X_zca_train, X_zca_test = train_test_split(X_zca, test_size=0.4, random_state=42) X_zca_val, X_zca_test = 
train_test_split(X_zca_test, test_size=0.5, random_state=42) exp34 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp34.train(X_zca_train, X_zca_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp35 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, 256, 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, 256, 512, feature_count), tied_weights=True, hidden_activation='relu') exp35.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # - try tanh and relu for deeper networks # - try other normalization (mean-std instead od min-max) X_ms = StandardScaler().fit_transform(X_orig).astype(np.float32) X_ms_train, X_ms_test = train_test_split(X_ms, test_size=0.4, random_state=42) X_ms_val, X_ms_test = train_test_split(X_ms_test, test_size=0.5, random_state=42) exp36 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp36.train(X_ms_train, X_ms_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp37 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='tanh') exp37.train(X_ms_train, X_ms_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp38 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp38.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) X_orig_train, X_orig_test = train_test_split(X_orig.astype('float32'), test_size=0.4, random_state=42) X_orig_val, X_orig_test = train_test_split(X_orig_test, test_size=0.5, random_state=42) exp39 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True) exp39.train(X_orig_train, X_orig_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp40 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), 
tied_weights=True, hidden_activation='linear', hidden_l1=0.5) exp40.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp41 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='relu', hidden_l1=0.5) exp41.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp42 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='relu', weight_l1=0.5) exp42.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # bad exp43 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='relu', contractive=0.9) exp43.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # not bad exp44 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='relu') exp45.train(X_ms_train, X_ms_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp45 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='relu', contractive=0.5) exp45.train(X_ms_train, X_ms_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) # r2: 0.849283267068 exp46 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='linear', contractive=0.5) exp46.train(X_ms_train, X_ms_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5) exp47 = theanets.Experiment(theanets.Autoencoder, layers=(feature_count, 512, feature_count), tied_weights=True, hidden_activation='linear', contractive=0.5) exp47.train(X_train, X_val, optimize='layerwise', learning_rate=1e-3, momentum=0.5)
[ "theanets.Experiment", "climate.enable_default_logging", "sklearn.preprocessing.StandardScaler", "matplotlib.animation.ArtistAnimation", "numpy.zeros", "sklearn.cross_validation.train_test_split", "numpy.load", "sklearn.preprocessing.MinMaxScaler" ]
[((168, 200), 'climate.enable_default_logging', 'climate.enable_default_logging', ([], {}), '()\n', (198, 200), False, 'import climate\n'), ((211, 347), 'numpy.load', 'np.load', (['"""/Users/bzamecnik/Documents/music-processing/music-processing-experiments/c-scale-piano_spectrogram_2048_hamming.npy"""'], {}), "(\n '/Users/bzamecnik/Documents/music-processing/music-processing-experiments/c-scale-piano_spectrogram_2048_hamming.npy'\n )\n", (218, 347), True, 'import numpy as np\n'), ((466, 517), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X'], {'test_size': '(0.4)', 'random_state': '(42)'}), '(X, test_size=0.4, random_state=42)\n', (482, 517), False, 'from sklearn.cross_validation import train_test_split\n'), ((534, 590), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X_test'], {'test_size': '(0.5)', 'random_state': '(42)'}), '(X_test, test_size=0.5, random_state=42)\n', (550, 590), False, 'from sklearn.cross_validation import train_test_split\n'), ((1334, 1438), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1)\n', (1353, 1438), False, 'import theanets\n'), ((1530, 1634), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1)\n', (1549, 1634), False, 'import theanets\n'), ((1774, 1904), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)', 'hidden_activation': '"""relu"""'}), "(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1, hidden_activation='relu')\n", (1793, 1904), False, 'import theanets\n'), ((1996, 2119), 'theanets.Experiment', 
'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)', 'input_dropout': '(0.3)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1, input_dropout=0.3)\n', (2015, 2119), False, 'import theanets\n'), ((2262, 2366), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1)\n', (2281, 2366), False, 'import theanets\n'), ((2547, 2670), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1, tied_weights=True)\n', (2566, 2670), False, 'import theanets\n'), ((2801, 2924), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.7)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.7, tied_weights=True)\n', (2820, 2924), False, 'import theanets\n'), ((3054, 3192), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'hidden_l1': '(0.1)', 'hidden_l2': '(0.1)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), hidden_l1=0.1, hidden_l2=0.1, tied_weights=True)\n', (3073, 3192), False, 'import theanets\n'), ((3357, 3465), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), tied_weights=True)\n', (3376, 3465), False, 'import theanets\n'), ((3597, 3705), 'theanets.Experiment', 
'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), tied_weights=True)\n', (3616, 3705), False, 'import theanets\n'), ((3854, 3978), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 100, 10, 2, 10, 100, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 100, 10, 2,\n 10, 100, feature_count), tied_weights=True)\n', (3873, 3978), False, 'import theanets\n'), ((4474, 4607), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 256, 64, 16, 2, 16, 64, 256, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 256, 64, \n 16, 2, 16, 64, 256, feature_count), tied_weights=True)\n', (4493, 4607), False, 'import theanets\n'), ((4766, 4891), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'tied_weights': '(True)', 'contractive': '(0.8)'}), '(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), tied_weights=True, contractive=0.8)\n', (4785, 4891), False, 'import theanets\n'), ((5001, 5135), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 500, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""tanh"""'}), "(theanets.Autoencoder, layers=(feature_count, 500,\n feature_count), tied_weights=True, hidden_activation='tanh')\n", (5020, 5135), False, 'import theanets\n'), ((5252, 5423), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 128, 16, 2, 16, 128, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""relu"""', 'contractive': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 128, 16, 2,\n 16, 128, feature_count), 
tied_weights=True, hidden_activation='relu',\n contractive=0.5)\n", (5271, 5423), False, 'import theanets\n'), ((5517, 5658), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 128, 16, 2, 16, 128, feature_count)', 'tied_weights': '(True)', 'contractive': '(0.8)'}), '(theanets.Autoencoder, layers=(feature_count, 128, 16, 2,\n 16, 128, feature_count), tied_weights=True, contractive=0.8)\n', (5536, 5658), False, 'import theanets\n'), ((5756, 5883), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.8)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, input_dropout=0.8)\n', (5775, 5883), False, 'import theanets\n'), ((6007, 6154), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.8)', 'hidden_dropout': '(0.8)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, input_dropout=0.8, hidden_dropout=0.8)\n', (6026, 6154), False, 'import theanets\n'), ((6252, 6399), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.9)', 'hidden_dropout': '(0.9)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, input_dropout=0.9, hidden_dropout=0.9)\n', (6271, 6399), False, 'import theanets\n'), ((7656, 7803), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.3)', 'hidden_dropout': '(0.5)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=0.5)\n', (7675, 7803), False, 
'import theanets\n'), ((7899, 8115), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, 256, 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, \n 256, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.3)', 'hidden_dropout': '(0.5)'}), '(theanets.Autoencoder, layers=(feature_count, 512, 256, \n 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, 256, 512,\n feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=0.5)\n', (7918, 8115), False, 'import theanets\n'), ((8216, 8396), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.3)', 'hidden_dropout': '(0.5)', 'hidden_activation': '"""linear"""'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, input_dropout=0.3, hidden_dropout=\n 0.5, hidden_activation='linear')\n", (8235, 8396), False, 'import theanets\n'), ((8512, 8620), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (8531, 8620), False, 'import theanets\n'), ((8739, 8847), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (8758, 8847), False, 'import theanets\n'), ((8970, 9078), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (8989, 9078), False, 'import theanets\n'), ((9196, 9304), 'theanets.Experiment', 'theanets.Experiment', 
(['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (9215, 9304), False, 'import theanets\n'), ((9402, 9510), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (9421, 9510), False, 'import theanets\n'), ((9608, 9735), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.9)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, input_dropout=0.9)\n', (9627, 9735), False, 'import theanets\n'), ((9833, 9941), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 100, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 100,\n feature_count), tied_weights=True)\n', (9852, 9941), False, 'import theanets\n'), ((10039, 10206), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 200, 20, 2, 20, 200, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.5)', 'hidden_dropout': '(0.5)'}), '(theanets.Autoencoder, layers=(feature_count, 200, 20, 2,\n 20, 200, feature_count), tied_weights=True, input_dropout=0.5,\n hidden_dropout=0.5)\n', (10058, 10206), False, 'import theanets\n'), ((10329, 10468), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 200, 20, 2, 20, 200, feature_count)', 'tied_weights': '(True)', 'hidden_l1': '(0.1)'}), '(theanets.Autoencoder, layers=(feature_count, 200, 20, 2,\n 20, 200, feature_count), tied_weights=True, hidden_l1=0.1)\n', (10348, 10468), False, 'import theanets\n'), 
((10607, 10750), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 200, 20, 2, 20, 200, feature_count)', 'tied_weights': '(True)', 'input_dropout': '(0.5)'}), '(theanets.Autoencoder, layers=(feature_count, 200, 20, 2,\n 20, 200, feature_count), tied_weights=True, input_dropout=0.5)\n', (10626, 10750), False, 'import theanets\n'), ((10848, 10992), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 200, 20, 2, 20, 200, feature_count)', 'tied_weights': '(True)', 'hidden_dropout': '(0.5)'}), '(theanets.Autoencoder, layers=(feature_count, 200, 20, 2,\n 20, 200, feature_count), tied_weights=True, hidden_dropout=0.5)\n', (10867, 10992), False, 'import theanets\n'), ((11090, 11214), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 200, 20, 2, 20, 200, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 200, 20, 2,\n 20, 200, feature_count), tied_weights=True)\n', (11109, 11214), False, 'import theanets\n'), ((11313, 11490), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, 256, 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, \n 256, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512, 256, \n 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, 256, 512,\n feature_count), tied_weights=True)\n', (11332, 11490), False, 'import theanets\n'), ((11609, 11664), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X_zca'], {'test_size': '(0.4)', 'random_state': '(42)'}), '(X_zca, test_size=0.4, random_state=42)\n', (11625, 11664), False, 'from sklearn.cross_validation import train_test_split\n'), ((11689, 11749), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X_zca_test'], {'test_size': '(0.5)', 'random_state': '(42)'}), '(X_zca_test, test_size=0.5, 
random_state=42)\n', (11705, 11749), False, 'from sklearn.cross_validation import train_test_split\n'), ((11760, 11868), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (11779, 11868), False, 'import theanets\n'), ((11974, 12177), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, 256, 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, \n 256, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""relu"""'}), "(theanets.Autoencoder, layers=(feature_count, 512, 256, \n 128, 64, 32, 16, 8, 4, 2, 4, 8, 16, 32, 64, 128, 256, 512,\n feature_count), tied_weights=True, hidden_activation='relu')\n", (11993, 12177), False, 'import theanets\n'), ((12460, 12514), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X_ms'], {'test_size': '(0.4)', 'random_state': '(42)'}), '(X_ms, test_size=0.4, random_state=42)\n', (12476, 12514), False, 'from sklearn.cross_validation import train_test_split\n'), ((12537, 12596), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X_ms_test'], {'test_size': '(0.5)', 'random_state': '(42)'}), '(X_ms_test, test_size=0.5, random_state=42)\n', (12553, 12596), False, 'from sklearn.cross_validation import train_test_split\n'), ((12606, 12714), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (12625, 12714), False, 'import theanets\n'), ((12818, 12952), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""tanh"""'}), "(theanets.Autoencoder, 
layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='tanh')\n", (12837, 12952), False, 'import theanets\n'), ((13056, 13164), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (13075, 13164), False, 'import theanets\n'), ((13383, 13444), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X_orig_test'], {'test_size': '(0.5)', 'random_state': '(42)'}), '(X_orig_test, test_size=0.5, random_state=42)\n', (13399, 13444), False, 'from sklearn.cross_validation import train_test_split\n'), ((13454, 13562), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)'}), '(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True)\n', (13473, 13562), False, 'import theanets\n'), ((13670, 13825), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""linear"""', 'hidden_l1': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='linear',\n hidden_l1=0.5)\n", (13689, 13825), False, 'import theanets\n'), ((13919, 14068), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""relu"""', 'hidden_l1': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='relu', hidden_l1=0.5)\n", (13938, 14068), False, 'import theanets\n'), ((14166, 14315), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 
'tied_weights': '(True)', 'hidden_activation': '"""relu"""', 'weight_l1': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='relu', weight_l1=0.5)\n", (14185, 14315), False, 'import theanets\n'), ((14419, 14574), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""relu"""', 'contractive': '(0.9)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='relu',\n contractive=0.9)\n", (14438, 14574), False, 'import theanets\n'), ((14678, 14812), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""relu"""'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='relu')\n", (14697, 14812), False, 'import theanets\n'), ((14916, 15071), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""relu"""', 'contractive': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='relu',\n contractive=0.5)\n", (14935, 15071), False, 'import theanets\n'), ((15193, 15350), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""linear"""', 'contractive': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='linear',\n contractive=0.5)\n", (15212, 15350), False, 'import theanets\n'), ((15450, 15607), 'theanets.Experiment', 'theanets.Experiment', (['theanets.Autoencoder'], {'layers': '(feature_count, 512, 
feature_count)', 'tied_weights': '(True)', 'hidden_activation': '"""linear"""', 'contractive': '(0.5)'}), "(theanets.Autoencoder, layers=(feature_count, 512,\n feature_count), tied_weights=True, hidden_activation='linear',\n contractive=0.5)\n", (15469, 15607), False, 'import theanets\n'), ((880, 900), 'numpy.zeros', 'np.zeros', (['blocks[-1]'], {}), '(blocks[-1])\n', (888, 900), True, 'import numpy as np\n'), ((7106, 7183), 'matplotlib.animation.ArtistAnimation', 'animation.ArtistAnimation', (['fig', 'images'], {'interval': '(20)', 'blit': '(False)', 'repeat': '(False)'}), '(fig, images, interval=20, blit=False, repeat=False)\n', (7131, 7183), True, 'import matplotlib.animation as animation\n'), ((385, 399), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (397, 399), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((12378, 12394), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (12392, 12394), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n')]
import os import glob import argparse import numpy as np def parse_args(): parser = argparse.ArgumentParser(description='Display datas') parser.add_argument('--data-dir', default='C:/Users/junya/Documents/plant_segmentation_data', help='dataset directory') parser.add_argument('--val-rate', default=0.1, type=float, help='Number of validation rate') args = parser.parse_args() return args if __name__ == '__main__': args = parse_args() phase = 'train' train_dir = os.path.join(args.data_dir, phase) all_set = np.array(glob.glob(os.path.join(train_dir, '*.jpg'))) all_idx = np.random.choice(all_set.shape[0], all_set.shape[0], replace=False) train_num = int(all_set.shape[0] * (1 - args.val_rate)) train_idx = all_idx[0:train_num] val_idx = all_idx[train_num:all_set.shape[0]] train_set = all_set[train_idx] val_set = all_set[val_idx] np.savetxt("segmentation/train.txt", train_set, fmt='%s', delimiter=',') np.savetxt("segmentation/val.txt", val_set, fmt='%s', delimiter=',')
[ "numpy.random.choice", "numpy.savetxt", "os.path.join", "argparse.ArgumentParser" ]
[((90, 142), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Display datas"""'}), "(description='Display datas')\n", (113, 142), False, 'import argparse\n'), ((548, 582), 'os.path.join', 'os.path.join', (['args.data_dir', 'phase'], {}), '(args.data_dir, phase)\n', (560, 582), False, 'import os\n'), ((670, 737), 'numpy.random.choice', 'np.random.choice', (['all_set.shape[0]', 'all_set.shape[0]'], {'replace': '(False)'}), '(all_set.shape[0], all_set.shape[0], replace=False)\n', (686, 737), True, 'import numpy as np\n'), ((958, 1030), 'numpy.savetxt', 'np.savetxt', (['"""segmentation/train.txt"""', 'train_set'], {'fmt': '"""%s"""', 'delimiter': '""","""'}), "('segmentation/train.txt', train_set, fmt='%s', delimiter=',')\n", (968, 1030), True, 'import numpy as np\n'), ((1035, 1103), 'numpy.savetxt', 'np.savetxt', (['"""segmentation/val.txt"""', 'val_set'], {'fmt': '"""%s"""', 'delimiter': '""","""'}), "('segmentation/val.txt', val_set, fmt='%s', delimiter=',')\n", (1045, 1103), True, 'import numpy as np\n'), ((621, 653), 'os.path.join', 'os.path.join', (['train_dir', '"""*.jpg"""'], {}), "(train_dir, '*.jpg')\n", (633, 653), False, 'import os\n')]
''' This file gets the cities from the Deutscher Wetterdienst website file (a file of the name 'TU_Stundenwerte_Beschreibung_Stationen.txt' must already be in the current folder) and saves it into the file 'current_cityfile.dump'.''' import pickle import re # regular expressions package import csv #### FUNCTION DEFINITIONS ##################################################### def get_cities(filename): ''' Reads cities and ids from textfile ''' print('Reading Deutscher Wetterdienst - File and extract cities...') citylist = [] # Attention! The textfile given by the DWD is encoded in Latin-1. # Python3 uses utf-8 by default, so we have to specify it here. # In Python2 none of this will work, the open() function doesn't # even accept encode= as a parameter. with open(filename, 'rt', encoding='Latin-1') as text_file: # Read the first two lines, which we don't need. text_file.readline() text_file.readline() lines = text_file.readlines() for idx, textline in enumerate(lines): wordlist = re.sub("[^\w]", " ", textline).split() # The city is the 8th. word in each line try: citylist.append([wordlist[0], wordlist[8], wordlist[9]]) except IndexError: # check if this IndexError was raised in the last line: if len(lines) == idx+1: print("Checking exited cleanly at the end of the file.") else: print('There was an indexerror while reading from' \ 'the DW text file in line ' + str(idx)) list_sorted = sorted(citylist, key= lambda line:line[1]) return list_sorted, text_file def delete_multiples(citylist): ''' Removes duplicate items from a list. 
A duplicate is if there are several weather stations in the same city''' print('Deleting cities that occur multiple times in the city list ' + \ 'because the DW has several stations within one city...') duplicates = 0 new_citylist = [] citiesonly = [] # this list is used to temporarilty store # the cities and check whether it occured already for element in citylist: # element is of the form ['ID', 'city', 'part_of_city'] if element[1] not in citiesonly: new_citylist.append(element) citiesonly.append(element[1]) else: duplicates+=1 print("Deleted " + str(duplicates) + " duplicates.") return new_citylist #### IMPLEMENTATION ########################################################### citylist, text_file = get_cities('TU_Stundenwerte_Beschreibung_Stationen.txt') citylist = delete_multiples(citylist) with open('citylist.txt', 'w', encoding='utf-8') as myfile: myfile.writelines(["%s\n" % item1 for item in citylist for item1 in item]) pickle.dump(citylist, open('current_cityfile.dump','wb'))
[ "re.sub" ]
[((1146, 1177), 're.sub', 're.sub', (['"""[^\\\\w]"""', '""" """', 'textline'], {}), "('[^\\\\w]', ' ', textline)\n", (1152, 1177), False, 'import re\n')]
# python3 # Copyright 2021 InstaDeep Ltd. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Wraps a PettingZoo MARL environment to be used as a dm_env environment.""" import copy from typing import Any, Dict, Iterator, List, Optional, Union import dm_env import gym import numpy as np from acme import specs from acme.wrappers.gym_wrapper import _convert_to_spec from pettingzoo.utils.env import AECEnv, ParallelEnv from supersuit import black_death_v1 from mava import types from mava.utils.wrapper_utils import ( apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart, ) from mava.wrappers.env_wrappers import ParallelEnvWrapper, SequentialEnvWrapper class PettingZooAECEnvWrapper(SequentialEnvWrapper): """Environment wrapper for PettingZoo MARL environments.""" def __init__( self, environment: AECEnv, env_preprocess_wrappers: Optional[List] = [ # (env_preprocessor, dict_with_preprocessor_params) (black_death_v1, None), ], ): self._environment = environment self._reset_next_step = True if env_preprocess_wrappers: self._environment = apply_env_wrapper_preprocessers( self._environment, env_preprocess_wrappers ) self.correct_agent_name() self.last_turn_agent = None def reset(self) -> dm_env.TimeStep: """Resets the episode.""" self._reset_next_step = False self._environment.reset() self._step_types = { agent: dm_env.StepType.FIRST for agent in self.possible_agents } self._first_step_performed = {agent: False for agent in self.possible_agents} observe, 
_, done, _ = self._environment.last() agent = self.current_agent observation = self._convert_observation(agent, observe, done) self._discount = convert_np_type(self.discount_spec()[agent].dtype, 1) reward = convert_np_type(self.reward_spec()[agent].dtype, 0) return parameterized_restart(reward, self._discount, observation) def step( # type: ignore[override] self, action: Union[int, float] ) -> dm_env.TimeStep: """Steps the environment.""" if self._reset_next_step: return self.reset() _, _, done, _ = self._environment.last() # If current agent is done if done: self._environment.step(None) else: self._environment.step(action) agent = self.current_agent # Reset if all agents are done if self.env_done(): self._reset_next_step = True reward = convert_np_type(self.reward_spec()[agent].dtype, 0) observation = self._convert_observation( agent, self._environment.observe(agent), done ) else: # observation for next agent observe, reward, done, info = self._environment.last() # Convert rewards to match spec reward = convert_np_type(self.reward_spec()[agent].dtype, reward) observation = self._convert_observation(agent, observe, done) step_type = dm_env.StepType.LAST if done else dm_env.StepType.MID return dm_env.TimeStep( observation=observation, reward=reward, discount=self._discount, step_type=step_type, ) def env_done(self) -> bool: return not self.agents def agent_iter(self, max_iter: int = 2 ** 63) -> Iterator: return self._environment.agent_iter(max_iter) # Convert PettingZoo observation so it's dm_env compatible. Also, the list # of legal actions must be converted to a legal actions mask. 
def _convert_observation( # type: ignore[override] self, agent: str, observe: Union[dict, np.ndarray], done: bool ) -> types.OLT: legals: np.ndarray = None observation: np.ndarray = None if isinstance(observe, dict) and "action_mask" in observe: legals = observe["action_mask"] observation = observe["observation"] else: legals = np.ones( _convert_to_spec(self._environment.action_spaces[agent]).shape, dtype=self._environment.action_spaces[agent].dtype, ) observation = observe if observation.dtype == np.int8: observation = np.dtype(np.float32).type( observation ) # observation is not expected to be int8 if legals.dtype == np.int8: legals = np.dtype(np.int64).type(legals) observation = types.OLT( observation=observation, legal_actions=legals, terminal=np.asarray([done], dtype=np.float32), ) return observation def correct_agent_name(self) -> None: self._environment.reset() if "tictactoe" in self._environment.metadata["name"]: corrected_names = ["player_0", "player_1"] self._environment.unwrapped.possible_agents = corrected_names self._environment.unwrapped.agents = corrected_names self._environment.possible_agents = corrected_names self._environment.agents = corrected_names previous_names = list(self.observation_spaces.keys()) for corrected_name, prev_name in zip(corrected_names, previous_names): self.observation_spaces[corrected_name] = self.observation_spaces[ prev_name ] self.action_spaces[corrected_name] = self.action_spaces[prev_name] self.rewards[corrected_name] = self.rewards[prev_name] self.dones[corrected_name] = self.dones[prev_name] self.infos[corrected_name] = self.infos[prev_name] del self.observation_spaces[prev_name] del self.action_spaces[prev_name] del self.rewards[prev_name] del self.dones[prev_name] del self.infos[prev_name] def observation_spec(self) -> types.Observation: observation_specs = {} for agent in self._environment.possible_agents: if isinstance(self._environment.observation_spaces[agent], gym.spaces.Dict): obs_space = copy.deepcopy( 
self._environment.observation_spaces[agent]["observation"] ) legal_actions_space = copy.deepcopy( self._environment.observation_spaces[agent]["action_mask"] ) else: obs_space = copy.deepcopy(self._environment.observation_spaces[agent]) legal_actions_space = copy.deepcopy( self._environment.action_spaces[agent] ) if obs_space.dtype == np.int8: obs_space.dtype = np.dtype(np.float32) if legal_actions_space.dtype == np.int8: legal_actions_space.dtype = np.dtype(np.int64) observation_specs[agent] = types.OLT( observation=_convert_to_spec(obs_space), legal_actions=_convert_to_spec(legal_actions_space), terminal=specs.Array((1,), np.float32), ) return observation_specs def action_spec(self) -> Dict[str, specs.DiscreteArray]: action_specs = {} for agent in self.possible_agents: action_specs[agent] = _convert_to_spec( self._environment.action_spaces[agent] ) return action_specs def reward_spec(self) -> Dict[str, specs.Array]: reward_specs = {} for agent in self.possible_agents: reward_specs[agent] = specs.Array((), np.float32) return reward_specs def discount_spec(self) -> Dict[str, specs.BoundedArray]: discount_specs = {} for agent in self.possible_agents: discount_specs[agent] = specs.BoundedArray( (), np.float32, minimum=0, maximum=1.0 ) return discount_specs def extra_spec(self) -> Dict[str, specs.BoundedArray]: return {} @property def agents(self) -> List: return self._environment.agents @property def possible_agents(self) -> List: return self._environment.possible_agents @property def environment(self) -> AECEnv: """Returns the wrapped environment.""" return self._environment @property def current_agent(self) -> Any: return self._environment.agent_selection @property def num_agents(self) -> int: return self._environment.num_agents def __getattr__(self, name: str) -> Any: """Expose any other attributes of the underlying environment.""" if hasattr(self.__class__, name): return self.__getattribute__(name) else: return getattr(self._environment, name) class 
PettingZooParallelEnvWrapper(ParallelEnvWrapper): """Environment wrapper for PettingZoo MARL environments.""" def __init__( self, environment: ParallelEnv, env_preprocess_wrappers: Optional[List] = [ # (env_preprocessor, dict_with_preprocessor_params) (black_death_v1, None), ], ): self._environment = environment self._reset_next_step = True if env_preprocess_wrappers: self._environment = apply_env_wrapper_preprocessers( self._environment, env_preprocess_wrappers ) def reset(self) -> dm_env.TimeStep: """Resets the episode.""" self._reset_next_step = False self._step_type = dm_env.StepType.FIRST discount_spec = self.discount_spec() observe = self._environment.reset() self._discounts = { agent: convert_np_type(discount_spec[agent].dtype, 1) for agent in self.possible_agents } if type(observe) == tuple: observe, env_extras = observe else: env_extras = {} observations = self._convert_observations( observe, {agent: False for agent in self.possible_agents} ) rewards_spec = self.reward_spec() rewards = { agent: convert_np_type(rewards_spec[agent].dtype, 0) for agent in self.possible_agents } return parameterized_restart(rewards, self._discounts, observations), env_extras def step(self, actions: Dict[str, np.ndarray]) -> dm_env.TimeStep: """Steps the environment.""" if self._reset_next_step: return self.reset() observations, rewards, dones, infos = self._environment.step(actions) rewards_spec = self.reward_spec() # Handle empty rewards if not rewards: rewards = { agent: convert_np_type(rewards_spec[agent].dtype, 0) for agent in self.possible_agents } else: rewards = { agent: convert_np_type(rewards_spec[agent].dtype, reward) for agent, reward in rewards.items() } if observations: observations = self._convert_observations(observations, dones) if self.env_done(): self._step_type = dm_env.StepType.LAST self._reset_next_step = True else: self._step_type = dm_env.StepType.MID return dm_env.TimeStep( observation=observations, reward=rewards, discount=self._discounts, 
step_type=self._step_type, ) def env_done(self) -> bool: return not self.agents # Convert PettingZoo observation so it's dm_env compatible. Also, the list # of legal actions must be converted to a legal actions mask. def _convert_observations( self, observes: Dict[str, np.ndarray], dones: Dict[str, bool] ) -> types.Observation: observations: Dict[str, types.OLT] = {} for agent, observation in observes.items(): if isinstance(observation, dict) and "action_mask" in observation: legals = observation["action_mask"] observation = observation["observation"] else: # TODO Handle legal actions better for continous envs, # maybe have min and max for each action and clip the agents actions # accordingly legals = np.ones( _convert_to_spec(self._environment.action_spaces[agent]).shape, dtype=self._environment.action_spaces[agent].dtype, ) observations[agent] = types.OLT( observation=observation, legal_actions=legals, terminal=np.asarray([dones[agent]], dtype=np.float32), ) return observations def observation_spec(self) -> types.Observation: observation_specs = {} for agent in self.possible_agents: observation_specs[agent] = types.OLT( observation=_convert_to_spec( self._environment.observation_spaces[agent] ), legal_actions=_convert_to_spec(self._environment.action_spaces[agent]), terminal=specs.Array((1,), np.float32), ) return observation_specs def action_spec(self) -> Dict[str, Union[specs.DiscreteArray, specs.BoundedArray]]: action_specs = {} action_spaces = self._environment.action_spaces for agent in self.possible_agents: action_specs[agent] = _convert_to_spec(action_spaces[agent]) return action_specs def reward_spec(self) -> Dict[str, specs.Array]: reward_specs = {} for agent in self.possible_agents: reward_specs[agent] = specs.Array((), np.float32) return reward_specs def discount_spec(self) -> Dict[str, specs.BoundedArray]: discount_specs = {} for agent in self.possible_agents: discount_specs[agent] = specs.BoundedArray( (), np.float32, minimum=0, maximum=1.0 ) return 
discount_specs def extra_spec(self) -> Dict[str, specs.BoundedArray]: return {} @property def agents(self) -> List: return self._environment.agents @property def possible_agents(self) -> List: return self._environment.possible_agents @property def environment(self) -> ParallelEnv: """Returns the wrapped environment.""" return self._environment @property def current_agent(self) -> Any: return self._environment.agent_selection def __getattr__(self, name: str) -> Any: """Expose any other attributes of the underlying environment.""" if hasattr(self.__class__, name): return self.__getattribute__(name) else: return getattr(self._environment, name)
[ "copy.deepcopy", "acme.specs.BoundedArray", "dm_env.TimeStep", "numpy.asarray", "mava.utils.wrapper_utils.apply_env_wrapper_preprocessers", "acme.specs.Array", "mava.utils.wrapper_utils.convert_np_type", "acme.wrappers.gym_wrapper._convert_to_spec", "mava.utils.wrapper_utils.parameterized_restart", ...
[((2552, 2610), 'mava.utils.wrapper_utils.parameterized_restart', 'parameterized_restart', (['reward', 'self._discount', 'observation'], {}), '(reward, self._discount, observation)\n', (2573, 2610), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((3779, 3885), 'dm_env.TimeStep', 'dm_env.TimeStep', ([], {'observation': 'observation', 'reward': 'reward', 'discount': 'self._discount', 'step_type': 'step_type'}), '(observation=observation, reward=reward, discount=self.\n _discount, step_type=step_type)\n', (3794, 3885), False, 'import dm_env\n'), ((12037, 12152), 'dm_env.TimeStep', 'dm_env.TimeStep', ([], {'observation': 'observations', 'reward': 'rewards', 'discount': 'self._discounts', 'step_type': 'self._step_type'}), '(observation=observations, reward=rewards, discount=self.\n _discounts, step_type=self._step_type)\n', (12052, 12152), False, 'import dm_env\n'), ((1702, 1777), 'mava.utils.wrapper_utils.apply_env_wrapper_preprocessers', 'apply_env_wrapper_preprocessers', (['self._environment', 'env_preprocess_wrappers'], {}), '(self._environment, env_preprocess_wrappers)\n', (1733, 1777), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((8026, 8082), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['self._environment.action_spaces[agent]'], {}), '(self._environment.action_spaces[agent])\n', (8042, 8082), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((8298, 8325), 'acme.specs.Array', 'specs.Array', (['()', 'np.float32'], {}), '((), np.float32)\n', (8309, 8325), False, 'from acme import specs\n'), ((8525, 8583), 'acme.specs.BoundedArray', 'specs.BoundedArray', (['()', 'np.float32'], {'minimum': '(0)', 'maximum': '(1.0)'}), '((), np.float32, minimum=0, maximum=1.0)\n', (8543, 8583), False, 'from acme import specs\n'), ((10013, 10088), 
'mava.utils.wrapper_utils.apply_env_wrapper_preprocessers', 'apply_env_wrapper_preprocessers', (['self._environment', 'env_preprocess_wrappers'], {}), '(self._environment, env_preprocess_wrappers)\n', (10044, 10088), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((10417, 10463), 'mava.utils.wrapper_utils.convert_np_type', 'convert_np_type', (['discount_spec[agent].dtype', '(1)'], {}), '(discount_spec[agent].dtype, 1)\n', (10432, 10463), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((10853, 10898), 'mava.utils.wrapper_utils.convert_np_type', 'convert_np_type', (['rewards_spec[agent].dtype', '(0)'], {}), '(rewards_spec[agent].dtype, 0)\n', (10868, 10898), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((10971, 11032), 'mava.utils.wrapper_utils.parameterized_restart', 'parameterized_restart', (['rewards', 'self._discounts', 'observations'], {}), '(rewards, self._discounts, observations)\n', (10992, 11032), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((14233, 14271), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['action_spaces[agent]'], {}), '(action_spaces[agent])\n', (14249, 14271), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((14457, 14484), 'acme.specs.Array', 'specs.Array', (['()', 'np.float32'], {}), '((), np.float32)\n', (14468, 14484), False, 'from acme import specs\n'), ((14684, 14742), 'acme.specs.BoundedArray', 'specs.BoundedArray', (['()', 'np.float32'], {'minimum': '(0)', 'maximum': '(1.0)'}), '((), np.float32, minimum=0, maximum=1.0)\n', (14702, 14742), False, 'from acme import specs\n'), ((5283, 5319), 'numpy.asarray', 'np.asarray', (['[done]'], {'dtype': 'np.float32'}), '([done], dtype=np.float32)\n', (5293, 
5319), True, 'import numpy as np\n'), ((6871, 6944), 'copy.deepcopy', 'copy.deepcopy', (["self._environment.observation_spaces[agent]['observation']"], {}), "(self._environment.observation_spaces[agent]['observation'])\n", (6884, 6944), False, 'import copy\n'), ((7021, 7094), 'copy.deepcopy', 'copy.deepcopy', (["self._environment.observation_spaces[agent]['action_mask']"], {}), "(self._environment.observation_spaces[agent]['action_mask'])\n", (7034, 7094), False, 'import copy\n'), ((7179, 7237), 'copy.deepcopy', 'copy.deepcopy', (['self._environment.observation_spaces[agent]'], {}), '(self._environment.observation_spaces[agent])\n', (7192, 7237), False, 'import copy\n'), ((7276, 7329), 'copy.deepcopy', 'copy.deepcopy', (['self._environment.action_spaces[agent]'], {}), '(self._environment.action_spaces[agent])\n', (7289, 7329), False, 'import copy\n'), ((7445, 7465), 'numpy.dtype', 'np.dtype', (['np.float32'], {}), '(np.float32)\n', (7453, 7465), True, 'import numpy as np\n'), ((7563, 7581), 'numpy.dtype', 'np.dtype', (['np.int64'], {}), '(np.int64)\n', (7571, 7581), True, 'import numpy as np\n'), ((11446, 11491), 'mava.utils.wrapper_utils.convert_np_type', 'convert_np_type', (['rewards_spec[agent].dtype', '(0)'], {}), '(rewards_spec[agent].dtype, 0)\n', (11461, 11491), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((11617, 11667), 'mava.utils.wrapper_utils.convert_np_type', 'convert_np_type', (['rewards_spec[agent].dtype', 'reward'], {}), '(rewards_spec[agent].dtype, reward)\n', (11632, 11667), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'), ((4710, 4766), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['self._environment.action_spaces[agent]'], {}), '(self._environment.action_spaces[agent])\n', (4726, 4766), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((4957, 4977), 
'numpy.dtype', 'np.dtype', (['np.float32'], {}), '(np.float32)\n', (4965, 4977), True, 'import numpy as np\n'), ((5125, 5143), 'numpy.dtype', 'np.dtype', (['np.int64'], {}), '(np.int64)\n', (5133, 5143), True, 'import numpy as np\n'), ((7660, 7687), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['obs_space'], {}), '(obs_space)\n', (7676, 7687), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((7719, 7756), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['legal_actions_space'], {}), '(legal_actions_space)\n', (7735, 7756), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((7783, 7812), 'acme.specs.Array', 'specs.Array', (['(1,)', 'np.float32'], {}), '((1,), np.float32)\n', (7794, 7812), False, 'from acme import specs\n'), ((13398, 13442), 'numpy.asarray', 'np.asarray', (['[dones[agent]]'], {'dtype': 'np.float32'}), '([dones[agent]], dtype=np.float32)\n', (13408, 13442), True, 'import numpy as np\n'), ((13693, 13754), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['self._environment.observation_spaces[agent]'], {}), '(self._environment.observation_spaces[agent])\n', (13709, 13754), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((13824, 13880), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['self._environment.action_spaces[agent]'], {}), '(self._environment.action_spaces[agent])\n', (13840, 13880), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n'), ((13907, 13936), 'acme.specs.Array', 'specs.Array', (['(1,)', 'np.float32'], {}), '((1,), np.float32)\n', (13918, 13936), False, 'from acme import specs\n'), ((13094, 13150), 'acme.wrappers.gym_wrapper._convert_to_spec', '_convert_to_spec', (['self._environment.action_spaces[agent]'], {}), '(self._environment.action_spaces[agent])\n', (13110, 13150), False, 'from acme.wrappers.gym_wrapper import _convert_to_spec\n')]
import os import numpy as np from matplotlib import pyplot as plt from mpl_toolkits import mplot3d from mpl_toolkits.mplot3d import Axes3D from sklearn.decomposition import IncrementalPCA ## set paths inputpath = './input/' ## Toggle PCA for better visualization of clusters pca_flag = int(input("\nPerform PCA for cluster visualization?:" \ + " press 1 if YES, and 0 if NO \n")) # number of PCA components N = 3 #import data set print("\n\n") print("\nLoading data from " + os.path.dirname(os.path.realpath(__file__)) + inputpath[1:]) print("\nPlease be patient...this can take a while for large files...") print("\n\n") with open(inputpath + 'X.txt') as file: X = np.array([[float(digit) for digit in line.split()] for line in file]) # import clustering labels but check to see if files exist first if not os.path.exists(inputpath + 'kx.txt'): kx = np.ones((np.shape(X)[0],)) else: kx = np.genfromtxt(inputpath + 'kx.txt') if not os.path.exists(inputpath + 'cx.txt'): cx = np.ones((len(kx),)) else: cx = np.genfromtxt(inputpath + 'cx.txt') # create index for plotting indx = np.vstack((kx.astype(int), cx.astype(int))) # get number of clustering instances K = len(np.unique(indx[0,:])) if pca_flag: # batch size for incremental PCA batchsz = 10 # perform PCA for visualization of clusters pca = IncrementalPCA(n_components = N, batch_size = batchsz) X = pca.fit_transform(X) # main loop notdone = True while notdone: instance = input("\nWhat is the clustering instance you wish to plot? 
") instance = int(instance) print('\nProcessing...\n') # project onto 3D axes plt.figure(figsize=(10,8)) ax = plt.axes(projection='3d') title = "Cluster plot: instance %d" % instance kindx = np.asarray(np.where(indx[0,:] == instance)) cindx = np.unique(indx[1,kindx]) #for i, target_name in zip(range(Nc), iris.target_names): for i in cindx: ax.scatter3D(X[kindx[indx[1,kindx] == i], 0], X[kindx[indx[1,kindx] == i], 1], X[kindx[indx[1,kindx] == i], 2], label = str(i), s = 4) #print(np.std(X[kindx[indx[1,kindx] == i],:], axis = 0)) plt.title(title + " of %d" % K) #if display_clusternum: # ax.text2D(1, 1, r'y ='+str(y[instance-1]), fontsize=10, transform=ax.transAxes) plt.legend(loc="upper left", shadow=False, scatterpoints=1) plt.show() getuserinput = input("Want to continue?: press 1 if YES," \ + " and 0 to EXIT \n\n") if(eval(getuserinput) == 0): notdone = False print('\nExiting...\n\n')
[ "os.path.exists", "numpy.shape", "numpy.unique", "numpy.where", "os.path.realpath", "matplotlib.pyplot.figure", "matplotlib.pyplot.axes", "matplotlib.pyplot.title", "sklearn.decomposition.IncrementalPCA", "numpy.genfromtxt", "matplotlib.pyplot.legend", "matplotlib.pyplot.show" ]
[((935, 971), 'os.path.exists', 'os.path.exists', (["(inputpath + 'kx.txt')"], {}), "(inputpath + 'kx.txt')\n", (949, 971), False, 'import os\n'), ((1027, 1062), 'numpy.genfromtxt', 'np.genfromtxt', (["(inputpath + 'kx.txt')"], {}), "(inputpath + 'kx.txt')\n", (1040, 1062), True, 'import numpy as np\n'), ((1071, 1107), 'os.path.exists', 'os.path.exists', (["(inputpath + 'cx.txt')"], {}), "(inputpath + 'cx.txt')\n", (1085, 1107), False, 'import os\n'), ((1156, 1191), 'numpy.genfromtxt', 'np.genfromtxt', (["(inputpath + 'cx.txt')"], {}), "(inputpath + 'cx.txt')\n", (1169, 1191), True, 'import numpy as np\n'), ((1324, 1345), 'numpy.unique', 'np.unique', (['indx[0, :]'], {}), '(indx[0, :])\n', (1333, 1345), True, 'import numpy as np\n'), ((1478, 1528), 'sklearn.decomposition.IncrementalPCA', 'IncrementalPCA', ([], {'n_components': 'N', 'batch_size': 'batchsz'}), '(n_components=N, batch_size=batchsz)\n', (1492, 1528), False, 'from sklearn.decomposition import IncrementalPCA\n'), ((1783, 1810), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 8)'}), '(figsize=(10, 8))\n', (1793, 1810), True, 'from matplotlib import pyplot as plt\n'), ((1820, 1845), 'matplotlib.pyplot.axes', 'plt.axes', ([], {'projection': '"""3d"""'}), "(projection='3d')\n", (1828, 1845), True, 'from matplotlib import pyplot as plt\n'), ((1968, 1993), 'numpy.unique', 'np.unique', (['indx[1, kindx]'], {}), '(indx[1, kindx])\n', (1977, 1993), True, 'import numpy as np\n'), ((2379, 2410), 'matplotlib.pyplot.title', 'plt.title', (["(title + ' of %d' % K)"], {}), "(title + ' of %d' % K)\n", (2388, 2410), True, 'from matplotlib import pyplot as plt\n'), ((2535, 2594), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""', 'shadow': '(False)', 'scatterpoints': '(1)'}), "(loc='upper left', shadow=False, scatterpoints=1)\n", (2545, 2594), True, 'from matplotlib import pyplot as plt\n'), ((2600, 2610), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2608, 2610), True, 
'from matplotlib import pyplot as plt\n'), ((1922, 1954), 'numpy.where', 'np.where', (['(indx[0, :] == instance)'], {}), '(indx[0, :] == instance)\n', (1930, 1954), True, 'import numpy as np\n'), ((541, 567), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (557, 567), False, 'import os\n'), ((992, 1003), 'numpy.shape', 'np.shape', (['X'], {}), '(X)\n', (1000, 1003), True, 'import numpy as np\n')]
import unittest from database import Database from search_database import SearchDatabase class SearchDatabaseTestCase(unittest.TestCase): def test_update(self): database = Database('purplePolitics', 'events') events = database.get_events(False) search_database = SearchDatabase() search_database.clear() search_database.update(events) if __name__ == '__main__': unittest.main()
[ "unittest.main", "search_database.SearchDatabase", "database.Database" ]
[((415, 430), 'unittest.main', 'unittest.main', ([], {}), '()\n', (428, 430), False, 'import unittest\n'), ((187, 223), 'database.Database', 'Database', (['"""purplePolitics"""', '"""events"""'], {}), "('purplePolitics', 'events')\n", (195, 223), False, 'from database import Database\n'), ((294, 310), 'search_database.SearchDatabase', 'SearchDatabase', ([], {}), '()\n', (308, 310), False, 'from search_database import SearchDatabase\n')]
from sys import stdin def main_s(): print("---------------------------------------------------------------------") print("Bienvenido al simulador de Pac-Man") print("OPCIONES:") print("1. Aleatorio") print("2. Manual") val = str(stdin.readline().strip()) print("---------------------------------------------------------------------") if val == "2": from Español.No_aleatorio import no_aleatorio no_aleatorio() elif val == "1": from Español.aleatorio import aleatorio aleatorio() main_s()
[ "sys.stdin.readline", "Español.No_aleatorio.no_aleatorio", "Español.aleatorio.aleatorio" ]
[((445, 459), 'Español.No_aleatorio.no_aleatorio', 'no_aleatorio', ([], {}), '()\n', (457, 459), False, 'from Español.No_aleatorio import no_aleatorio\n'), ((537, 548), 'Español.aleatorio.aleatorio', 'aleatorio', ([], {}), '()\n', (546, 548), False, 'from Español.aleatorio import aleatorio\n'), ((255, 271), 'sys.stdin.readline', 'stdin.readline', ([], {}), '()\n', (269, 271), False, 'from sys import stdin\n')]
import discord from bs4 import BeautifulSoup import asyncio import datetime from io import BytesIO class Archiver: def __init__(self, bot): self.bot = bot async def on_message(self, message): await self.archive_message(message) async def on_message_edit(self, before, after): await self.archive_message(after, True, before) async def archive_message(self, message, edit: bool = False, before=None): # if message not in whitelisted server if message.guild not in (self.bot.whitelisted_servers if self.bot.whitelisted_servers else []): # (or bot still booting up) return # if the message is in an archive channel, ignore if(await self.is_in_archive_channel(message)): return # sending an message with an embed somehow triggers on_message_edit - this should prevent double messages if(len(message.embeds) > 0 and edit): return # generate archive channel name archive_channel_name = f"ar-{message.channel.name}" # if generated channel doesn't exist in either server, default to ar-other if(discord.utils.get(self.bot.backup_server.channels, name=archive_channel_name) == None or discord.utils.get(self.bot.main_server.channels, name=archive_channel_name) == None): archive_channel_name = "ar-other" # get channels for both main and backup servers mainserver_archive_channel = discord.utils.get(self.bot.main_server.channels, name=archive_channel_name) backupserver_archive_channel = discord.utils.get(self.bot.backup_server.channels, name=archive_channel_name) text = None # if the message has an embed anyway (likely bot), send that instead if(len(message.embeds) > 0): archive_embed = message.embeds[0] text = f"{message.author.name}#{message.author.discriminator} sent the following embed at {self.format_time(message.created_at)}:" # else send the message in an embed else: if(len(message.clean_content) > 0): archive_embed = await self.generate_archive_embed(message, edit, before) # if message is empty (likely attachment only), send no embed else: archive_embed = None # if message text is empty, 
add extra message if(len(message.attachments) > 0): if(len(message.clean_content) == 0): text = f"{message.author.name}#{message.author.discriminator} sent the following attachment(s) at {self.format_time(message.created_at)}:" else: text = "\n---\nAttachments:" # send messages to both servers await mainserver_archive_channel.send(content=text, embed=archive_embed, files=(await self.filify(message.attachments) if message.attachments else None)) await backupserver_archive_channel.send(content=text, embed=archive_embed, files=(await self.filify(message.attachments) if message.attachments else None)) # checks if a message is sent in an archive channel async def is_in_archive_channel(self, message): if(message.channel.name.startswith("ar-")): return True return False async def generate_archive_embed(self, message, edit: bool = False, before = None): embed = discord.Embed() embed.set_author(name=f"{message.author.name}#{message.author.discriminator}", icon_url=message.author.avatar_url) footer_text = f"#{message.channel.name} | Sent: {self.format_time(message.created_at)}" if(edit): embed.title = "Edited message" embed.add_field( name="Before", value=before.clean_content ) embed.add_field( name="After", value=message.clean_content ) footer_text += f" | Edited: {self.format_time(message.edited_at)}" else: embed.add_field( name="Message", value=message.clean_content ) if(len(message.attachments) > 0): embed.add_field( name="Attachments included", value="See above" ) embed.set_footer(text=footer_text) return embed def format_time(self, time): return time.strftime("%Y-%m-%d %H:%M:%S") @staticmethod async def filify(attachments): """Converts attachments of an instance of `discord.Message` to a list of instances of `discord.File`.""" ret = [] for file in attachments: byte = BytesIO() await file.save(byte) byte.seek(0) ret.append(discord.File(byte, filename=file.filename)) return ret def setup(bot): bot.add_cog(Archiver(bot))
[ "discord.utils.get", "discord.Embed", "io.BytesIO", "discord.File" ]
[((1493, 1568), 'discord.utils.get', 'discord.utils.get', (['self.bot.main_server.channels'], {'name': 'archive_channel_name'}), '(self.bot.main_server.channels, name=archive_channel_name)\n', (1510, 1568), False, 'import discord\n'), ((1608, 1685), 'discord.utils.get', 'discord.utils.get', (['self.bot.backup_server.channels'], {'name': 'archive_channel_name'}), '(self.bot.backup_server.channels, name=archive_channel_name)\n', (1625, 1685), False, 'import discord\n'), ((3386, 3401), 'discord.Embed', 'discord.Embed', ([], {}), '()\n', (3399, 3401), False, 'import discord\n'), ((4699, 4708), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (4706, 4708), False, 'from io import BytesIO\n'), ((1178, 1255), 'discord.utils.get', 'discord.utils.get', (['self.bot.backup_server.channels'], {'name': 'archive_channel_name'}), '(self.bot.backup_server.channels, name=archive_channel_name)\n', (1195, 1255), False, 'import discord\n'), ((1267, 1342), 'discord.utils.get', 'discord.utils.get', (['self.bot.main_server.channels'], {'name': 'archive_channel_name'}), '(self.bot.main_server.channels, name=archive_channel_name)\n', (1284, 1342), False, 'import discord\n'), ((4791, 4833), 'discord.File', 'discord.File', (['byte'], {'filename': 'file.filename'}), '(byte, filename=file.filename)\n', (4803, 4833), False, 'import discord\n')]
# Copyright (c) 2012-2018 SoftBank Robotics. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the COPYING file. from qisrc.test.conftest import git_server, qisrc_action # pylint: disable=unused-import # pylint: disable=redefined-outer-name def test_info(qibuild_action, qisrc_action, git_server, record_messages): git_server.create_repo("foo.git") qisrc_action("init", git_server.manifest_url) foo_project = qibuild_action.create_project("foo") record_messages.reset() qibuild_action("info", "foo") assert record_messages.find("src: foo") assert record_messages.find("repo: foo.git") qibuild_action.chdir(foo_project.path) record_messages.reset() qibuild_action("info") assert record_messages.find("Build project: foo")
[ "qisrc.test.conftest.qisrc_action", "qisrc.test.conftest.git_server.create_repo" ]
[((379, 412), 'qisrc.test.conftest.git_server.create_repo', 'git_server.create_repo', (['"""foo.git"""'], {}), "('foo.git')\n", (401, 412), False, 'from qisrc.test.conftest import git_server, qisrc_action\n'), ((417, 462), 'qisrc.test.conftest.qisrc_action', 'qisrc_action', (['"""init"""', 'git_server.manifest_url'], {}), "('init', git_server.manifest_url)\n", (429, 462), False, 'from qisrc.test.conftest import git_server, qisrc_action\n')]
import sqlite3 mem_db = sqlite3.connect(':memory:') db = sqlite3.connect('db/mydb') cr = db.cursor() """ Syntax or Purpose ------ --------- ---------------------- ------------ SELECT DISTINCT Exclude duplicate records for fields selected. SELECT DISTINCT CASE WHEN expression Conditional expression SELECT DISTINCT THEN expression SELECT DISTINCT ELSE expression END SELECT FROM table_name Multiple table names are separated by commas. SELECT WHERE Row level filtering SELECT WHERE expression AND expression SELECT WHERE expression OR expression SELECT WHERE IN Comma delimited list enclosed in parenthesis SELECT WHERE NOT IN Comma delimited list enclosed in parenthesis SELECT WHERE BETWEEN Select records within the specified numeric range SELECT WHERE NOT BETWEEN Select records outside of the specified numeric range SELECT WHERE LIKE String with wildcard (%) enclosed in parenthesis SELECT WHERE NOT LIKE String with wildcard (%) enclosed in parenthesis SELECT GROUP BY SELECT HAVING ORDER BY Sorting of the output using a comma delimited list of column names SELECT HAVING LIMIT Limit the number of rows returned """ db.close()
[ "sqlite3.connect" ]
[((25, 52), 'sqlite3.connect', 'sqlite3.connect', (['""":memory:"""'], {}), "(':memory:')\n", (40, 52), False, 'import sqlite3\n'), ((58, 84), 'sqlite3.connect', 'sqlite3.connect', (['"""db/mydb"""'], {}), "('db/mydb')\n", (73, 84), False, 'import sqlite3\n')]
#!/usr/bin/python # ref: https://gist.github.com/bruienne/f81ea88253629abaf5f9 import objc import plistlib class attrdict(dict): __getattr__ = dict.__getitem__ __setattr__ = dict.__setitem__ ServerInformation = attrdict() ServerInformation_bundle = objc.loadBundle('ServerInformation', ServerInformation, \ bundle_path='/System/Library/PrivateFrameworks/ServerInformation.framework') platformsupport = plistlib.readPlist('/System/Library/CoreServices/PlatformSupport.plist') # disabledsystems = platformsupport.get('SupportedBoardIds') # print('------------------------------------------------------------\n%i Board IDs in list\n------------------------------------------------------------\n' % len(disabledsystems)) unmatchedboardids = [] for system in disabledsystems: for modelid in ServerInformation.ServerInformationComputerModelInfo.modelPropertiesForBoardIDs_([system]): if system not in modelid: print('Board ID: %s = System ID: %s' % (system, modelid)) else: unmatchedboardids.append(system) if len(unmatchedboardids) > 0: print('------------------------------------------------------------') for boardid in unmatchedboardids: print('-- No match for Board ID %s --' % boardid) print('------------------------------------------------------------\n')
[ "plistlib.readPlist", "objc.loadBundle" ]
[((260, 397), 'objc.loadBundle', 'objc.loadBundle', (['"""ServerInformation"""', 'ServerInformation'], {'bundle_path': '"""/System/Library/PrivateFrameworks/ServerInformation.framework"""'}), "('ServerInformation', ServerInformation, bundle_path=\n '/System/Library/PrivateFrameworks/ServerInformation.framework')\n", (275, 397), False, 'import objc\n'), ((418, 490), 'plistlib.readPlist', 'plistlib.readPlist', (['"""/System/Library/CoreServices/PlatformSupport.plist"""'], {}), "('/System/Library/CoreServices/PlatformSupport.plist')\n", (436, 490), False, 'import plistlib\n')]
import numpy as np import matplotlib.pyplot as plt import pickle import pandas as pd import cv2 as cv import os def read_pkl(file_path): obj = pd.read_pickle(file_path) return obj def show_results(obj , folder): for im_num in obj.keys(): data = obj[im_num] file = data['fileName'][12:] file_path = os.path.join(folder, file) im = cv.imread(file_path)#[:,:,::-1] detections = data['detections'] for detection in detections: type = detection[0] roi = detection[2] roi_tl = [roi[0] - roi[2]/2, roi[1] - roi[3]/2, roi[2], roi[3]] roi_ = np.asarray(roi_tl).astype(int) im = cv.rectangle(img=im, rec=roi_, color=(255, 0, 0), thickness=3) plt.imshow(im[:, :, ::-1]) plt.pause(0.0001) a=1 def track_objects(obj , folder): for im_num in obj.keys(): data = obj[im_num] file = data['fileName'][12:] file_path = os.path.join(folder, file) im = cv.imread(file_path)#[:,:,::-1] detections = data['detections'] for detection in detections: # type = detection[0] roi = detection[2] cen = [roi[0], roi[1]] if cen[1] > 500 and cen[0] > 500 and cen[1] < 1300: roi_tl = [roi[0] - roi[2] / 2, roi[1] - roi[3] / 2, roi[2], roi[3]] roi_ = np.asarray(roi_tl).astype(int) im = cv.rectangle(img=im, rec=roi_, color=(0, 0, 255), thickness=3) continue out_path = os.path.join(folder, 'warning', file) cv.imwrite(out_path, im) a=1 if __name__ == '__main__': folder = r'E:\rafi\got_your_back\data\results_files\res\temp_dir - Copy (9)' file_path = os.path.join(folder, r"YoloV3_res\res_pkl.pkl") obj = read_pkl(file_path) track_objects(obj, folder) # show_results(obj, folder)
[ "pandas.read_pickle", "cv2.imwrite", "cv2.rectangle", "matplotlib.pyplot.imshow", "os.path.join", "numpy.asarray", "matplotlib.pyplot.pause", "cv2.imread" ]
[((149, 174), 'pandas.read_pickle', 'pd.read_pickle', (['file_path'], {}), '(file_path)\n', (163, 174), True, 'import pandas as pd\n'), ((1781, 1828), 'os.path.join', 'os.path.join', (['folder', '"""YoloV3_res\\\\res_pkl.pkl"""'], {}), "(folder, 'YoloV3_res\\\\res_pkl.pkl')\n", (1793, 1828), False, 'import os\n'), ((337, 363), 'os.path.join', 'os.path.join', (['folder', 'file'], {}), '(folder, file)\n', (349, 363), False, 'import os\n'), ((377, 397), 'cv2.imread', 'cv.imread', (['file_path'], {}), '(file_path)\n', (386, 397), True, 'import cv2 as cv\n'), ((991, 1017), 'os.path.join', 'os.path.join', (['folder', 'file'], {}), '(folder, file)\n', (1003, 1017), False, 'import os\n'), ((1031, 1051), 'cv2.imread', 'cv.imread', (['file_path'], {}), '(file_path)\n', (1040, 1051), True, 'import cv2 as cv\n'), ((1573, 1610), 'os.path.join', 'os.path.join', (['folder', '"""warning"""', 'file'], {}), "(folder, 'warning', file)\n", (1585, 1610), False, 'import os\n'), ((1619, 1643), 'cv2.imwrite', 'cv.imwrite', (['out_path', 'im'], {}), '(out_path, im)\n', (1629, 1643), True, 'import cv2 as cv\n'), ((693, 755), 'cv2.rectangle', 'cv.rectangle', ([], {'img': 'im', 'rec': 'roi_', 'color': '(255, 0, 0)', 'thickness': '(3)'}), '(img=im, rec=roi_, color=(255, 0, 0), thickness=3)\n', (705, 755), True, 'import cv2 as cv\n'), ((770, 796), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im[:, :, ::-1]'], {}), '(im[:, :, ::-1])\n', (780, 796), True, 'import matplotlib.pyplot as plt\n'), ((809, 826), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.0001)'], {}), '(0.0001)\n', (818, 826), True, 'import matplotlib.pyplot as plt\n'), ((1464, 1526), 'cv2.rectangle', 'cv.rectangle', ([], {'img': 'im', 'rec': 'roi_', 'color': '(0, 0, 255)', 'thickness': '(3)'}), '(img=im, rec=roi_, color=(0, 0, 255), thickness=3)\n', (1476, 1526), True, 'import cv2 as cv\n'), ((644, 662), 'numpy.asarray', 'np.asarray', (['roi_tl'], {}), '(roi_tl)\n', (654, 662), True, 'import numpy as np\n'), ((1412, 1430), 
'numpy.asarray', 'np.asarray', (['roi_tl'], {}), '(roi_tl)\n', (1422, 1430), True, 'import numpy as np\n')]
from os import path import sqlite3 as sql ROOT = path.dirname(path.relpath(__file__)) dbname = 'database.sqlite' def create_post(user, post, parent, location, item, tags, ats, created, status): con = sql.connect(path.join(ROOT, dbname)) cur = con.cursor() cur.execute( """insert into post (user, post, parent, location, item, tags, ats, created, status) values (?,?,?,?,?,?,?,?,?) """ , (user, post, parent, location, item, tags, ats, created, status) ) con.commit() con.close() def get_posts(): con = sql.connect(path.join(ROOT, dbname)) cur = con.cursor() posts = cur.execute('select * from post') con.commit() con.close() return posts def make_db(): schema = open(path.join(ROOT,'schema.sql')).read() con = sql.connect(path.join(ROOT, dbname)) con.execute(schema) con.commit() con.close()
[ "os.path.join", "os.path.relpath" ]
[((63, 85), 'os.path.relpath', 'path.relpath', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'from os import path\n'), ((218, 241), 'os.path.join', 'path.join', (['ROOT', 'dbname'], {}), '(ROOT, dbname)\n', (227, 241), False, 'from os import path\n'), ((605, 628), 'os.path.join', 'path.join', (['ROOT', 'dbname'], {}), '(ROOT, dbname)\n', (614, 628), False, 'from os import path\n'), ((843, 866), 'os.path.join', 'path.join', (['ROOT', 'dbname'], {}), '(ROOT, dbname)\n', (852, 866), False, 'from os import path\n'), ((784, 813), 'os.path.join', 'path.join', (['ROOT', '"""schema.sql"""'], {}), "(ROOT, 'schema.sql')\n", (793, 813), False, 'from os import path\n')]