hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f728cae8d089e59f933fd28ab320f6164d185da9 | 2,480 | py | Python | decoder.py | MaZhanyu007/MSDGAN | 037ad33025c29869dbc9cb233a45b8762d31179d | [
"MIT"
] | 8 | 2018-06-04T13:38:35.000Z | 2021-05-28T08:42:39.000Z | decoder.py | MaZhanyu007/MSDGAN | 037ad33025c29869dbc9cb233a45b8762d31179d | [
"MIT"
] | null | null | null | decoder.py | MaZhanyu007/MSDGAN | 037ad33025c29869dbc9cb233a45b8762d31179d | [
"MIT"
] | 2 | 2019-01-15T02:12:22.000Z | 2021-08-08T10:35:50.000Z | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import torch
import torch.nn as nn
import torch.nn.functional as F
# In[2]:
class Decoder(nn.Module):
def __init__(self, output_dim, emb_dim, enc_hid_dim, dec_hid_dim, dropout_rate, attention):
super().__init__()
self.output_dim = output_dim
self.emb_dim = emb_dim
self.enc_hid_dim = enc_hid_dim
self.dec_hid_dim = dec_hid_dim
self.dropout_rate = dropout_rate
self.attention = attention
self.embedding = nn.Embedding(output_dim, emb_dim)
self.gru = nn.GRU(enc_hid_dim + emb_dim, dec_hid_dim, batch_first=True)
self.fc = nn.Linear(enc_hid_dim + dec_hid_dim + emb_dim, output_dim)
self.dropout = nn.Dropout(dropout_rate)
def forward(self, input, hidden, encoder_outputs):
# input = [batch_size]
# hidden = [batch_size, dec_hid_dim]
# encoder_outputs = [batch_size, seq_len, enc_hid_dim * 2]
input = input.unsqueeze(1)
# input = [batch_size, 1]
embedded = self.dropout(self.embedding(input))
# embedded = [batch_size, 1, emb_dim]
a = self.attention(hidden, encoder_outputs)
# a = [batch_size, seq_len]
a = a.unsqueeze(1)
# a = [batch_size, 1, seq_len]
context = torch.bmm(a, encoder_outputs)
# context = [batch_size, 1, enc_hid_dim * 2]
gru_input = torch.cat((embedded, context), dim=2)
# gru_input = [batch_size, 1, (enc hid dim * 2) + emb dim]
output, hidden = self.gru(gru_input, hidden.unsqueeze(0))
# output = [batch_size, seq_len, dec hid dim * n directions]
# hidden = [n layers * n directions, batch size, dec hid dim]
#seq_len, n layers and n directions will always be 1 in this decoder, therefore:
#output = [batch_size, 1, dec_hid_dim]
#hidden = [1, batch_size, dec_hid_dim]
#this also means that output == hidden
#assert (output == hidden).all()
embedded = embedded.squeeze(1) #[batch_size, emb_dim]
output = output.squeeze(1) #[batch_size, dec_hid_dim * n directions]??????????
context = context.squeeze(1) #[batch_size, enc_hid_dim * 2]
output = self.fc(torch.cat((output, context, embedded), dim=1))
# output = [batch_size, output_dim]
return output, hidden.squeeze(0) | 34.929577 | 95 | 0.602016 |
import torch
import torch.nn as nn
import torch.nn.functional as F
class Decoder(nn.Module):
def __init__(self, output_dim, emb_dim, enc_hid_dim, dec_hid_dim, dropout_rate, attention):
super().__init__()
self.output_dim = output_dim
self.emb_dim = emb_dim
self.enc_hid_dim = enc_hid_dim
self.dec_hid_dim = dec_hid_dim
self.dropout_rate = dropout_rate
self.attention = attention
self.embedding = nn.Embedding(output_dim, emb_dim)
self.gru = nn.GRU(enc_hid_dim + emb_dim, dec_hid_dim, batch_first=True)
self.fc = nn.Linear(enc_hid_dim + dec_hid_dim + emb_dim, output_dim)
self.dropout = nn.Dropout(dropout_rate)
def forward(self, input, hidden, encoder_outputs):
input = input.unsqueeze(1)
embedded = self.dropout(self.embedding(input))
a = self.attention(hidden, encoder_outputs)
a = a.unsqueeze(1)
context = torch.bmm(a, encoder_outputs)
gru_input = torch.cat((embedded, context), dim=2)
output, hidden = self.gru(gru_input, hidden.unsqueeze(0))
embedded = embedded.squeeze(1)
output = output.squeeze(1)
context = context.squeeze(1)
output = self.fc(torch.cat((output, context, embedded), dim=1))
return output, hidden.squeeze(0) | true | true |
f728cbd493181dd21158048ab66c126c7e93eb52 | 271 | py | Python | tests/artificial/transf_Difference/trend_Lag1Trend/cycle_30/ar_/test_artificial_128_Difference_Lag1Trend_30__20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/artificial/transf_Difference/trend_Lag1Trend/cycle_30/ar_/test_artificial_128_Difference_Lag1Trend_30__20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/artificial/transf_Difference/trend_Lag1Trend/cycle_30/ar_/test_artificial_128_Difference_Lag1Trend_30__20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 30, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 0); | 38.714286 | 166 | 0.734317 | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 30, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 0); | true | true |
f728cbe29fe970f9a50eb70f3297589d9896dc90 | 18,663 | py | Python | corehq/apps/accounting/migrations/0005_auto__chg_field_invoice_date_created__chg_field_featurerate_date_creat.py | dslowikowski/commcare-hq | ad8885cf8dab69dc85cb64f37aeaf06106124797 | [
"BSD-3-Clause"
] | 1 | 2015-02-10T23:26:39.000Z | 2015-02-10T23:26:39.000Z | corehq/apps/accounting/migrations/0005_auto__chg_field_invoice_date_created__chg_field_featurerate_date_creat.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/accounting/migrations/0005_auto__chg_field_invoice_date_created__chg_field_featurerate_date_creat.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | [
"BSD-3-Clause"
] | null | null | null | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Invoice.date_created'
db.alter_column(u'accounting_invoice', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'FeatureRate.date_created'
db.alter_column(u'accounting_featurerate', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'CreditLine.date_created'
db.alter_column(u'accounting_creditline', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'BillingAccount.date_created'
db.alter_column(u'accounting_billingaccount', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'CreditAdjustment.date_created'
db.alter_column(u'accounting_creditadjustment', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'SoftwarePlanVersion.date_created'
db.alter_column(u'accounting_softwareplanversion', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'SoftwareProductRate.date_created'
db.alter_column(u'accounting_softwareproductrate', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
# Changing field 'Subscription.date_created'
db.alter_column(u'accounting_subscription', 'date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True))
def backwards(self, orm):
# Changing field 'Invoice.date_created'
db.alter_column(u'accounting_invoice', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'FeatureRate.date_created'
db.alter_column(u'accounting_featurerate', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'CreditLine.date_created'
db.alter_column(u'accounting_creditline', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'BillingAccount.date_created'
db.alter_column(u'accounting_billingaccount', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'CreditAdjustment.date_created'
db.alter_column(u'accounting_creditadjustment', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'SoftwarePlanVersion.date_created'
db.alter_column(u'accounting_softwareplanversion', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'SoftwareProductRate.date_created'
db.alter_column(u'accounting_softwareproductrate', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'Subscription.date_created'
db.alter_column(u'accounting_subscription', 'date_created', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
models = {
u'accounting.billingaccount': {
'Meta': {'object_name': 'BillingAccount'},
'account_type': ('django.db.models.fields.CharField', [], {'default': "'CONTRACT'", 'max_length': '25'}),
'billing_admins': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.BillingAccountAdmin']", 'null': 'True', 'symmetrical': 'False'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Currency']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auto_invoiceable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'salesforce_account_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'})
},
u'accounting.billingaccountadmin': {
'Meta': {'object_name': 'BillingAccountAdmin'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'web_user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80', 'db_index': 'True'})
},
u'accounting.billingcontactinfo': {
'Meta': {'object_name': 'BillingContactInfo'},
'account': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounting.BillingAccount']", 'unique': 'True', 'primary_key': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'emails': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'first_line': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'second_line': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'state_province_region': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'accounting.billingrecord': {
'Meta': {'object_name': 'BillingRecord'},
'date_emailed': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'emailed_to': ('django.db.models.fields.CharField', [], {'max_length': '254', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']"}),
'pdf_data_id': ('django.db.models.fields.CharField', [], {'max_length': '48'})
},
u'accounting.creditadjustment': {
'Meta': {'object_name': 'CreditAdjustment'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'credit_line': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.CreditLine']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']", 'null': 'True'}),
'line_item': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.LineItem']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'reason': ('django.db.models.fields.CharField', [], {'default': "'MANUAL'", 'max_length': '25'}),
'web_user': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'})
},
u'accounting.creditline': {
'Meta': {'object_name': 'CreditLine'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.BillingAccount']"}),
'balance': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feature_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.FeatureRate']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProductRate']", 'null': 'True', 'blank': 'True'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']", 'null': 'True', 'blank': 'True'})
},
u'accounting.currency': {
'Meta': {'object_name': 'Currency'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'date_updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'}),
'rate_to_default': ('django.db.models.fields.DecimalField', [], {'default': '1.0', 'max_digits': '20', 'decimal_places': '9'}),
'symbol': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
u'accounting.defaultproductplan': {
'Meta': {'object_name': 'DefaultProductPlan'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlan']"}),
'product_type': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'})
},
u'accounting.feature': {
'Meta': {'object_name': 'Feature'},
'feature_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
u'accounting.featurerate': {
'Meta': {'object_name': 'FeatureRate'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feature': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Feature']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'monthly_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'}),
'monthly_limit': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'per_excess_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'})
},
u'accounting.invoice': {
'Meta': {'object_name': 'Invoice'},
'balance': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_due': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'date_end': ('django.db.models.fields.DateField', [], {}),
'date_paid': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_received': ('django.db.models.fields.DateField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']"}),
'tax_rate': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'})
},
u'accounting.lineitem': {
'Meta': {'object_name': 'LineItem'},
'base_cost': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'base_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'feature_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.FeatureRate']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']"}),
'product_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProductRate']", 'null': 'True'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'unit_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'accounting.softwareplan': {
'Meta': {'object_name': 'SoftwarePlan'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'edition': ('django.db.models.fields.CharField', [], {'default': "'Enterprise'", 'max_length': '25'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'visibility': ('django.db.models.fields.CharField', [], {'default': "'INTERNAL'", 'max_length': '10'})
},
u'accounting.softwareplanversion': {
'Meta': {'object_name': 'SoftwarePlanVersion'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feature_rates': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.FeatureRate']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlan']"}),
'product_rates': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.SoftwareProductRate']", 'symmetrical': 'False', 'blank': 'True'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_prbac.Role']"})
},
u'accounting.softwareproduct': {
'Meta': {'object_name': 'SoftwareProduct'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'product_type': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'})
},
u'accounting.softwareproductrate': {
'Meta': {'object_name': 'SoftwareProductRate'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'monthly_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProduct']"})
},
u'accounting.subscriber': {
'Meta': {'object_name': 'Subscriber'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'db_index': 'True'})
},
u'accounting.subscription': {
'Meta': {'object_name': 'Subscription'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.BillingAccount']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_delay_invoicing': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'plan_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlanVersion']"}),
'salesforce_contract_id': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True', 'blank': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscriber']"})
},
u'django_prbac.role': {
'Meta': {'object_name': 'Role'},
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'parameters': ('django_prbac.fields.StringSetField', [], {'default': '[]', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'})
}
}
complete_apps = ['accounting']
| 76.487705 | 176 | 0.595778 |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Convert ``date_created`` on the accounting tables from DateField to
    DateTimeField (``forwards``) and back (``backwards``).

    The same ``date_created`` column exists on eight accounting tables and
    the conversion is identical for each, so the table list and the
    alter-column loop are shared between both directions.
    """

    # Every table whose `date_created` column is converted by this migration.
    _DATE_CREATED_TABLES = (
        'accounting_invoice',
        'accounting_featurerate',
        'accounting_creditline',
        'accounting_billingaccount',
        'accounting_creditadjustment',
        'accounting_softwareplanversion',
        'accounting_softwareproductrate',
        'accounting_subscription',
    )

    def _alter_date_created(self, field_path):
        """Alter `date_created` on every affected table to *field_path*."""
        for table in self._DATE_CREATED_TABLES:
            db.alter_column(table, 'date_created',
                            self.gf(field_path)(auto_now_add=True))

    def forwards(self, orm):
        # Date -> DateTime: keep creation timestamps with time-of-day precision.
        self._alter_date_created('django.db.models.fields.DateTimeField')

    def backwards(self, orm):
        # DateTime -> Date: restore the original day-resolution column.
        self._alter_date_created('django.db.models.fields.DateField')

    # South model-freeze metadata (autogenerated); intentionally untouched.
    models = {
        u'accounting.billingaccount': {
            'Meta': {'object_name': 'BillingAccount'},
            'account_type': ('django.db.models.fields.CharField', [], {'default': "'CONTRACT'", 'max_length': '25'}),
            'billing_admins': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.BillingAccountAdmin']", 'null': 'True', 'symmetrical': 'False'}),
            'created_by': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Currency']"}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_auto_invoiceable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
            'salesforce_account_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'})
        },
        u'accounting.billingaccountadmin': {
            'Meta': {'object_name': 'BillingAccountAdmin'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'web_user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80', 'db_index': 'True'})
        },
        u'accounting.billingcontactinfo': {
            'Meta': {'object_name': 'BillingContactInfo'},
            'account': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounting.BillingAccount']", 'unique': 'True', 'primary_key': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'company_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'emails': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
            'first_line': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
            'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
            'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'second_line': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
            'state_province_region': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'accounting.billingrecord': {
            'Meta': {'object_name': 'BillingRecord'},
            'date_emailed': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
            'emailed_to': ('django.db.models.fields.CharField', [], {'max_length': '254', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']"}),
            'pdf_data_id': ('django.db.models.fields.CharField', [], {'max_length': '48'})
        },
        u'accounting.creditadjustment': {
            'Meta': {'object_name': 'CreditAdjustment'},
            'amount': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
            'credit_line': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.CreditLine']"}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']", 'null': 'True'}),
            'line_item': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.LineItem']", 'null': 'True'}),
            'note': ('django.db.models.fields.TextField', [], {}),
            'reason': ('django.db.models.fields.CharField', [], {'default': "'MANUAL'", 'max_length': '25'}),
            'web_user': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'})
        },
        u'accounting.creditline': {
            'Meta': {'object_name': 'CreditLine'},
            'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.BillingAccount']"}),
            'balance': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'feature_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.FeatureRate']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'product_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProductRate']", 'null': 'True', 'blank': 'True'}),
            'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']", 'null': 'True', 'blank': 'True'})
        },
        u'accounting.currency': {
            'Meta': {'object_name': 'Currency'},
            'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
            'date_updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'}),
            'rate_to_default': ('django.db.models.fields.DecimalField', [], {'default': '1.0', 'max_digits': '20', 'decimal_places': '9'}),
            'symbol': ('django.db.models.fields.CharField', [], {'max_length': '10'})
        },
        u'accounting.defaultproductplan': {
            'Meta': {'object_name': 'DefaultProductPlan'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlan']"}),
            'product_type': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'})
        },
        u'accounting.feature': {
            'Meta': {'object_name': 'Feature'},
            'feature_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
        },
        u'accounting.featurerate': {
            'Meta': {'object_name': 'FeatureRate'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'feature': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Feature']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'monthly_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'}),
            'monthly_limit': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'per_excess_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'})
        },
        u'accounting.invoice': {
            'Meta': {'object_name': 'Invoice'},
            'balance': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_due': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
            'date_end': ('django.db.models.fields.DateField', [], {}),
            'date_paid': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'date_received': ('django.db.models.fields.DateField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'date_start': ('django.db.models.fields.DateField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']"}),
            'tax_rate': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'})
        },
        u'accounting.lineitem': {
            'Meta': {'object_name': 'LineItem'},
            'base_cost': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
            'base_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'feature_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.FeatureRate']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']"}),
            'product_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProductRate']", 'null': 'True'}),
            'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
            'unit_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'accounting.softwareplan': {
            'Meta': {'object_name': 'SoftwarePlan'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'edition': ('django.db.models.fields.CharField', [], {'default': "'Enterprise'", 'max_length': '25'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'visibility': ('django.db.models.fields.CharField', [], {'default': "'INTERNAL'", 'max_length': '10'})
        },
        u'accounting.softwareplanversion': {
            'Meta': {'object_name': 'SoftwarePlanVersion'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'feature_rates': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.FeatureRate']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlan']"}),
            'product_rates': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.SoftwareProductRate']", 'symmetrical': 'False', 'blank': 'True'}),
            'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_prbac.Role']"})
        },
        u'accounting.softwareproduct': {
            'Meta': {'object_name': 'SoftwareProduct'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
            'product_type': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'})
        },
        u'accounting.softwareproductrate': {
            'Meta': {'object_name': 'SoftwareProductRate'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'monthly_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProduct']"})
        },
        u'accounting.subscriber': {
            'Meta': {'object_name': 'Subscriber'},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'db_index': 'True'})
        },
        u'accounting.subscription': {
            'Meta': {'object_name': 'Subscription'},
            'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.BillingAccount']"}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_delay_invoicing': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'date_start': ('django.db.models.fields.DateField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'plan_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlanVersion']"}),
            'salesforce_contract_id': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True', 'blank': 'True'}),
            'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscriber']"})
        },
        u'django_prbac.role': {
            'Meta': {'object_name': 'Role'},
            'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'parameters': ('django_prbac.fields.StringSetField', [], {'default': '[]', 'blank': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'})
        }
    }

    complete_apps = ['accounting']
| true | true |
f728cbf9eae1c2d10f23f415ed861466f2e9d67e | 1,772 | py | Python | jobs/migrations/0011_jobreviewcomment.py | buketkonuk/pythondotorg | 4d8d9728eea7c7b2fef32eb6f24fda409cf24a06 | [
"Apache-2.0"
] | 1 | 2019-09-02T00:43:27.000Z | 2019-09-02T00:43:27.000Z | jobs/migrations/0011_jobreviewcomment.py | buketkonuk/pythondotorg | 4d8d9728eea7c7b2fef32eb6f24fda409cf24a06 | [
"Apache-2.0"
] | null | null | null | jobs/migrations/0011_jobreviewcomment.py | buketkonuk/pythondotorg | 4d8d9728eea7c7b2fef32eb6f24fda409cf24a06 | [
"Apache-2.0"
] | 1 | 2019-09-02T00:51:38.000Z | 2019-09-02T00:51:38.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
    """Add the ``JobReviewComment`` model: a markup-rendered comment left on
    a Job during review, with creator / last-modified-by audit references to
    the configured user model.
    """
    dependencies = [
        # Depends on whichever user model the project has configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('jobs', '0010_auto_20150416_1853'),
    ]
    operations = [
        migrations.CreateModel(
            name='JobReviewComment',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
                ('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, db_index=True)),
                ('updated', models.DateTimeField(blank=True)),
                # MarkupField stores the raw text; the rendered HTML lives in
                # the companion `_comment_rendered` column below.
                ('comment', markupfield.fields.MarkupField(rendered_field=True)),
                ('comment_markup_type', models.CharField(choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown'), ('restructuredtext', 'Restructured Text')], max_length=30, default='restructuredtext')),
                ('_comment_rendered', models.TextField(editable=False)),
                ('creator', models.ForeignKey(related_name='jobs_jobreviewcomment_creator', to=settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)),
                ('job', models.ForeignKey(related_name='review_comments', to='jobs.Job', on_delete=models.CASCADE)),
                ('last_modified_by', models.ForeignKey(related_name='jobs_jobreviewcomment_modified', to=settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
    ]
| 47.891892 | 230 | 0.645598 |
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('jobs', '0010_auto_20150416_1853'),
]
operations = [
migrations.CreateModel(
name='JobReviewComment',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, db_index=True)),
('updated', models.DateTimeField(blank=True)),
('comment', markupfield.fields.MarkupField(rendered_field=True)),
('comment_markup_type', models.CharField(choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown'), ('restructuredtext', 'Restructured Text')], max_length=30, default='restructuredtext')),
('_comment_rendered', models.TextField(editable=False)),
('creator', models.ForeignKey(related_name='jobs_jobreviewcomment_creator', to=settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)),
('job', models.ForeignKey(related_name='review_comments', to='jobs.Job', on_delete=models.CASCADE)),
('last_modified_by', models.ForeignKey(related_name='jobs_jobreviewcomment_modified', to=settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
| true | true |
f728cc0fc288f09ecb77a715285a788cdae8416c | 2,286 | py | Python | package/data/datasets/cuhk03_np_detected_jpg.py | MikeCun/PersonReID | 9c7c122212410f79edc2b5582ed75e8a727ea88a | [
"MIT"
] | 1 | 2020-08-28T05:54:39.000Z | 2020-08-28T05:54:39.000Z | package/data/datasets/cuhk03_np_detected_jpg.py | MikeCun/PersonReID | 9c7c122212410f79edc2b5582ed75e8a727ea88a | [
"MIT"
] | 6 | 2020-06-05T20:45:43.000Z | 2022-03-11T23:45:39.000Z | package/data/datasets/cuhk03_np_detected_jpg.py | MikeCun/PersonReID | 9c7c122212410f79edc2b5582ed75e8a727ea88a | [
"MIT"
] | null | null | null | import os.path as osp
from PIL import Image
import numpy as np
from scipy.misc import imsave
from tqdm import tqdm
import shutil
from ...utils.file import walkdir
from ...utils.file import may_make_dir
from .market1501 import Market1501
class CUHK03NpDetectedJpg(Market1501):
    """CUHK03-NP ("detected" split) served as JPG images.

    On first use the PNG images shipped with CUHK03-NP are transcoded into a
    parallel JPG directory tree; afterwards the dataset behaves like
    Market1501 over the JPG copies.
    """
    has_pap_mask = True
    has_ps_label = True
    png_im_root = 'cuhk03-np'      # original PNG tree (must already exist)
    im_root = 'cuhk03-np-jpg'      # JPG tree, generated on demand
    split_spec = {
        'train': {'pattern': '{}/detected/bounding_box_train/*.jpg'.format(im_root), 'map_label': True},
        'query': {'pattern': '{}/detected/query/*.jpg'.format(im_root), 'map_label': False},
        'gallery': {'pattern': '{}/detected/bounding_box_test/*.jpg'.format(im_root), 'map_label': False},
    }

    def __init__(self, cfg, samples=None):
        self.root = osp.join(cfg.root, cfg.name)
        # Make sure the JPG copies exist before the base class scans them.
        self._save_png_as_jpg()
        super(CUHK03NpDetectedJpg, self).__init__(cfg, samples=samples)

    def _get_kpt_key(self, im_path):
        # Keypoint annotations are keyed directly by the image path.
        return im_path

    def _get_ps_label_path(self, im_path):
        # Part-segmentation labels mirror the JPG tree under `<im_root>_ps_label`,
        # stored as PNGs.
        return osp.join(self.root, im_path.replace(self.im_root, self.im_root + '_ps_label').replace('.jpg', '.png'))

    def _save_png_as_jpg(self):
        """Transcode every PNG under ``png_im_root`` into ``im_root``.

        Idempotent: skipped when the JPG tree already holds the same number
        of images; a mismatched tree is deleted and regenerated.
        """
        png_im_dir = osp.join(self.root, self.png_im_root)
        jpg_im_dir = osp.join(self.root, self.im_root)
        assert osp.exists(png_im_dir), "The PNG image dir {} should be placed inside {}".format(png_im_dir, self.root)
        png_paths = list(walkdir(png_im_dir, '.png'))
        if osp.exists(jpg_im_dir):
            jpg_paths = list(walkdir(jpg_im_dir, '.jpg'))
            if len(png_paths) == len(jpg_paths):
                print('=> Found same number of JPG images. Skip transforming PNG to JPG.')
                return
            else:
                shutil.rmtree(jpg_im_dir)
                print('=> JPG image dir exists but does not contain same number of images. So it is removed and would be re-generated.')
        # CUHK03 contains 14097 detected + 14096 labeled images = 28193
        for png_path in tqdm(png_paths, desc='PNG->JPG', miniters=2000, ncols=120, unit=' images'):
            jpg_path = png_path.replace(self.png_im_root, self.im_root).replace('.png', '.jpg')
            may_make_dir(osp.dirname(jpg_path))
            # Save via Pillow directly: scipy.misc.imsave was deprecated and
            # removed in SciPy >= 1.2 (it delegated to PIL anyway). convert('RGB')
            # guards against palette/grayscale PNGs when writing JPEG output.
            # NOTE: the now-unused `from scipy.misc import imsave` file-level
            # import can be dropped once no other code in the file needs it.
            Image.open(png_path).convert('RGB').save(jpg_path)
| 43.961538 | 136 | 0.654418 | import os.path as osp
from PIL import Image
import numpy as np
from scipy.misc import imsave
from tqdm import tqdm
import shutil
from ...utils.file import walkdir
from ...utils.file import may_make_dir
from .market1501 import Market1501
class CUHK03NpDetectedJpg(Market1501):
has_pap_mask = True
has_ps_label = True
png_im_root = 'cuhk03-np'
im_root = 'cuhk03-np-jpg'
split_spec = {
'train': {'pattern': '{}/detected/bounding_box_train/*.jpg'.format(im_root), 'map_label': True},
'query': {'pattern': '{}/detected/query/*.jpg'.format(im_root), 'map_label': False},
'gallery': {'pattern': '{}/detected/bounding_box_test/*.jpg'.format(im_root), 'map_label': False},
}
def __init__(self, cfg, samples=None):
self.root = osp.join(cfg.root, cfg.name)
self._save_png_as_jpg()
super(CUHK03NpDetectedJpg, self).__init__(cfg, samples=samples)
def _get_kpt_key(self, im_path):
return im_path
def _get_ps_label_path(self, im_path):
return osp.join(self.root, im_path.replace(self.im_root, self.im_root + '_ps_label').replace('.jpg', '.png'))
def _save_png_as_jpg(self):
png_im_dir = osp.join(self.root, self.png_im_root)
jpg_im_dir = osp.join(self.root, self.im_root)
assert osp.exists(png_im_dir), "The PNG image dir {} should be placed inside {}".format(png_im_dir, self.root)
png_paths = list(walkdir(png_im_dir, '.png'))
if osp.exists(jpg_im_dir):
jpg_paths = list(walkdir(jpg_im_dir, '.jpg'))
if len(png_paths) == len(jpg_paths):
print('=> Found same number of JPG images. Skip transforming PNG to JPG.')
return
else:
shutil.rmtree(jpg_im_dir)
print('=> JPG image dir exists but does not contain same number of images. So it is removed and would be re-generated.')
for png_path in tqdm(png_paths, desc='PNG->JPG', miniters=2000, ncols=120, unit=' images'):
jpg_path = png_path.replace(self.png_im_root, self.im_root).replace('.png', '.jpg')
may_make_dir(osp.dirname(jpg_path))
imsave(jpg_path, np.array(Image.open(png_path)))
| true | true |
f728ccac088840d001707593dd1b8230a93eeb2a | 29 | py | Python | learn/python/module/Tadd/add.py | zhmz90/Daily | 25e13f6334c58d3a075b3fc502ecb34832392be7 | [
"MIT"
] | null | null | null | learn/python/module/Tadd/add.py | zhmz90/Daily | 25e13f6334c58d3a075b3fc502ecb34832392be7 | [
"MIT"
] | 25 | 2016-01-03T14:23:44.000Z | 2016-03-05T07:34:40.000Z | learn/python/module/Tadd/add.py | zhmz90/Daily | 25e13f6334c58d3a075b3fc502ecb34832392be7 | [
"MIT"
] | null | null | null | def add(x,y):
return x+y
| 9.666667 | 14 | 0.551724 | def add(x,y):
return x+y
| true | true |
f728cff82a71bcf8462465adc1eb5c0e3f090535 | 493 | py | Python | dev/logger.py | BenSDuggan/DAPT | 7f3f066e72aa51752220d598ba7cd7486835c485 | [
"BSD-3-Clause"
] | 1 | 2021-03-21T05:51:42.000Z | 2021-03-21T05:51:42.000Z | dev/logger.py | BenSDuggan/DAPT | 7f3f066e72aa51752220d598ba7cd7486835c485 | [
"BSD-3-Clause"
] | null | null | null | dev/logger.py | BenSDuggan/DAPT | 7f3f066e72aa51752220d598ba7cd7486835c485 | [
"BSD-3-Clause"
] | null | null | null | # Test logging stuff
import dapt, logging
# Ad-hoc script: check that DAPT works alongside a manually configured logger.
logger = logging.getLogger('sample logger')
logger.setLevel(logging.DEBUG)
# Console handler that passes through everything at DEBUG and above.
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# "<time> - <logger name> - <level> - <message>" output format.
ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
# Attach the console handler so records actually reach the console.
logger.addHandler(ch)
logger.warning('sup')  # smoke-test the handler/format wiring
# Exercise DAPT: pull the next parameter set from the bundled sample database.
db = dapt.tools.sample_db()
param = dapt.Param(db)
params = param.next_parameters()
| 17.607143 | 90 | 0.732252 |
import dapt, logging
logger = logging.getLogger('sample logger')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(ch)
logger.warning('sup')
db = dapt.tools.sample_db()
param = dapt.Param(db)
params = param.next_parameters()
| true | true |
f728d1273e634e458a98a003a6719a0316202510 | 3,444 | py | Python | events/animecon2016/models.py | darkismus/kompassi | 35dea2c7af2857a69cae5c5982b48f01ba56da1f | [
"CC-BY-3.0"
] | 13 | 2015-11-29T12:19:12.000Z | 2021-02-21T15:42:11.000Z | events/animecon2016/models.py | darkismus/kompassi | 35dea2c7af2857a69cae5c5982b48f01ba56da1f | [
"CC-BY-3.0"
] | 23 | 2015-04-29T19:43:34.000Z | 2021-02-10T05:50:17.000Z | events/animecon2016/models.py | darkismus/kompassi | 35dea2c7af2857a69cae5c5982b48f01ba56da1f | [
"CC-BY-3.0"
] | 11 | 2015-09-20T18:59:00.000Z | 2020-02-07T08:47:34.000Z | from django.db import models
from core.utils import validate_slug
from labour.models import ObsoleteSignupExtraBaseV1
# Choices for SignupExtra.total_work: (stored value, Finnish display text).
TOTAL_WORK_CHOICES = [
    ('minimi', 'Haluan tehdä vain minimityöpanoksen (JV: 10h, muut: 8h)'),
    ('ekstra', 'Olen valmis tekemään lisätunteja'),
]
# Label/help text for the personal identification number field; per the help
# text it is only meant to be filled by applicants for the uncarded security
# steward ("kortiton järjestyksenvalvoja") role.
KORTITON_JV_HETU_LABEL = 'Henkilötunnus'
KORTITON_JV_HETU_HELP_TEXT = 'HUOM! Täytä tämä kenttä vain, jos haet <strong>kortittomaksi järjestyksenvalvojaksi</strong>.'
class SimpleChoice(models.Model):
    """Abstract base for name-only choice models (special diets, nights)."""
    name = models.CharField(max_length=63)
    def __str__(self):
        return self.name
    class Meta:
        abstract = True
class SpecialDiet(SimpleChoice):
    """A selectable special-diet option for the signup form."""
    pass
class Night(SimpleChoice):
    """A night of the event for which floor lodging can be requested."""
    pass
class SignupExtra(ObsoleteSignupExtraBaseV1):
    """Event-specific extra signup data collected from Animecon volunteers
    (desired workload, dietary needs, lodging nights, prior experience, etc.).
    All user-facing labels/help texts are in Finnish by design.
    """
    total_work = models.CharField(max_length=15,
        verbose_name='Toivottu kokonaistyömäärä',
        help_text='Kuinka paljon haluat tehdä töitä yhteensä tapahtuman aikana?',
        choices=TOTAL_WORK_CHOICES,
    )

    # Only filled by applicants for the uncarded security steward role
    # (see KORTITON_JV_HETU_HELP_TEXT).
    personal_identification_number = models.CharField(
        max_length=12,
        verbose_name=KORTITON_JV_HETU_LABEL,
        help_text=KORTITON_JV_HETU_HELP_TEXT,
        default='',
        blank=True,
    )

    want_certificate = models.BooleanField(
        default=False,
        verbose_name='Haluan todistuksen työskentelystäni Animeconissa',
    )

    certificate_delivery_address = models.TextField(
        blank=True,
        verbose_name='Työtodistuksen toimitusosoite',
        help_text='Jos haluat työtodistuksen, täytä tähän kenttään postiosoite (katuosoite, '
                  'postinumero ja postitoimipaikka) johon haluat todistuksen toimitettavan.',
    )

    special_diet = models.ManyToManyField(
        SpecialDiet,
        blank=True,
        verbose_name='Erikoisruokavalio'
    )

    special_diet_other = models.TextField(
        blank=True,
        verbose_name='Muu erikoisruokavalio',
        help_text='Jos noudatat erikoisruokavaliota, jota ei ole yllä olevassa listassa, '
                  'ilmoita se tässä. Tapahtuman järjestäjä pyrkii ottamaan erikoisruokavaliot '
                  'huomioon, mutta kaikkia erikoisruokavalioita ei välttämättä pystytä järjestämään.'
    )

    lodging_needs = models.ManyToManyField(Night,
        blank=True,
        verbose_name='Tarvitsen lattiamajoitusta',
        help_text='Ruksaa ne yöt, joille tarvitset lattiamajoitusta. Lattiamajoitus sijaitsee '
                  'kävelymatkan päässä tapahtumapaikalta.',
    )

    prior_experience = models.TextField(
        blank=True,
        verbose_name='Työkokemus',
        help_text='Kerro tässä kentässä, jos sinulla on aiempaa kokemusta vastaavista '
                  'tehtävistä tai muuta sellaista työkokemusta, josta arvioit olevan hyötyä '
                  'hakemassasi tehtävässä.'
    )

    free_text = models.TextField(
        blank=True,
        verbose_name='Vapaa alue',
        help_text='Jos haluat sanoa hakemuksesi käsittelijöille jotain sellaista, jolle ei ole '
                  'omaa kenttää yllä, käytä tätä kenttää.'
    )

    @classmethod
    def get_form_class(cls):
        # Imported lazily to avoid a circular import with the forms module.
        from .forms import SignupExtraForm
        return SignupExtraForm

    @property
    def formatted_lodging_needs(self):
        """One line per known night: '<night>: <need text>'."""
        # Fetch the selected night pks in a single query instead of issuing
        # one `.exists()` query per Night (avoids an N+1 query pattern while
        # producing exactly the same output).
        selected_pks = set(self.lodging_needs.values_list('pk', flat=True))
        return "\n".join("{night}: {need}".format(
            night=night.name,
            need='Tarvitsee lattiamajoitusta' if night.pk in selected_pks else 'Ei tarvetta lattiamajoitukselle',
        ) for night in Night.objects.all())
| 31.888889 | 136 | 0.695412 | from django.db import models
from core.utils import validate_slug
from labour.models import ObsoleteSignupExtraBaseV1
TOTAL_WORK_CHOICES = [
('minimi', 'Haluan tehdä vain minimityöpanoksen (JV: 10h, muut: 8h)'),
('ekstra', 'Olen valmis tekemään lisätunteja'),
]
KORTITON_JV_HETU_LABEL = 'Henkilötunnus'
KORTITON_JV_HETU_HELP_TEXT = 'HUOM! Täytä tämä kenttä vain, jos haet <strong>kortittomaksi järjestyksenvalvojaksi</strong>.'
class SimpleChoice(models.Model):
name = models.CharField(max_length=63)
def __str__(self):
return self.name
class Meta:
abstract = True
class SpecialDiet(SimpleChoice):
pass
class Night(SimpleChoice):
pass
class SignupExtra(ObsoleteSignupExtraBaseV1):
    """Event-specific volunteer signup fields for an Animecon event.

    All ``verbose_name``/``help_text`` values are user-facing Finnish form
    strings and must be kept as-is.
    """

    # Desired total workload during the event; option set comes from
    # TOTAL_WORK_CHOICES defined elsewhere in this module.
    total_work = models.CharField(max_length=15,
        verbose_name='Toivottu kokonaistyömäärä',
        help_text='Kuinka paljon haluat tehdä töitä yhteensä tapahtuman aikana?',
        choices=TOTAL_WORK_CHOICES,
    )

    # Personal identity code — presumably the Finnish "hetu", collected only
    # from security volunteers without a guard card (labels come from the
    # KORTITON_JV_* constants defined elsewhere); optional for everyone else.
    personal_identification_number = models.CharField(
        max_length=12,
        verbose_name=KORTITON_JV_HETU_LABEL,
        help_text=KORTITON_JV_HETU_HELP_TEXT,
        default='',
        blank=True,
    )

    # Whether the volunteer wants a written work certificate.
    want_certificate = models.BooleanField(
        default=False,
        verbose_name='Haluan todistuksen työskentelystäni Animeconissa',
    )

    # Postal address for the certificate; only relevant when
    # want_certificate is set, hence blank=True.
    certificate_delivery_address = models.TextField(
        blank=True,
        verbose_name='Työtodistuksen toimitusosoite',
        help_text='Jos haluat työtodistuksen, täytä tähän kenttään postiosoite (katuosoite, '
        'postinumero ja postitoimipaikka) johon haluat todistuksen toimitettavan.',
    )

    # Dietary restrictions: a closed catalogue (SpecialDiet rows) plus a
    # free-text field for anything not in the catalogue.
    special_diet = models.ManyToManyField(
        SpecialDiet,
        blank=True,
        verbose_name='Erikoisruokavalio'
    )
    special_diet_other = models.TextField(
        blank=True,
        verbose_name='Muu erikoisruokavalio',
        help_text='Jos noudatat erikoisruokavaliota, jota ei ole yllä olevassa listassa, '
        'ilmoita se tässä. Tapahtuman järjestäjä pyrkii ottamaan erikoisruokavaliot '
        'huomioon, mutta kaikkia erikoisruokavalioita ei välttämättä pystytä järjestämään.'
    )

    # Nights for which the volunteer needs floor lodging (one Night row per
    # event night).
    lodging_needs = models.ManyToManyField(Night,
        blank=True,
        verbose_name='Tarvitsen lattiamajoitusta',
        help_text='Ruksaa ne yöt, joille tarvitset lattiamajoitusta. Lattiamajoitus sijaitsee '
        'kävelymatkan päässä tapahtumapaikalta.',
    )

    # Free-text fields shown to the application reviewers.
    prior_experience = models.TextField(
        blank=True,
        verbose_name='Työkokemus',
        help_text='Kerro tässä kentässä, jos sinulla on aiempaa kokemusta vastaavista '
        'tehtävistä tai muuta sellaista työkokemusta, josta arvioit olevan hyötyä '
        'hakemassasi tehtävässä.'
    )
    free_text = models.TextField(
        blank=True,
        verbose_name='Vapaa alue',
        help_text='Jos haluat sanoa hakemuksesi käsittelijöille jotain sellaista, jolle ei ole '
        'omaa kenttää yllä, käytä tätä kenttää.'
    )

    @classmethod
    def get_form_class(cls):
        # Imported lazily to avoid a circular import between models and forms.
        from .forms import SignupExtraForm
        return SignupExtraForm

    @property
    def formatted_lodging_needs(self):
        # Human-readable, one line per event night, e.g.
        # "<night>: Tarvitsee lattiamajoitusta".
        # NOTE(review): issues one exists() query per Night row (N+1);
        # acceptable for the small number of event nights.
        return "\n".join("{night}: {need}".format(
            night=night.name,
            need='Tarvitsee lattiamajoitusta' if self.lodging_needs.filter(pk=night.pk).exists() else 'Ei tarvetta lattiamajoitukselle',
        ) for night in Night.objects.all())
| true | true |
f728d140f24c12ba2622aebd1b1ce26996d4e5c0 | 478 | py | Python | configs/denseclip_plus/anno_free/denseclip_plus_r50x16_deeplabv2_r101-d8_512x512_2k_voc12aug_20.py | buoyancy99/DenseCLIP | eac7810ca17c93aaf59f2a6b6c86ccbb4cdfcc9b | [
"Apache-2.0"
] | 11 | 2022-02-04T01:09:45.000Z | 2022-03-08T05:49:16.000Z | configs/denseclip_plus/anno_free/denseclip_plus_r50x16_deeplabv2_r101-d8_512x512_2k_voc12aug_20.py | buoyancy99/DenseCLIP | eac7810ca17c93aaf59f2a6b6c86ccbb4cdfcc9b | [
"Apache-2.0"
] | null | null | null | configs/denseclip_plus/anno_free/denseclip_plus_r50x16_deeplabv2_r101-d8_512x512_2k_voc12aug_20.py | buoyancy99/DenseCLIP | eac7810ca17c93aaf59f2a6b6c86ccbb4cdfcc9b | [
"Apache-2.0"
] | 1 | 2022-02-03T10:29:40.000Z | 2022-02-03T10:29:40.000Z | _base_ = './denseclip_plus_r50_deeplabv2_r101-d8_512x512_2k_voc12aug_20.py'
model = dict(
decode_head=dict(
clip_channels=3072,
channels=768,
text_channels=768,
text_embeddings_path='pretrain/voc_RN50x16_clip_text.pth',
clip_weights_path='pretrain/RN50x16_clip_weights.pth',
clip_cfg=dict(
type='ResNetClip',
stem_channels=96,
base_channels=96,
depth='50x16'
),
)
) | 28.117647 | 75 | 0.627615 | _base_ = './denseclip_plus_r50_deeplabv2_r101-d8_512x512_2k_voc12aug_20.py'
# RN50x16 variant of the DenseCLIP+ anno-free VOC12-aug config: overrides only
# the CLIP-related decode-head settings; everything else is inherited from the
# _base_ config referenced above.
model = dict(
    decode_head=dict(
        clip_channels=3072,  # presumably the RN50x16 visual feature width — confirm against checkpoint
        channels=768,
        text_channels=768,  # presumably the RN50x16 text embedding width
        text_embeddings_path='pretrain/voc_RN50x16_clip_text.pth',  # precomputed class-name embeddings
        clip_weights_path='pretrain/RN50x16_clip_weights.pth',
        clip_cfg=dict(
            type='ResNetClip',
            stem_channels=96,
            base_channels=96,
            depth='50x16'
        ),
    )
)
f728d153eaba3bceb0411414e7065dcc10802f7d | 3,103 | py | Python | Other/client_classes.py | botexpert/Chatter | 8cd3262e38c58eae0fcbc49b3973f883a3185a48 | [
"MIT"
] | null | null | null | Other/client_classes.py | botexpert/Chatter | 8cd3262e38c58eae0fcbc49b3973f883a3185a48 | [
"MIT"
] | null | null | null | Other/client_classes.py | botexpert/Chatter | 8cd3262e38c58eae0fcbc49b3973f883a3185a48 | [
"MIT"
] | 2 | 2019-07-18T08:35:23.000Z | 2019-07-18T09:24:00.000Z | import zmq
from threading import Thread
import time
import json
class StandaloneClient:
    """Peer-to-peer ZeroMQ chat client.

    ``run()`` spawns three daemon threads (heartbeat responder, heartbeat
    prober and message listener) and then blocks in the interactive sender
    loop on the calling thread.  ``user`` is the local peer and ``target``
    the remote one; both are expected to expose ``name`` (bytes),
    ``message_port`` and ``heart_port``.
    """

    def __init__(self, interface, host, user, target):
        self.interface = interface  # local address to bind sockets on
        self.host = host  # remote peer's address
        self.connect_port = target.message_port  # peer's inbound message port
        self.bind_port = user.message_port  # our inbound message port
        self.heart_port = user.heart_port  # our heartbeat responder port
        self.echo_port = target.heart_port  # peer's heartbeat responder port
        self.context = zmq.Context.instance()
        self.ID = user.name
        self.target_ID = target.name

    def run(self):
        """Start the background threads, then enter the blocking sender loop."""
        heart_thread = Thread(target=self.heart, name='heart_thread')
        heart_thread.daemon = True
        heart_thread.start()
        echo_thread = Thread(target=self.echo, name='echo_thread')
        echo_thread.daemon = True
        echo_thread.start()
        listen_thread = Thread(target=self.listener, name='listener_thread')
        listen_thread.daemon = True
        listen_thread.start()
        self.sender()

    def listener(self):
        """Receive and print incoming messages (runs in its own thread)."""
        # DEALER with an explicit identity so the peer's ROUTER socket can
        # address frames to us by name.
        listen_socket = self.context.socket(zmq.DEALER)
        listen_socket.setsockopt(zmq.IDENTITY, self.ID)
        listen_socket.connect(
            "tcp://{}:{}".format(self.host, self.connect_port))
        while True:
            if listen_socket.poll():  # block until a message is readable
                message = listen_socket.recv_json()
                print(message)

    def sender(self):
        """Read lines from stdin and send them to the target peer (blocking)."""
        sender_socket = self.context.socket(zmq.ROUTER)
        sender_socket.bind(
            "tcp://{}:{}".format(self.interface, self.bind_port))
        while True:
            # JSON-encode the line so the peer can use recv_json().
            send_text = json.dumps(input()).encode()
            # ROUTER framing: [destination identity, payload].
            send_message = [self.target_ID, send_text]
            sender_socket.send_multipart(send_message)

    def heart(self):
        """Answer the peer's liveness pings (REP side of ping/pong)."""
        heart_socket = self.context.socket(zmq.REP)
        heart_socket.bind(
            "tcp://{}:{}".format(self.interface, self.heart_port))
        while True:
            time.sleep(1)  # throttle; delays each pong by up to 1 second
            heart_socket.recv()
            heart_socket.send(b"pong")

    def echo(self):
        """Ping the peer repeatedly and warn when no pong arrives in time."""
        echo_socket = self.context.socket(zmq.REQ)
        echo_socket.connect("tcp://{}:{}".format(self.host, self.echo_port))
        # REQ_RELAXED allows sending a new request even when the previous one
        # got no reply (a strict REQ socket would deadlock in that case).
        echo_socket.setsockopt(zmq.REQ_RELAXED, 1)
        while True:
            echo_socket.send(b"ping")
            if not echo_socket.poll(2500):  # wait up to 2.5 s for the pong
                print('The other client is offline.')
                print('They might not receive sent messages.')
# Class consisting of basic user information.
class User:
    """Holds the identity (as bytes) and the two port numbers of a chat peer."""

    def __init__(self, name, message_port, heart_port):
        # ZeroMQ socket identities must be byte strings, so the name is
        # UTF-8 encoded once, up front.
        encoded_name = name.encode("utf-8")
        self.name = encoded_name
        self.message_port = message_port
        self.heart_port = heart_port
| 36.505882 | 77 | 0.63777 | import zmq
from threading import Thread
import time
import json
class StandaloneClient:
def __init__(self, interface, host, user, target):
self.interface = interface
self.host = host
self.connect_port = target.message_port
self.bind_port = user.message_port
self.heart_port = user.heart_port
self.echo_port = target.heart_port
self.context = zmq.Context.instance()
self.ID = user.name
self.target_ID = target.name
def run(self):
heart_thread = Thread(target=self.heart, name='heart_thread')
heart_thread.daemon = True
heart_thread.start()
echo_thread = Thread(target=self.echo, name='echo_thread')
echo_thread.daemon = True
echo_thread.start()
listen_thread = Thread(target=self.listener, name='listener_thread')
listen_thread.daemon = True
listen_thread.start()
self.sender()
def listener(self):
listen_socket = self.context.socket(zmq.DEALER)
listen_socket.setsockopt(zmq.IDENTITY, self.ID)
listen_socket.connect(
"tcp://{}:{}".format(self.host, self.connect_port))
while True:
if listen_socket.poll():
message = listen_socket.recv_json()
print(message)
def sender(self):
sender_socket = self.context.socket(zmq.ROUTER)
sender_socket.bind(
"tcp://{}:{}".format(self.interface, self.bind_port))
while True:
send_text = json.dumps(input()).encode()
send_message = [self.target_ID, send_text]
sender_socket.send_multipart(send_message)
def heart(self):
heart_socket = self.context.socket(zmq.REP)
heart_socket.bind(
"tcp://{}:{}".format(self.interface, self.heart_port))
while True:
time.sleep(1)
heart_socket.recv()
heart_socket.send(b"pong")
def echo(self):
echo_socket = self.context.socket(zmq.REQ)
echo_socket.connect("tcp://{}:{}".format(self.host, self.echo_port))
echo_socket.setsockopt(zmq.REQ_RELAXED, 1)
while True:
echo_socket.send(b"ping")
if not echo_socket.poll(2500):
print('The other client is offline.')
print('They might not receive sent messages.')
class User:
def __init__(self, name, message_port, heart_port):
self.name = name.encode()
self.message_port = message_port
self.heart_port = heart_port
| true | true |
f728d1953d721dca8d38921c3ee6a5ba731b6ad2 | 5,511 | py | Python | pandagg/types.py | alkemics/panda | 39e50d9b2408480ebf70c1a2d1a9fa12c55907c5 | [
"Apache-2.0"
] | 13 | 2020-03-03T20:59:08.000Z | 2022-02-16T09:51:40.000Z | pandagg/types.py | alkemics/panda | 39e50d9b2408480ebf70c1a2d1a9fa12c55907c5 | [
"Apache-2.0"
] | 57 | 2020-06-21T18:48:20.000Z | 2022-03-16T09:52:39.000Z | pandagg/types.py | alkemics/panda | 39e50d9b2408480ebf70c1a2d1a9fa12c55907c5 | [
"Apache-2.0"
] | 2 | 2020-03-03T20:59:20.000Z | 2020-05-10T20:31:53.000Z | from typing_extensions import TypedDict, Literal
from typing import Optional, Dict, Any, List, Union

# Typing surface for Elasticsearch request/response payloads.  These are
# aliases and TypedDicts only — no runtime behavior; any key rename would
# change the public typing API, so upstream oddities are kept and annotated.

# --- generic clause building blocks ----------------------------------------
ClauseName = str
ClauseType = str
ClauseBody = Dict[str, Any]
Meta = Dict[str, Any]

# Script
class Script(TypedDict, total=False):
    # Painless (or other) script descriptor as accepted by Elasticsearch.
    lang: str
    id: str
    source: str
    params: Dict[str, Any]

GapPolicy = Literal["skip", "insert_zeros", "keep_values"]

# Query
QueryType = ClauseType
QueryName = ClauseName
QueryClauseDict = Dict[QueryType, ClauseBody]

# Aggs
AggName = ClauseName
AggType = ClauseType
AggClauseDict = Dict[AggType, ClauseBody]
NamedAggsDict = Dict[AggName, AggClauseDict]
BucketKeyAtom = Union[None, str, float]
CompositeBucketKey = Dict[AggName, BucketKeyAtom]
BucketKey = Union[BucketKeyAtom, CompositeBucketKey]
BucketDict = Dict[str, Any]
# Functional TypedDict syntax is required here because "from" is a Python
# keyword and cannot be a class attribute.
RangeDict = TypedDict(
    "RangeDict", {"from": float, "to": float, "key": str, "mask": str}, total=False
)
DistanceType = Literal["arc", "plane"]
ValidationMethod = Literal["STRICT", "COERCE", "IGNORE_MALFORMED"]
ExecutionHint = Literal["map", "global_ordinals", "bytes_hash"]
# https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_value_sources
CompositeSource = AggClauseDict
# https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_pagination
AfterKey = Dict[str, Any]
DocSource = Dict[str, Any]
SettingsDict = Dict[str, Any]
AliasName = str

class AliasValue(TypedDict, total=False):
    # Per-alias options as stored under an index's "aliases" mapping.
    filter: QueryClauseDict
    index_routing: str
    is_hidden: bool
    is_write_index: bool
    # NOTE(review): "rooting" looks like a typo of "routing", but it is part
    # of the published typing surface — kept as-is.
    rooting: str
    search_routing: str

IndexAliases = Dict[AliasName, AliasValue]
IndexName = str

# Mappings
FieldName = ClauseName
FieldType = ClauseType
FieldClauseDict = Dict[str, Any]
FieldPropertiesDict = Dict[FieldName, FieldClauseDict]

class MappingsDict(TypedDict, total=False):
    properties: FieldPropertiesDict
    dynamic: bool

class SourceIncludeDict(TypedDict, total=False):
    # "_source" filtering: include/exclude field name patterns.
    includes: Union[str, List[str]]
    excludes: Union[str, List[str]]

class RunTimeMappingDict(TypedDict, total=False):
    type: str
    script: str

class PointInTimeDict(TypedDict, total=False):
    id: str
    keep_alive: str

class FieldDict(TypedDict, total=False):
    # Entry of "fields"/"docvalue_fields": a field name plus optional format.
    field: str
    format: str

# Search request body.  Functional syntax again because of the reserved
# keys "from" and "_source" (leading underscore).
SearchDict = TypedDict(
    "SearchDict",
    {
        "aggs": NamedAggsDict,
        "aggregations": NamedAggsDict,
        "docvalue_fields": List[Union[str, FieldDict]],
        "fields": List[Union[str, FieldDict]],
        "explain": bool,
        "from": int,
        "highlight": Dict[str, Any],
        "indices_boost": List[Dict[IndexName, float]],
        "min_score": float,
        "pit": PointInTimeDict,
        "query": QueryClauseDict,
        "post_filter": QueryClauseDict,
        "runtime_mappings": Dict[FieldName, RunTimeMappingDict],
        "seq_no_primary_term": bool,
        "script_fields": Dict[str, Any],
        "size": int,
        "suggest": Dict[str, Any],
        "_source": Union[bool, str, List[str], SourceIncludeDict],
        "sort": List[Union[str, Dict[str, Any]]],
        "stats": List[str],
        "terminate_after": int,
        "timeout": Any,
        "version": bool,
    },
    total=False,
)

# --- aggregation responses --------------------------------------------------
BucketsDict = Dict[BucketKeyAtom, BucketDict]
Buckets = Union[BucketsDict, List[BucketDict]]

class BucketsWrapperDict(TypedDict, total=False):
    buckets: Buckets
    doc_count_error_upper_bound: int
    sum_other_doc_count: int

AggClauseResponseDict = Union[BucketsWrapperDict, BucketDict]
AggregationsResponseDict = Dict[AggName, AggClauseResponseDict]

# --- search responses -------------------------------------------------------
class HitDict(TypedDict, total=False):
    _index: str
    _id: str
    _source: DocSource
    _score: float
    fields: Dict[str, List[Any]]
    highlight: Dict[str, List[str]]

Relation = Literal["eq", "gte"]

class TotalDict(TypedDict, total=False):
    value: int
    relation: Relation

class HitsDict(TypedDict, total=False):
    total: TotalDict
    hits: List[HitDict]
    max_score: Optional[float]

class ShardsDict(TypedDict, total=False):
    total: int
    successful: int
    skipped: int
    failed: int

class ProfileShardDict(TypedDict, total=False):
    id: str
    searches: List
    aggregations: List

class ProfileDict(TypedDict, total=False):
    shards: List[ProfileShardDict]

class SuggestedItemDict(TypedDict, total=False):
    text: str
    offset: int
    length: int
    options: List[Dict[str, Any]]

class SearchResponseDict(TypedDict, total=False):
    _scroll_id: str
    _shards: ShardsDict
    timed_out: bool
    terminated_early: bool
    took: int
    hits: HitsDict
    aggregations: AggregationsResponseDict
    profile: ProfileDict
    suggest: Dict[str, List[SuggestedItemDict]]

class RetriesDict(TypedDict, total=False):
    bulk: int
    search: int

class DeleteByQueryResponse(TypedDict, total=False):
    took: int
    timed_out: bool
    total: int
    deleted: int
    batches: int
    version_conflicts: int
    noops: int
    retries: RetriesDict
    throttled_millis: int
    requests_per_second: float
    throttled_until_millis: int
    failures: List[Dict[str, Any]]

# --- bulk actions -----------------------------------------------------------
OpType = Literal["create", "index", "update", "delete"]

class Action(TypedDict, total=False):
    # One bulk-helper action; keys prefixed with "_" mirror the ES bulk API.
    _op_type: OpType
    _id: str
    _index: IndexName
    retry_on_conflict: int
    routing: str
    version: int
    version_type: Literal["external", "external_gte"]
    _source: DocSource
    doc: DocSource
    require_alias: bool
    dynamic_templates: Dict
| 23.253165 | 134 | 0.700417 | from typing_extensions import TypedDict, Literal
from typing import Optional, Dict, Any, List, Union
ClauseName = str
ClauseType = str
ClauseBody = Dict[str, Any]
Meta = Dict[str, Any]
class Script(TypedDict, total=False):
lang: str
id: str
source: str
params: Dict[str, Any]
GapPolicy = Literal["skip", "insert_zeros", "keep_values"]
QueryType = ClauseType
QueryName = ClauseName
QueryClauseDict = Dict[QueryType, ClauseBody]
AggName = ClauseName
AggType = ClauseType
AggClauseDict = Dict[AggType, ClauseBody]
NamedAggsDict = Dict[AggName, AggClauseDict]
BucketKeyAtom = Union[None, str, float]
CompositeBucketKey = Dict[AggName, BucketKeyAtom]
BucketKey = Union[BucketKeyAtom, CompositeBucketKey]
BucketDict = Dict[str, Any]
RangeDict = TypedDict(
"RangeDict", {"from": float, "to": float, "key": str, "mask": str}, total=False
)
DistanceType = Literal["arc", "plane"]
ValidationMethod = Literal["STRICT", "COERCE", "IGNORE_MALFORMED"]
ExecutionHint = Literal["map", "global_ordinals", "bytes_hash"]
e = AggClauseDict
Dict[str, Any]
DocSource = Dict[str, Any]
SettingsDict = Dict[str, Any]
AliasName = str
class AliasValue(TypedDict, total=False):
filter: QueryClauseDict
index_routing: str
is_hidden: bool
is_write_index: bool
rooting: str
search_routing: str
IndexAliases = Dict[AliasName, AliasValue]
IndexName = str
FieldName = ClauseName
FieldType = ClauseType
FieldClauseDict = Dict[str, Any]
FieldPropertiesDict = Dict[FieldName, FieldClauseDict]
class MappingsDict(TypedDict, total=False):
properties: FieldPropertiesDict
dynamic: bool
class SourceIncludeDict(TypedDict, total=False):
includes: Union[str, List[str]]
excludes: Union[str, List[str]]
class RunTimeMappingDict(TypedDict, total=False):
type: str
script: str
class PointInTimeDict(TypedDict, total=False):
id: str
keep_alive: str
class FieldDict(TypedDict, total=False):
field: str
format: str
SearchDict = TypedDict(
"SearchDict",
{
"aggs": NamedAggsDict,
"aggregations": NamedAggsDict,
"docvalue_fields": List[Union[str, FieldDict]],
"fields": List[Union[str, FieldDict]],
"explain": bool,
"from": int,
"highlight": Dict[str, Any],
"indices_boost": List[Dict[IndexName, float]],
"min_score": float,
"pit": PointInTimeDict,
"query": QueryClauseDict,
"post_filter": QueryClauseDict,
"runtime_mappings": Dict[FieldName, RunTimeMappingDict],
"seq_no_primary_term": bool,
"script_fields": Dict[str, Any],
"size": int,
"suggest": Dict[str, Any],
"_source": Union[bool, str, List[str], SourceIncludeDict],
"sort": List[Union[str, Dict[str, Any]]],
"stats": List[str],
"terminate_after": int,
"timeout": Any,
"version": bool,
},
total=False,
)
BucketsDict = Dict[BucketKeyAtom, BucketDict]
Buckets = Union[BucketsDict, List[BucketDict]]
class BucketsWrapperDict(TypedDict, total=False):
buckets: Buckets
doc_count_error_upper_bound: int
sum_other_doc_count: int
AggClauseResponseDict = Union[BucketsWrapperDict, BucketDict]
AggregationsResponseDict = Dict[AggName, AggClauseResponseDict]
class HitDict(TypedDict, total=False):
_index: str
_id: str
_source: DocSource
_score: float
fields: Dict[str, List[Any]]
highlight: Dict[str, List[str]]
Relation = Literal["eq", "gte"]
class TotalDict(TypedDict, total=False):
value: int
relation: Relation
class HitsDict(TypedDict, total=False):
total: TotalDict
hits: List[HitDict]
max_score: Optional[float]
class ShardsDict(TypedDict, total=False):
total: int
successful: int
skipped: int
failed: int
class ProfileShardDict(TypedDict, total=False):
id: str
searches: List
aggregations: List
class ProfileDict(TypedDict, total=False):
shards: List[ProfileShardDict]
class SuggestedItemDict(TypedDict, total=False):
text: str
offset: int
length: int
options: List[Dict[str, Any]]
class SearchResponseDict(TypedDict, total=False):
_scroll_id: str
_shards: ShardsDict
timed_out: bool
terminated_early: bool
took: int
hits: HitsDict
aggregations: AggregationsResponseDict
profile: ProfileDict
suggest: Dict[str, List[SuggestedItemDict]]
class RetriesDict(TypedDict, total=False):
bulk: int
search: int
class DeleteByQueryResponse(TypedDict, total=False):
took: int
timed_out: bool
total: int
deleted: int
batches: int
version_conflicts: int
noops: int
retries: RetriesDict
throttled_millis: int
requests_per_second: float
throttled_until_millis: int
failures: List[Dict[str, Any]]
OpType = Literal["create", "index", "update", "delete"]
class Action(TypedDict, total=False):
_op_type: OpType
_id: str
_index: IndexName
retry_on_conflict: int
routing: str
version: int
version_type: Literal["external", "external_gte"]
_source: DocSource
doc: DocSource
require_alias: bool
dynamic_templates: Dict
| true | true |
f728d1b47053ea894f30592b58ae6215583dad16 | 11,971 | py | Python | Lib/asynchat.py | Hadron/python | 73137f499ed658169f49273eee46845e3b53e800 | [
"PSF-2.0"
] | 332 | 2015-08-22T12:43:56.000Z | 2022-03-17T01:05:43.000Z | Lib/asynchat.py | Hadron/python | 73137f499ed658169f49273eee46845e3b53e800 | [
"PSF-2.0"
] | 36 | 2015-05-30T08:39:19.000Z | 2022-03-04T20:42:33.000Z | Lib/asynchat.py | Hadron/python | 73137f499ed658169f49273eee46845e3b53e800 | [
"PSF-2.0"
] | 74 | 2015-05-29T17:18:53.000Z | 2022-01-15T14:06:44.000Z | # -*- Mode: Python; tab-width: 4 -*-
# Id: asynchat.py,v 2.26 2000/09/07 22:29:26 rushing Exp
# Author: Sam Rushing <rushing@nightmare.com>
# ======================================================================
# Copyright 1996 by Sam Rushing
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Sam
# Rushing not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================
r"""A class supporting chat-style (command/response) protocols.
This class adds support for 'chat' style protocols - where one side
sends a 'command', and the other sends a response (examples would be
the common internet protocols - smtp, nntp, ftp, etc..).
The handle_read() method looks at the input stream for the current
'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n'
for multi-line output), calling self.found_terminator() on its
receipt.
for example:
Say you build an async nntp client using this class. At the start
of the connection, you'll have self.terminator set to '\r\n', in
order to process the single-line greeting. Just before issuing a
'LIST' command you'll set it to '\r\n.\r\n'. The output of the LIST
command will be accumulated (using your own 'collect_incoming_data'
method) up to the terminator, and then control will be returned to
you - by calling your self.found_terminator() method.
"""
import asyncore
from collections import deque
class async_chat(asyncore.dispatcher):
    """This is an abstract class.  You must derive from this class, and add
    the two methods collect_incoming_data() and found_terminator()"""

    # these are overridable defaults
    ac_in_buffer_size = 65536
    ac_out_buffer_size = 65536

    # we don't want to enable the use of encoding by default, because that is a
    # sign of an application bug that we don't want to pass silently
    use_encoding = 0
    encoding = 'latin-1'

    def __init__(self, sock=None, map=None):
        # for string terminator matching
        self.ac_in_buffer = b''

        # we use a list here rather than io.BytesIO for a few reasons...
        # del lst[:] is faster than bio.truncate(0)
        # lst = [] is faster than bio.truncate(0)
        self.incoming = []

        # we toss the use of the "simple producer" and replace it with
        # a pure deque, which the original fifo was a wrapping of
        self.producer_fifo = deque()
        asyncore.dispatcher.__init__(self, sock, map)

    def collect_incoming_data(self, data):
        """Override: called with each chunk of bytes read from the wire."""
        raise NotImplementedError("must be implemented in subclass")

    def _collect_incoming_data(self, data):
        self.incoming.append(data)

    def _get_data(self):
        d = b''.join(self.incoming)
        del self.incoming[:]
        return d

    def found_terminator(self):
        """Override: called whenever the current terminator is encountered."""
        raise NotImplementedError("must be implemented in subclass")

    def set_terminator(self, term):
        """Set the input delimiter.

        Can be a fixed string of any length, an integer, or None.
        """
        if isinstance(term, str) and self.use_encoding:
            term = bytes(term, self.encoding)
        elif isinstance(term, int) and term < 0:
            raise ValueError('the number of received bytes must be positive')
        self.terminator = term

    def get_terminator(self):
        return self.terminator

    # grab some more data from the socket,
    # throw it to the collector method,
    # check for the terminator,
    # if found, transition to the next state.

    def handle_read(self):
        try:
            data = self.recv(self.ac_in_buffer_size)
        except BlockingIOError:
            return
        except OSError:
            # "as why" removed: the exception object was never used.
            self.handle_error()
            return

        if isinstance(data, str) and self.use_encoding:
            # BUG FIX: was ``bytes(str, self.encoding)``, which passes the
            # *builtin* ``str`` type to bytes() and raises TypeError at
            # runtime; the received payload must be encoded instead.
            data = bytes(data, self.encoding)
        self.ac_in_buffer = self.ac_in_buffer + data

        # Continue to search for self.terminator in self.ac_in_buffer,
        # while calling self.collect_incoming_data.  The while loop
        # is necessary because we might read several data+terminator
        # combos with a single recv(4096).

        while self.ac_in_buffer:
            lb = len(self.ac_in_buffer)
            terminator = self.get_terminator()
            if not terminator:
                # no terminator, collect it all
                self.collect_incoming_data(self.ac_in_buffer)
                self.ac_in_buffer = b''
            elif isinstance(terminator, int):
                # numeric terminator
                n = terminator
                if lb < n:
                    self.collect_incoming_data(self.ac_in_buffer)
                    self.ac_in_buffer = b''
                    self.terminator = self.terminator - lb
                else:
                    self.collect_incoming_data(self.ac_in_buffer[:n])
                    self.ac_in_buffer = self.ac_in_buffer[n:]
                    self.terminator = 0
                    self.found_terminator()
            else:
                # 3 cases:
                # 1) end of buffer matches terminator exactly:
                #    collect data, transition
                # 2) end of buffer matches some prefix:
                #    collect data to the prefix
                # 3) end of buffer does not match any prefix:
                #    collect data
                terminator_len = len(terminator)
                index = self.ac_in_buffer.find(terminator)
                if index != -1:
                    # we found the terminator
                    if index > 0:
                        # don't bother reporting the empty string
                        # (source of subtle bugs)
                        self.collect_incoming_data(self.ac_in_buffer[:index])
                    self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
                    # This does the Right Thing if the terminator
                    # is changed here.
                    self.found_terminator()
                else:
                    # check for a prefix of the terminator
                    index = find_prefix_at_end(self.ac_in_buffer, terminator)
                    if index:
                        if index != lb:
                            # we found a prefix, collect up to the prefix
                            self.collect_incoming_data(self.ac_in_buffer[:-index])
                            self.ac_in_buffer = self.ac_in_buffer[-index:]
                        break
                    else:
                        # no prefix, collect it all
                        self.collect_incoming_data(self.ac_in_buffer)
                        self.ac_in_buffer = b''

    def handle_write(self):
        self.initiate_send()

    def handle_close(self):
        self.close()

    def push(self, data):
        """Queue bytes-like ``data`` for sending, chunked to the out buffer size."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            # BUG FIX: the message is now actually %-formatted; previously
            # the format string and the type were passed as *two* TypeError
            # arguments, so the "%r" was never substituted.
            raise TypeError('data argument must be byte-ish (%r)'
                            % type(data))
        sabs = self.ac_out_buffer_size
        if len(data) > sabs:
            for i in range(0, len(data), sabs):
                self.producer_fifo.append(data[i:i+sabs])
        else:
            self.producer_fifo.append(data)
        self.initiate_send()

    def push_with_producer(self, producer):
        self.producer_fifo.append(producer)
        self.initiate_send()

    def readable(self):
        "predicate for inclusion in the readable for select()"
        # cannot use the old predicate, it violates the claim of the
        # set_terminator method.

        # return (len(self.ac_in_buffer) <= self.ac_in_buffer_size)
        return 1

    def writable(self):
        "predicate for inclusion in the writable for select()"
        return self.producer_fifo or (not self.connected)

    def close_when_done(self):
        "automatically close this channel once the outgoing queue is empty"
        self.producer_fifo.append(None)

    def initiate_send(self):
        while self.producer_fifo and self.connected:
            first = self.producer_fifo[0]
            # handle empty string/buffer or None entry
            if not first:
                del self.producer_fifo[0]
                if first is None:
                    self.handle_close()
                    return

            # handle classic producer behavior
            obs = self.ac_out_buffer_size
            try:
                data = first[:obs]
            except TypeError:
                # not sliceable: assume a producer object with a more() method
                data = first.more()
                if data:
                    self.producer_fifo.appendleft(data)
                else:
                    del self.producer_fifo[0]
                continue

            if isinstance(data, str) and self.use_encoding:
                data = bytes(data, self.encoding)

            # send the data
            try:
                num_sent = self.send(data)
            except OSError:
                self.handle_error()
                return

            if num_sent:
                if num_sent < len(data) or obs < len(first):
                    self.producer_fifo[0] = first[num_sent:]
                else:
                    del self.producer_fifo[0]
            # we tried to send some actual data
            return

    def discard_buffers(self):
        # Emergencies only!
        self.ac_in_buffer = b''
        del self.incoming[:]
        self.producer_fifo.clear()
class simple_producer:
    """Serve ``data`` in chunks of at most ``buffer_size`` bytes.

    Once the data is exhausted, ``more()`` keeps returning ``b''``.
    """

    def __init__(self, data, buffer_size=512):
        self.data = data
        self.buffer_size = buffer_size

    def more(self):
        """Return the next chunk and drop it from the pending data."""
        limit = self.buffer_size
        if len(self.data) <= limit:
            # Final (possibly empty) chunk: hand everything out at once.
            chunk, self.data = self.data, b''
        else:
            chunk, self.data = self.data[:limit], self.data[limit:]
        return chunk
class fifo:
    """Deprecated thin wrapper around ``collections.deque``.

    Kept only for backward compatibility; emits a DeprecationWarning on
    construction.
    """

    def __init__(self, list=None):
        # The parameter is named ``list`` for historical API compatibility;
        # it shadows the builtin inside this method only.
        import warnings
        warnings.warn('fifo class will be removed in Python 3.6',
                      DeprecationWarning, stacklevel=2)
        self.list = deque(list) if list else deque()

    def __len__(self):
        return len(self.list)

    def is_empty(self):
        return not self.list

    def first(self):
        return self.list[0]

    def push(self, data):
        self.list.append(data)

    def pop(self):
        """Return ``(1, item)`` when an item was available, else ``(0, None)``."""
        try:
            return (1, self.list.popleft())
        except IndexError:
            return (0, None)
# Given 'haystack', see if any prefix of 'needle' is at its end. This
# assumes an exact match has already been checked. Return the number of
# characters matched.
# for example:
# f_p_a_e("qwerty\r", "\r\n") => 1
# f_p_a_e("qwertydkjf", "\r\n") => 0
# f_p_a_e("qwerty\r\n", "\r\n") => <undefined>
# this could maybe be made faster with a computed regex?
# [answer: no; circa Python-2.0, Jan 2001]
# new python: 28961/s
# old python: 18307/s
# re: 12820/s
# regex: 14035/s
def find_prefix_at_end(haystack, needle):
    """Return the length of the longest proper prefix of ``needle`` that
    ``haystack`` ends with (0 when none; exact matches are assumed to have
    been ruled out by the caller)."""
    for size in range(len(needle) - 1, 0, -1):
        if haystack.endswith(needle[:size]):
            return size
    return 0
| 35.522255 | 82 | 0.589174 |
import asyncore
from collections import deque
class async_chat(asyncore.dispatcher):
ac_in_buffer_size = 65536
ac_out_buffer_size = 65536
# sign of an application bug that we don't want to pass silently
use_encoding = 0
encoding = 'latin-1'
def __init__(self, sock=None, map=None):
self.ac_in_buffer = b''
self.incoming = []
self.producer_fifo = deque()
asyncore.dispatcher.__init__(self, sock, map)
def collect_incoming_data(self, data):
raise NotImplementedError("must be implemented in subclass")
def _collect_incoming_data(self, data):
self.incoming.append(data)
def _get_data(self):
d = b''.join(self.incoming)
del self.incoming[:]
return d
def found_terminator(self):
raise NotImplementedError("must be implemented in subclass")
def set_terminator(self, term):
if isinstance(term, str) and self.use_encoding:
term = bytes(term, self.encoding)
elif isinstance(term, int) and term < 0:
raise ValueError('the number of received bytes must be positive')
self.terminator = term
def get_terminator(self):
return self.terminator
def handle_read(self):
try:
data = self.recv(self.ac_in_buffer_size)
except BlockingIOError:
return
except OSError as why:
self.handle_error()
return
if isinstance(data, str) and self.use_encoding:
data = bytes(str, self.encoding)
self.ac_in_buffer = self.ac_in_buffer + data
while self.ac_in_buffer:
lb = len(self.ac_in_buffer)
terminator = self.get_terminator()
if not terminator:
self.collect_incoming_data(self.ac_in_buffer)
self.ac_in_buffer = b''
elif isinstance(terminator, int):
n = terminator
if lb < n:
self.collect_incoming_data(self.ac_in_buffer)
self.ac_in_buffer = b''
self.terminator = self.terminator - lb
else:
self.collect_incoming_data(self.ac_in_buffer[:n])
self.ac_in_buffer = self.ac_in_buffer[n:]
self.terminator = 0
self.found_terminator()
else:
terminator_len = len(terminator)
index = self.ac_in_buffer.find(terminator)
if index != -1:
if index > 0:
# (source of subtle bugs)
self.collect_incoming_data(self.ac_in_buffer[:index])
self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
# This does the Right Thing if the terminator
# is changed here.
self.found_terminator()
else:
# check for a prefix of the terminator
index = find_prefix_at_end(self.ac_in_buffer, terminator)
if index:
if index != lb:
# we found a prefix, collect up to the prefix
self.collect_incoming_data(self.ac_in_buffer[:-index])
self.ac_in_buffer = self.ac_in_buffer[-index:]
break
else:
# no prefix, collect it all
self.collect_incoming_data(self.ac_in_buffer)
self.ac_in_buffer = b''
def handle_write(self):
self.initiate_send()
def handle_close(self):
self.close()
def push(self, data):
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)',
type(data))
sabs = self.ac_out_buffer_size
if len(data) > sabs:
for i in range(0, len(data), sabs):
self.producer_fifo.append(data[i:i+sabs])
else:
self.producer_fifo.append(data)
self.initiate_send()
def push_with_producer(self, producer):
self.producer_fifo.append(producer)
self.initiate_send()
def readable(self):
# cannot use the old predicate, it violates the claim of the
# set_terminator method.
# return (len(self.ac_in_buffer) <= self.ac_in_buffer_size)
return 1
def writable(self):
return self.producer_fifo or (not self.connected)
def close_when_done(self):
self.producer_fifo.append(None)
def initiate_send(self):
while self.producer_fifo and self.connected:
first = self.producer_fifo[0]
# handle empty string/buffer or None entry
if not first:
del self.producer_fifo[0]
if first is None:
self.handle_close()
return
# handle classic producer behavior
obs = self.ac_out_buffer_size
try:
data = first[:obs]
except TypeError:
data = first.more()
if data:
self.producer_fifo.appendleft(data)
else:
del self.producer_fifo[0]
continue
if isinstance(data, str) and self.use_encoding:
data = bytes(data, self.encoding)
# send the data
try:
num_sent = self.send(data)
except OSError:
self.handle_error()
return
if num_sent:
if num_sent < len(data) or obs < len(first):
self.producer_fifo[0] = first[num_sent:]
else:
del self.producer_fifo[0]
# we tried to send some actual data
return
def discard_buffers(self):
# Emergencies only!
self.ac_in_buffer = b''
del self.incoming[:]
self.producer_fifo.clear()
class simple_producer:
def __init__(self, data, buffer_size=512):
self.data = data
self.buffer_size = buffer_size
def more(self):
if len(self.data) > self.buffer_size:
result = self.data[:self.buffer_size]
self.data = self.data[self.buffer_size:]
return result
else:
result = self.data
self.data = b''
return result
class fifo:
    """Deprecated FIFO wrapper around a deque; kept for compatibility."""

    def __init__(self, list=None):
        import warnings
        warnings.warn('fifo class will be removed in Python 3.6',
                      DeprecationWarning, stacklevel=2)
        # Empty/None seeds both start an empty deque.
        self.list = deque(list) if list else deque()

    def __len__(self):
        return len(self.list)

    def is_empty(self):
        return not self.list

    def first(self):
        """Peek at the oldest element without removing it."""
        return self.list[0]

    def push(self, data):
        self.list.append(data)

    def pop(self):
        """Return (1, item) for the oldest item, or (0, None) when empty."""
        if not self.list:
            return (0, None)
        return (1, self.list.popleft())
# Given 'haystack', see if any prefix of 'needle' is at its end. This
# assumes an exact match has already been checked. Return the number of
# characters matched.
# for example:
# f_p_a_e("qwerty\r", "\r\n") => 1
# f_p_a_e("qwertydkjf", "\r\n") => 0
# f_p_a_e("qwerty\r\n", "\r\n") => <undefined>
# this could maybe be made faster with a computed regex?
# [answer: no; circa Python-2.0, Jan 2001]
# new python: 28961/s
# old python: 18307/s
# re: 12820/s
# regex: 14035/s
def find_prefix_at_end(haystack, needle):
    """Return the length of the longest proper prefix of *needle* that
    appears at the end of *haystack*, or 0 if none does.

    An exact (full-needle) match is assumed to have been ruled out by
    the caller already.
    """
    # Try the longest candidate prefix first and shrink towards length 1.
    for size in range(len(needle) - 1, 0, -1):
        if haystack.endswith(needle[:size]):
            return size
    return 0
| true | true |
f728d21e7458186f8eea9ec854144e518d6767f5 | 9,283 | py | Python | test/functional/example_test.py | wolfoxonly/cocn | dc29dc24b79882765ecc09d7ba2241492d999124 | [
"MIT"
] | null | null | null | test/functional/example_test.py | wolfoxonly/cocn | dc29dc24b79882765ecc09d7ba2241492d999124 | [
"MIT"
] | null | null | null | test/functional/example_test.py | wolfoxonly/cocn | dc29dc24b79882765ecc09d7ba2241492d999124 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2017 The CloudComputingChain Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""An example functional test
The module-level docstring should include a high-level description of
what the test is doing. It's the first thing people see when they open
the file and should give the reader information about *what* the test
is testing and *how* it's being tested
"""
# Imports should be in PEP8 ordering (std library first, then third party
# libraries then local imports).
from collections import defaultdict
# Avoid wildcard * imports if possible
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
CInv,
P2PInterface,
mininode_lock,
msg_block,
msg_getdata,
network_thread_join,
network_thread_start,
)
from test_framework.test_framework import CloudComputingChainTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
wait_until,
)
# P2PInterface is a class containing callbacks to be executed when a P2P
# message is received from the node-under-test. Subclass P2PInterface and
# override the on_*() methods if you need custom behaviour.
class BaseNode(P2PInterface):
    """P2P connection to the node under test.

    Tracks every block announced over this connection so the test can
    assert on which blocks arrived and how many times each one did.
    """

    def __init__(self):
        """Initialise standard P2PInterface state, then add a per-hash
        counter of blocks received over this connection."""
        super().__init__()

        # Maps block sha256 -> number of times that block was received.
        self.block_receive_map = defaultdict(int)

    def on_block(self, message):
        """Count each incoming block by its hash."""
        block = message.block
        block.calc_sha256()
        self.block_receive_map[block.sha256] += 1

    def on_inv(self, message):
        """Ignore inv announcements; this test pulls blocks explicitly."""
        pass
def custom_function():
    """Stand-alone example of custom test behaviour; returns None.

    If this becomes generally useful to other tests, consider moving it
    into a module under test_framework.
    """
    # self.log only exists on the test framework instance, so a free
    # function like this cannot log through it.
    pass
class ExampleTest(CloudComputingChainTestFramework):
    # Each functional test is a subclass of the CloudComputingChainTestFramework class.
    # Override the set_test_params(), add_options(), setup_chain(), setup_network()
    # and setup_nodes() methods to customize the test setup as required.

    def set_test_params(self):
        """Override test parameters for your individual test.

        This method must be overridden and num_nodes must be explicitly set."""
        self.setup_clean_chain = True
        self.num_nodes = 3
        # Use self.extra_args to change command-line arguments for the nodes
        self.extra_args = [[], ["-logips"], []]
        # self.log.info("I've finished set_test_params")  # Oops! Can't run self.log before run_test()

    # Use add_options() to add specific command-line options for your test.
    # In practice this is not used very much, since the tests are mostly written
    # to be run in automated environments without command-line options.
    # def add_options()
    #     pass

    # Use setup_chain() to customize the node data directories. In practice
    # this is not used very much since the default behaviour is almost always
    # fine
    # def setup_chain():
    #     pass

    def setup_network(self):
        """Setup the test network topology

        Often you won't need to override this, since the standard network topology
        (linear: node0 <-> node1 <-> node2 <-> ...) is fine for most tests.

        If you do override this method, remember to start the nodes, assign
        them to self.nodes, connect them and then sync."""
        self.setup_nodes()
        # In this test, we're not connecting node2 to node0 or node1. Calls to
        # sync_all() should not include node2, since we're not expecting it to
        # sync.
        connect_nodes(self.nodes[0], 1)
        self.sync_all([self.nodes[0:1]])

    # Use setup_nodes() to customize the node start behaviour (for example if
    # you don't want to start all nodes at the start of the test).
    # def setup_nodes():
    #     pass

    def custom_method(self):
        """Do some custom behaviour for this test

        Define it in a method here because you're going to use it repeatedly.
        If you think it's useful in general, consider moving it to the base
        CloudComputingChainTestFramework class so other tests can use it."""
        self.log.info("Running custom_method")

    def run_test(self):
        """Main test logic"""

        # Create P2P connections to two of the nodes
        self.nodes[0].add_p2p_connection(BaseNode())

        # Start up network handling in another thread. This needs to be called
        # after the P2P connections have been created.
        network_thread_start()
        # wait_for_verack ensures that the P2P connection is fully up.
        self.nodes[0].p2p.wait_for_verack()

        # Generating a block on one of the nodes will get us out of IBD
        # (initial block download); block hashes are tracked as ints.
        blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
        self.sync_all([self.nodes[0:1]])

        # Notice above how we called an RPC by calling a method with the same
        # name on the node object. Notice also how we used a keyword argument
        # to specify a named RPC argument. Neither of those are defined on the
        # node object. Instead there's some __getattr__() magic going on under
        # the covers to dispatch unrecognised attribute calls to the RPC
        # interface.

        # Logs are nice. Do plenty of them. They can be used in place of comments for
        # breaking the test into sub-sections.
        self.log.info("Starting test!")

        self.log.info("Calling a custom function")
        custom_function()

        self.log.info("Calling a custom method")
        self.custom_method()

        self.log.info("Create some blocks")
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
        height = 1

        for i in range(10):
            # Use the mininode and blocktools functionality to manually build a block
            # Calling the generate() rpc is easier, but this allows us to exactly
            # control the blocks and transactions.
            block = create_block(self.tip, create_coinbase(height), self.block_time)
            block.solve()
            block_message = msg_block(block)
            # Send message is used to send a P2P message to the node over our P2PInterface
            self.nodes[0].p2p.send_message(block_message)
            self.tip = block.sha256
            blocks.append(self.tip)
            self.block_time += 1
            height += 1

        self.log.info("Wait for node1 to reach current tip (height 11) using RPC")
        self.nodes[1].waitforblockheight(11)

        self.log.info("Connect node2 and node1")
        connect_nodes(self.nodes[1], 2)

        self.log.info("Add P2P connection to node2")
        # We can't add additional P2P connections once the network thread has started. Disconnect the connection
        # to node0, wait for the network thread to terminate, then connect to node2. This is specific to
        # the current implementation of the network thread and may be improved in future.
        self.nodes[0].disconnect_p2ps()
        network_thread_join()
        self.nodes[2].add_p2p_connection(BaseNode())
        network_thread_start()
        self.nodes[2].p2p.wait_for_verack()

        self.log.info("Wait for node2 reach current tip. Test that it has propagated all the blocks to us")
        # Request every tracked block explicitly via a getdata message.
        getdata_request = msg_getdata()
        for block in blocks:
            getdata_request.inv.append(CInv(2, block))
        self.nodes[2].p2p.send_message(getdata_request)

        # wait_until() will loop until a predicate condition is met. Use it to test properties of the
        # P2PInterface objects.
        wait_until(lambda: sorted(blocks) == sorted(list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock)

        self.log.info("Check that each block was received only once")
        # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
        # messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
        # and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
        with mininode_lock:
            for block in self.nodes[2].p2p.block_receive_map.values():
                assert_equal(block, 1)
# Script entry point: run the example test when executed directly.
if __name__ == '__main__':
    ExampleTest().main()
| 41.627803 | 133 | 0.684692 |
from collections import defaultdict
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
CInv,
P2PInterface,
mininode_lock,
msg_block,
msg_getdata,
network_thread_join,
network_thread_start,
)
from test_framework.test_framework import CloudComputingChainTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
wait_until,
)
class BaseNode(P2PInterface):
def __init__(self):
super().__init__()
self.block_receive_map = defaultdict(int)
def on_block(self, message):
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
def on_inv(self, message):
pass
def custom_function():
ach functional test is a subclass of the CloudComputingChainTestFramework class.
# Override the set_test_params(), add_options(), setup_chain(), setup_network()
# and setup_nodes() methods to customize the test setup as required.
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
# Use self.extra_args to change command-line arguments for the nodes
self.extra_args = [[], ["-logips"], []]
# self.log.info("I've finished set_test_params")
# Use add_options() to add specific command-line options for your test.
# In practice this is not used very much, since the tests are mostly written
# to be run in automated environments without command-line options.
# def add_options()
# pass
# Use setup_chain() to customize the node data directories. In practice
# this is not used very much since the default behaviour is almost always
# fine
# def setup_chain():
# pass
def setup_network(self):
self.setup_nodes()
# In this test, we're not connecting node2 to node0 or node1. Calls to
# sync.
connect_nodes(self.nodes[0], 1)
self.sync_all([self.nodes[0:1]])
# Use setup_nodes() to customize the node start behaviour (for example if
# you don't want to start all nodes at the start of the test).
def custom_method(self):
self.log.info("Running custom_method")
def run_test(self):
self.nodes[0].add_p2p_connection(BaseNode())
network_thread_start()
self.nodes[0].p2p.wait_for_verack()
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
self.sync_all([self.nodes[0:1]])
# the covers to dispatch unrecognised attribute calls to the RPC
# interface.
# Logs are nice. Do plenty of them. They can be used in place of comments for
# breaking the test into sub-sections.
self.log.info("Starting test!")
self.log.info("Calling a custom function")
custom_function()
self.log.info("Calling a custom method")
self.custom_method()
self.log.info("Create some blocks")
self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
height = 1
for i in range(10):
# Use the mininode and blocktools functionality to manually build a block
# Calling the generate() rpc is easier, but this allows us to exactly
# control the blocks and transactions.
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
block_message = msg_block(block)
# Send message is used to send a P2P message to the node over our P2PInterface
self.nodes[0].p2p.send_message(block_message)
self.tip = block.sha256
blocks.append(self.tip)
self.block_time += 1
height += 1
self.log.info("Wait for node1 to reach current tip (height 11) using RPC")
self.nodes[1].waitforblockheight(11)
self.log.info("Connect node2 and node1")
connect_nodes(self.nodes[1], 2)
self.log.info("Add P2P connection to node2")
# We can't add additional P2P connections once the network thread has started. Disconnect the connection
self.nodes[0].disconnect_p2ps()
network_thread_join()
self.nodes[2].add_p2p_connection(BaseNode())
network_thread_start()
self.nodes[2].p2p.wait_for_verack()
self.log.info("Wait for node2 reach current tip. Test that it has propagated all the blocks to us")
getdata_request = msg_getdata()
for block in blocks:
getdata_request.inv.append(CInv(2, block))
self.nodes[2].p2p.send_message(getdata_request)
wait_until(lambda: sorted(blocks) == sorted(list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock)
self.log.info("Check that each block was received only once")
with mininode_lock:
for block in self.nodes[2].p2p.block_receive_map.values():
assert_equal(block, 1)
if __name__ == '__main__':
ExampleTest().main()
| true | true |
f728d223c4a4e54299bf0b1904134c960004cb1f | 2,497 | py | Python | core/learner.py | qwfy/cerl | 4d120929915ecf5ecf1b0fb07c862e80b5073f45 | [
"Apache-2.0"
] | null | null | null | core/learner.py | qwfy/cerl | 4d120929915ecf5ecf1b0fb07c862e80b5073f45 | [
"Apache-2.0"
] | null | null | null | core/learner.py | qwfy/cerl | 4d120929915ecf5ecf1b0fb07c862e80b5073f45 | [
"Apache-2.0"
] | null | null | null | # ******************************************************************************
# Copyright 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from core.off_policy_algo import Off_Policy_Algo
class Learner:
    """Learner object encapsulating a local learner

    Parameters:
        wwid (int): Worker/wormhole id used to tag this learner
        algo_name (str): Algorithm Identifier
        state_dim (int): State size
        action_dim (int): Action size
        actor_lr (float): Actor learning rate
        critic_lr (float): Critic learning rate
        gamma (float): Discount rate
        tau (float): Target network sync generate
        init_w (bool): Use kaimling normal to initialize?
        **td3args (**kwargs): arguments for TD3 algo
    """

    def __init__(self, wwid, algo_name, state_dim, action_dim, actor_lr, critic_lr, gamma, tau, init_w=True, **td3args):
        self.td3args = td3args
        # BUG FIX: the original assigned the *builtin* `id` function
        # (`self.id = id`); the intent is to record this learner's id.
        self.id = wwid
        self.algo = Off_Policy_Algo(wwid, algo_name, state_dim, action_dim, actor_lr, critic_lr, gamma, tau, init_w)

        # LEARNER STATISTICS
        self.fitnesses = []   # fitness of each completed episode
        self.ep_lens = []     # length of each completed episode
        self.value = None     # exponentially-smoothed fitness estimate
        self.visit_count = 0  # number of episodes attributed to this learner

    def update_parameters(self, replay_buffer, buffer_gpu, batch_size, iterations):
        """Run `iterations` gradient steps on minibatches from the replay buffer.

        Args:
            replay_buffer: buffer exposing sample(batch_size) -> (s, ns, a, r, done)
            buffer_gpu (bool): True if sampled tensors already live on the GPU
            batch_size (int): minibatch size per gradient step
            iterations (int): number of gradient steps to run
        """
        for _ in range(iterations):
            s, ns, a, r, done = replay_buffer.sample(batch_size)
            if not buffer_gpu:
                s = s.cuda()
                ns = ns.cuda()
                a = a.cuda()
                r = r.cuda()
                done = done.cuda()
            self.algo.update_parameters(s, ns, a, r, done, 1, **self.td3args)

    def update_stats(self, fitness, ep_len, gamma=0.2):
        """Record an episode result and update the smoothed value estimate.

        Args:
            fitness (float): episode return
            ep_len (int): episode length
            gamma (float): smoothing weight given to the newest fitness
        """
        self.visit_count += 1
        self.fitnesses.append(fitness)
        self.ep_lens.append(ep_len)

        # Exponential moving average of fitness; first episode seeds it.
        if self.value is None:
            self.value = fitness
        else:
            self.value = gamma * fitness + (1 - gamma) * self.value
| 36.720588 | 120 | 0.594313 |
from core.off_policy_algo import Off_Policy_Algo
class Learner:
def __init__(self, wwid, algo_name, state_dim, action_dim, actor_lr, critic_lr, gamma, tau, init_w=True, **td3args):
self.td3args = td3args;
self.id = id
self.algo = Off_Policy_Algo(wwid, algo_name, state_dim, action_dim, actor_lr, critic_lr, gamma, tau, init_w)
self.fitnesses = []
self.ep_lens = []
self.value = None
self.visit_count = 0
def update_parameters(self, replay_buffer, buffer_gpu, batch_size, iterations):
for _ in range(iterations):
s, ns, a, r, done = replay_buffer.sample(batch_size)
if not buffer_gpu:
s = s.cuda();
ns = ns.cuda();
a = a.cuda();
r = r.cuda();
done = done.cuda()
self.algo.update_parameters(s, ns, a, r, done, 1, **self.td3args)
def update_stats(self, fitness, ep_len, gamma=0.2):
self.visit_count += 1
self.fitnesses.append(fitness)
self.ep_lens.append(ep_len)
if self.value == None:
self.value = fitness
else:
self.value = gamma * fitness + (1 - gamma) * self.value
| true | true |
f728d22c5e806d12c24f9ac4884010f227897260 | 27,787 | py | Python | tests/tiling/explosive_source.py | opesci/seigen | 7d12eab05ed5a857601babe2933aa804c853de66 | [
"MIT"
] | 12 | 2017-08-05T00:01:33.000Z | 2019-06-12T13:18:29.000Z | tests/tiling/explosive_source.py | opesci/seigen | 7d12eab05ed5a857601babe2933aa804c853de66 | [
"MIT"
] | 8 | 2017-08-03T19:49:54.000Z | 2018-07-30T18:45:28.000Z | tests/tiling/explosive_source.py | opesci/seigen | 7d12eab05ed5a857601babe2933aa804c853de66 | [
"MIT"
] | 4 | 2017-08-04T10:15:10.000Z | 2018-08-03T05:41:18.000Z | """
This is an explicit DG method: we invert the mass matrix and perform
a matrix-vector multiplication to get the solution in a time step
"""
from math import *
import mpi4py
import numpy as np
from time import time
import sys
import os
import cProfile
from firedrake import *
from firedrake.petsc import PETSc
from pyop2.utils import cached_property
from pyop2.profiling import timed_region
from pyop2.base import _trace, Dat, DataSet
from pyop2.fusion.interface import loop_chain
from pyop2.logger import info, set_log_level, INFO
import coffee.base as ast
from utils import parser, output_time, calculate_sdepth, FusionSchemes
class ElasticLF4(object):
    r"""An elastic wave equation solver, using the finite element method
    for spatial discretisation, and a fourth-order leap-frog time-stepping scheme."""

    # Number of par_loops fused per loop_chain iteration, and number of
    # linear solves per timestep (4 velocity + 4 stress stages).
    loop_chain_length = 28
    num_solves = 8

    def __init__(self, mesh, family, degree, dimension, output=1, params=None):
        r""" Initialise a new elastic wave simulation.

        :param mesh: The underlying computational mesh of vertices and edges.
        :param str family: Specify whether CG or DG should be used.
        :param int degree: Use polynomial basis functions of this degree.
        :param int dimension: The spatial dimension of the problem (1, 2 or 3).
        :param int output: period, in timesteps, to write solution fields to a file.
        :param dict params: simulation and optimisation parameters
        :returns: None
        """
        self.degree = degree
        self.mesh = mesh
        self.dimension = dimension
        self.output = output
        self.tofile = params['tofile']

        self.S = TensorFunctionSpace(mesh, family, degree, name='S')
        self.U = VectorFunctionSpace(mesh, family, degree, name='U')
        # Assumes that the S and U function spaces are the same.
        self.S_tot_dofs = op2.MPI.COMM_WORLD.allreduce(self.S.dof_count, op=mpi4py.MPI.SUM)
        self.U_tot_dofs = op2.MPI.COMM_WORLD.allreduce(self.U.dof_count, op=mpi4py.MPI.SUM)
        info("Number of degrees of freedom (Velocity): %d" % self.U_tot_dofs)
        info("Number of degrees of freedom (Stress): %d" % self.S_tot_dofs)

        self.s = TrialFunction(self.S)
        self.v = TestFunction(self.S)
        self.u = TrialFunction(self.U)
        self.w = TestFunction(self.U)

        # Stress fields: old, two half-step stages, a temporary, and new.
        self.s0 = Function(self.S, name="StressOld")
        self.sh1 = Function(self.S, name="StressHalf1")
        self.stemp = Function(self.S, name="StressTemp")
        self.sh2 = Function(self.S, name="StressHalf2")
        self.s1 = Function(self.S, name="StressNew")

        # Velocity fields, mirroring the stress staging.
        self.u0 = Function(self.U, name="VelocityOld")
        self.uh1 = Function(self.U, name="VelocityHalf1")
        self.utemp = Function(self.U, name="VelocityTemp")
        self.uh2 = Function(self.U, name="VelocityHalf2")
        self.u1 = Function(self.U, name="VelocityNew")

        self.absorption_function = None
        self.source_function = None
        self.source_expression = None
        # NOTE(review): self.dt/density/mu/l are read later but only the
        # underscored backing fields are initialised here — presumably the
        # corresponding properties are defined elsewhere or set by the
        # driver script; confirm before standalone use.
        self._dt = None
        self._density = None
        self._mu = None
        self._l = None

        self.n = FacetNormal(self.mesh)
        self.I = Identity(self.dimension)

        # Tiling options
        self.tiling_size = params['tile_size']
        self.tiling_uf = params['num_unroll']
        self.tiling_mode = params['mode']
        self.tiling_halo = params['extra_halo']
        self.tiling_explicit = params['explicit_mode']
        self.tiling_explicit_id = params['explicit_mode_id']
        self.tiling_log = params['log']
        self.tiling_sdepth = params['s_depth']
        self.tiling_part = params['partitioning']
        self.tiling_coloring = params['coloring']
        self.tiling_glb_maps = params['use_glb_maps']
        self.tiling_prefetch = params['use_prefetch']

        # Mat-vec AST cache
        self.asts = {}

        if self.tofile:
            # File output streams; only rank 0 creates the directory tree,
            # then the path is broadcast to all ranks.
            platform = os.environ.get('NODENAME', 'unknown')
            tmpdir = os.environ['TMPDIR']
            base = os.path.join(tmpdir, 'output', platform,
                                'p%d' % self.degree, 'uf%d' % self.tiling_uf)
            if op2.MPI.COMM_WORLD.rank == 0:
                if not os.path.exists(base):
                    os.makedirs(base)
                sub_dirs = [d for d in os.listdir(base)
                            if os.path.isdir(os.path.join(base, d))]
                sub_dir = "%d_em%d_part%s_tile%s" % (len(sub_dirs),
                                                     self.tiling_explicit_id,
                                                     self.tiling_size if self.tiling_uf else 0,
                                                     self.tiling_part if self.tiling_uf else 'None')
                base = os.path.join(base, sub_dir)
                os.makedirs(base)
            op2.MPI.COMM_WORLD.barrier()
            base = op2.MPI.COMM_WORLD.bcast(base, root=0)
            self.u_stream = File(os.path.join(base, 'velocity.pvd'))
            self.s_stream = File(os.path.join(base, 'stress.pvd'))

    @property
    def absorption(self):
        r""" The absorption coefficient :math:`\sigma` for the absorption term

         .. math:: \sigma\mathbf{u}

        where :math:`\mathbf{u}` is the velocity field.
        """
        return self.absorption_function

    @absorption.setter
    def absorption(self, expression):
        r""" Setter function for the absorption field.

        :param firedrake.Expression expression: The expression to interpolate onto the absorption field.
        """
        self.absorption_function.interpolate(expression)

    # Source term
    @property
    def source(self):
        r""" The source term on the RHS of the velocity (or stress) equation. """
        return self.source_function

    @source.setter
    def source(self, expression):
        r""" Setter function for the source field.

        :param firedrake.Expression expression: The expression to interpolate onto the source field.
        """
        self.source_function.interpolate(expression)

    def assemble_inverse_mass(self):
        r""" Compute the inverse of the consistent mass matrix for the velocity and stress equations.

        :returns: None
        """
        # Inverse of the (consistent) mass matrix for the velocity equation.
        self.inverse_mass_velocity = assemble(inner(self.w, self.u)*dx, inverse=True)
        self.inverse_mass_velocity.assemble()
        self.imass_velocity = self.inverse_mass_velocity.M
        # Inverse of the (consistent) mass matrix for the stress equation.
        self.inverse_mass_stress = assemble(inner(self.v, self.s)*dx, inverse=True)
        self.inverse_mass_stress.assemble()
        self.imass_stress = self.inverse_mass_stress.M

    def copy_massmatrix_into_dat(self):
        """Extract the block-diagonal inverse mass matrices into PyOP2 Dats,
        one flattened (arity x arity) block per cell, so each solve reduces to
        a per-cell mat-vec kernel."""

        # Copy the velocity mass matrix into a Dat
        vmat = self.imass_velocity.handle
        dofs_per_entity = self.U.fiat_element.entity_dofs()
        dofs_per_entity = sum(self.mesh.make_dofs_per_plex_entity(dofs_per_entity))
        arity = dofs_per_entity*self.U.topological.dim
        self.velocity_mass_asdat = Dat(DataSet(self.mesh.cell_set, arity*arity), dtype='double')
        istart, iend = vmat.getOwnershipRange()
        # One index set per cell-block of the locally-owned rows.
        idxs = [PETSc.IS().createGeneral(np.arange(i, i+arity, dtype=np.int32),
                                         comm=PETSc.COMM_SELF)
                for i in range(istart, iend, arity)]
        submats = vmat.getSubMatrices(idxs, idxs)
        for i, m in enumerate(submats):
            self.velocity_mass_asdat.data[i] = m[:, :].flatten()
        info("Computed velocity mass matrix")

        # Copy the stress mass matrix into a Dat
        smat = self.imass_stress.handle
        dofs_per_entity = self.S.fiat_element.entity_dofs()
        dofs_per_entity = sum(self.mesh.make_dofs_per_plex_entity(dofs_per_entity))
        arity = dofs_per_entity*self.S.topological.dim
        self.stress_mass_asdat = Dat(DataSet(self.mesh.cell_set, arity*arity), dtype='double')
        istart, iend = smat.getOwnershipRange()
        idxs = [PETSc.IS().createGeneral(np.arange(i, i+arity, dtype=np.int32),
                                         comm=PETSc.COMM_SELF)
                for i in range(istart, iend, arity)]
        submats = smat.getSubMatrices(idxs, idxs)
        for i, m in enumerate(submats):
            self.stress_mass_asdat.data[i] = m[:, :].flatten()
        info("Computed stress mass matrix")

    @property
    def form_uh1(self):
        """ UFL for uh1 equation. """
        F = inner(self.w, self.u)*dx - self.f(self.w, self.s0, self.u0, self.n, self.absorption)
        return F

    @cached_property
    def rhs_uh1(self):
        """ RHS for uh1 equation. """
        return rhs(self.form_uh1)

    @property
    def form_stemp(self):
        """ UFL for stemp equation. """
        F = inner(self.v, self.s)*dx - self.g(self.v, self.uh1, self.I, self.n, self.l, self.mu, self.source)
        return F

    @cached_property
    def rhs_stemp(self):
        """ RHS for stemp equation. """
        return rhs(self.form_stemp)

    @property
    def form_uh2(self):
        """ UFL for uh2 equation. """
        F = inner(self.w, self.u)*dx - self.f(self.w, self.stemp, self.u0, self.n, self.absorption)
        return F

    @cached_property
    def rhs_uh2(self):
        """ RHS for uh2 equation. """
        return rhs(self.form_uh2)

    @property
    def form_u1(self):
        """ UFL for u1 equation. """
        # Note that we have multiplied through by dt here.
        F = self.density*inner(self.w, self.u)*dx - self.density*inner(self.w, self.u0)*dx - self.dt*inner(self.w, self.uh1)*dx - ((self.dt**3)/24.0)*inner(self.w, self.uh2)*dx
        return F

    @cached_property
    def rhs_u1(self):
        """ RHS for u1 equation. """
        return rhs(self.form_u1)

    @property
    def form_sh1(self):
        """ UFL for sh1 equation. """
        F = inner(self.v, self.s)*dx - self.g(self.v, self.u1, self.I, self.n, self.l, self.mu, self.source)
        return F

    @cached_property
    def rhs_sh1(self):
        """ RHS for sh1 equation. """
        return rhs(self.form_sh1)

    @property
    def form_utemp(self):
        """ UFL for utemp equation. """
        F = inner(self.w, self.u)*dx - self.f(self.w, self.sh1, self.u1, self.n, self.absorption)
        return F

    @cached_property
    def rhs_utemp(self):
        """ RHS for utemp equation. """
        return rhs(self.form_utemp)

    @property
    def form_sh2(self):
        """ UFL for sh2 equation. """
        F = inner(self.v, self.s)*dx - self.g(self.v, self.utemp, self.I, self.n, self.l, self.mu, self.source)
        return F

    @cached_property
    def rhs_sh2(self):
        """ RHS for sh2 equation. """
        return rhs(self.form_sh2)

    @property
    def form_s1(self):
        """ UFL for s1 equation. """
        # Note that we have multiplied through by dt here.
        F = inner(self.v, self.s)*dx - inner(self.v, self.s0)*dx - self.dt*inner(self.v, self.sh1)*dx - ((self.dt**3)/24.0)*inner(self.v, self.sh2)*dx
        return F

    @cached_property
    def rhs_s1(self):
        """ RHS for s1 equation. """
        return rhs(self.form_s1)

    def f(self, w, s0, u0, n, absorption=None):
        """ The RHS of the velocity equation. """
        f = -inner(grad(w), s0)*dx + inner(avg(s0)*n('+'), w('+'))*dS + inner(avg(s0)*n('-'), w('-'))*dS
        if(absorption):
            f += -inner(w, absorption*u0)*dx
        return f

    def g(self, v, u1, I, n, l, mu, source=None):
        """ The RHS of the stress equation. """
        g = - l*(v[i, j]*I[i, j]).dx(k)*u1[k]*dx + l*(jump(v[i, j], n[k])*I[i, j]*avg(u1[k]))*dS + l*(v[i, j]*I[i, j]*u1[k]*n[k])*ds - mu*inner(div(v), u1)*dx + mu*inner(avg(u1), jump(v, n))*dS - mu*inner(div(v.T), u1)*dx + mu*inner(avg(u1), jump(v.T, n))*dS + mu*inner(u1, dot(v, n))*ds + mu*inner(u1, dot(v.T, n))*ds
        if(source):
            g += inner(v, source)*dx
        return g

    def ast_matmul(self, F_a, implementation='optimized'):
        """Generate an AST for a PyOP2 kernel performing a matrix-vector multiplication."""

        # The number of dofs on each element is /ndofs*cdim/
        F_a_fs = F_a.function_space()
        ndofs = F_a_fs.fiat_element.entity_dofs()
        ndofs = sum(self.mesh.make_dofs_per_plex_entity(ndofs))
        cdim = F_a_fs.dim
        name = 'mat_vec_mul_kernel_%s' % F_a_fs.name

        # Cache key: kernel shape and variant.
        identifier = (ndofs, cdim, name, implementation)
        if identifier in self.asts:
            return self.asts[identifier]

        from coffee import isa, options
        if cdim and cdim % isa['dp_reg'] == 0:
            simd_pragma = '#pragma simd reduction(+:sum)'
        else:
            simd_pragma = ''

        # Craft the AST
        if implementation == 'optimized' and cdim >= 4:
            # Vectorizable variant: accumulate into a scalar 'sum' then store.
            body = ast.Incr(ast.Symbol('sum'),
                            ast.Prod(ast.Symbol('A', ('i',), ((ndofs*cdim, 'j*%d + k' % cdim),)),
                                     ast.Symbol('B', ('j', 'k'))))
            body = ast.c_for('k', cdim, body, simd_pragma).children[0]
            body = [ast.Decl('const int', ast.Symbol('index'), init=ast.Symbol('i%%%d' % cdim)),
                    ast.Decl('double', ast.Symbol('sum'), init=ast.Symbol('0.0')),
                    ast.c_for('j', ndofs, body).children[0],
                    ast.Assign(ast.Symbol('C', ('i/%d' % cdim, 'index')), 'sum')]
            body = ast.Block([ast.c_for('i', ndofs*cdim, body).children[0]])
            funargs = [ast.Decl('double* restrict', 'A'),
                       ast.Decl('double *restrict *restrict', 'B'),
                       ast.Decl('double *restrict *', 'C')]
            fundecl = ast.FunDecl('void', name, funargs, body, ['static', 'inline'])
        else:
            # Baseline variant: accumulate directly into C.
            body = ast.Incr(ast.Symbol('C', ('i/%d' % cdim, 'index')),
                            ast.Prod(ast.Symbol('A', ('i',), ((ndofs*cdim, 'j*%d + k' % cdim),)),
                                     ast.Symbol('B', ('j', 'k'))))
            body = ast.c_for('k', cdim, body).children[0]
            # NOTE(review): "'index' % cdim" below is a no-op format (no
            # conversion specifier in the string) — presumably just 'index'
            # was intended; harmless but worth confirming.
            body = [ast.Decl('const int', ast.Symbol('index'), init=ast.Symbol('i%%%d' % cdim)),
                    ast.Assign(ast.Symbol('C', ('i/%d' % cdim, 'index' % cdim)), '0.0'),
                    ast.c_for('j', ndofs, body).children[0]]
            body = ast.Block([ast.c_for('i', ndofs*cdim, body).children[0]])
            funargs = [ast.Decl('double* restrict', 'A'),
                       ast.Decl('double *restrict *restrict', 'B'),
                       ast.Decl('double *restrict *', 'C')]
            fundecl = ast.FunDecl('void', name, funargs, body, ['static', 'inline'])

        # Track the AST for later fast retrieval
        self.asts[identifier] = fundecl
        return fundecl

    def solve(self, rhs, matrix_asdat, result):
        """Apply the pre-inverted mass matrix: result = M^-1 * assemble(rhs),
        expressed as a per-cell mat-vec PyOP2 par_loop."""
        F_a = assemble(rhs)
        ast_matmul = self.ast_matmul(F_a)

        # Create the par loop (automatically added to the trace of loops to be executed)
        kernel = op2.Kernel(ast_matmul, ast_matmul.name)
        op2.par_loop(kernel, self.mesh.cell_set,
                     matrix_asdat(op2.READ),
                     F_a.dat(op2.READ, F_a.cell_node_map()),
                     result.dat(op2.WRITE, result.cell_node_map()))

    def write(self, u=None, s=None, output=True):
        r""" Write the velocity and/or stress fields to file.

        :param firedrake.Function u: The velocity field.
        :param firedrake.Function s: The stress field.
        :returns: None
        """
        # Flush any lazily-deferred par_loops before touching field data.
        _trace.evaluate_all()
        if output:
            with timed_region('i/o'):
                if(u):
                    self.u_stream.write(u)
                if(s):
                    # FIXME: Cannot currently write tensor valued fields to a VTU file.
                    # See https://github.com/firedrakeproject/firedrake/issues/538
                    #self.s_stream << s
                    pass

    def run(self, T, TS=0):
        """ Run the elastic wave simulation until t = T or ntimesteps = TS.

        :param float T: The finish time of the simulation.
        :param float TS: The maximum number of timesteps performed; ignored if = 0.
        :returns: The final solution fields for velocity and stress.
        """
        # Write out the initial condition.
        self.write(self.u1, self.s1, self.tofile)

        info("Generating inverse mass matrix")
        # Pre-assemble the inverse mass matrices, which should stay
        # constant throughout the simulation (assuming no mesh adaptivity).
        start = time()
        self.assemble_inverse_mass()
        end = time()
        info("DONE! (Elapsed: %f s)" % round(end - start, 3))
        op2.MPI.COMM_WORLD.barrier()
        info("Copying inverse mass matrix into a dat...")
        start = time()
        self.copy_massmatrix_into_dat()
        end = time()
        info("DONE! (Elapsed: %f s)" % round(end - start, 3))
        op2.MPI.COMM_WORLD.barrier()

        start = time()
        t = self.dt
        timestep = 0
        # NOTE(review): sys.maxint is Python 2 only — sys.maxsize is the
        # Python 3 equivalent.
        ntimesteps = sys.maxint if TS == 0 else TS

        while t <= T + 1e-12 and timestep < ntimesteps:
            if op2.MPI.COMM_WORLD.rank == 0 and timestep % self.output == 0:
                info("t = %f, (timestep = %d)" % (t, timestep))
            # All eight solves of one timestep are enclosed in a loop_chain
            # so PyOP2 can fuse/tile the generated par_loops.
            with loop_chain("main1",
                            tile_size=self.tiling_size,
                            num_unroll=self.tiling_uf,
                            mode=self.tiling_mode,
                            extra_halo=self.tiling_halo,
                            explicit=self.tiling_explicit,
                            use_glb_maps=self.tiling_glb_maps,
                            use_prefetch=self.tiling_prefetch,
                            coloring=self.tiling_coloring,
                            ignore_war=True,
                            log=self.tiling_log):
                # In case the source is time-dependent, update the time 't' here.
                if(self.source):
                    with timed_region('source term update'):
                        self.source_expression.t = t
                        self.source = self.source_expression

                # Solve for the velocity vector field.
                self.solve(self.rhs_uh1, self.velocity_mass_asdat, self.uh1)
                self.solve(self.rhs_stemp, self.stress_mass_asdat, self.stemp)
                self.solve(self.rhs_uh2, self.velocity_mass_asdat, self.uh2)
                self.solve(self.rhs_u1, self.velocity_mass_asdat, self.u1)

                # Solve for the stress tensor field.
                self.solve(self.rhs_sh1, self.stress_mass_asdat, self.sh1)
                self.solve(self.rhs_utemp, self.velocity_mass_asdat, self.utemp)
                self.solve(self.rhs_sh2, self.stress_mass_asdat, self.sh2)
                self.solve(self.rhs_s1, self.stress_mass_asdat, self.s1)

            self.u0.assign(self.u1)
            self.s0.assign(self.s1)

            # Write out the new fields
            self.write(self.u1, self.s1, self.tofile and timestep % self.output == 0)

            # Move onto next timestep
            t += self.dt
            timestep += 1

        # Write out the final state of the fields
        self.write(self.u1, self.s1, self.tofile)

        end = time()

        return start, end, timestep, self.u1, self.s1
# Helper stuff
def Vp(mu, l, density):
    r"""Return the P-wave velocity.

    The velocity is given by

    .. math:: \sqrt{\frac{(\lambda + 2\mu)}{\rho}}

    where :math:`\rho` is the density, and :math:`\lambda` and :math:`\mu`
    are the first and second Lame parameters, respectively.

    :param mu: The second Lame parameter.
    :param l: The first Lame parameter.
    :param density: The density.
    :returns: The P-wave velocity.
    :rtype: float
    """
    # P-wave modulus divided by density, then square-rooted.
    modulus_over_density = (l + 2*mu)/density
    return sqrt(modulus_over_density)
def Vs(mu, density):
    r"""Return the S-wave velocity.

    The velocity is given by

    .. math:: \sqrt{\frac{\mu}{\rho}}

    where :math:`\rho` is the density, and :math:`\mu` is the second Lame
    parameter.

    :param mu: The second Lame parameter.
    :param density: The density.
    :returns: The S-wave velocity.
    :rtype: float
    """
    shear_ratio = mu/density
    return sqrt(shear_ratio)
def cfl_dt(dx, Vp, courant_number):
    r"""Return the maximum permitted timestep math:`\delta t` under the
    CFL condition.

    :param float dx: The characteristic element length.
    :param float Vp: The P-wave velocity.
    :param float courant_number: The desired Courant number.
    :returns: The maximum permitted timestep, math:`\delta t`.
    :rtype: float
    """
    # dt = C * dx / Vp; the fastest wave must not cross more than C cells
    # per step.
    scaled_dx = courant_number*dx
    return scaled_dx/Vp
class ExplosiveSourceLF4(object):
    """Driver for the 2D explosive-source elastodynamics problem solved
    with the LF4 scheme (``ElasticLF4``), including loop-tiling/fusion
    configuration."""

    def explosive_source_lf4(self, T=2.5, TS=0, Lx=300.0, Ly=150.0, h=2.5, cn=0.05,
                             mesh_file=None, output=1, poly_order=2, params=None):
        """Set up and run the simulation; return the final velocity and
        stress fields.

        :param T: end time of the simulation.
        :param TS: maximum number of timesteps (0 means unlimited).
        :param Lx: domain extent in x (used when no ``mesh_file`` is given).
        :param Ly: domain extent in y (used when no ``mesh_file`` is given).
        :param h: characteristic element length of the structured mesh.
        :param cn: desired Courant number for the CFL timestep.
        :param mesh_file: optional unstructured mesh to load instead of a
            ``RectangleMesh``.
        :param output: write/report every ``output`` timesteps.
        :param poly_order: polynomial degree of the DG function spaces.
        :param params: dict of tiling/fusion options (tile_size, num_unroll,
            mode, partitioning, explicit_mode, ...); mutated in place
            (``explicit_mode`` and ``s_depth`` entries are rewritten).
        :returns: tuple ``(u1, s1)`` of the final velocity and stress fields.
        """
        # Tiling/fusion options supplied by the caller.
        tile_size = params['tile_size']
        num_unroll = params['num_unroll']
        extra_halo = params['extra_halo']
        part_mode = params['partitioning']
        explicit_mode = params['explicit_mode']

        # An explicit fusion scheme overrides the default solve count and
        # replaces params['explicit_mode'] with the resolved scheme.
        if explicit_mode:
            fusion_scheme = FusionSchemes.get(explicit_mode, part_mode, tile_size)
            num_solves, params['explicit_mode'] = fusion_scheme
        else:
            num_solves = ElasticLF4.num_solves

        # Either load an unstructured mesh or build a structured rectangle.
        if mesh_file:
            mesh = Mesh(mesh_file)
        else:
            mesh = RectangleMesh(int(Lx/h), int(Ly/h), Lx, Ly)

        set_log_level(INFO)

        kwargs = {}
        if params['mode'] in ['tile', 'only_tile']:
            # Halo depth needed so tiled loop chains stay correct in parallel.
            s_depth = calculate_sdepth(num_solves, num_unroll, extra_halo)
            if part_mode == 'metis':
                kwargs['reorder'] = ('metis-rcm', mesh.num_cells() / tile_size)
        else:
            s_depth = 1
        # FIXME: need s_depth in firedrake to be able to use this
        # kwargs['s_depth'] = s_depth
        params['s_depth'] = s_depth

        mesh.topology.init(**kwargs)
        slope(mesh, debug=True)

        # Instantiate the model
        self.elastic = ElasticLF4(mesh, "DG", poly_order, 2, output, params)

        info("S-depth used: %d" % s_depth)
        info("Polynomial order: %d" % poly_order)

        # Constants (material properties; l and mu are the Lame parameters)
        self.elastic.density = 1.0
        self.elastic.mu = 3600.0
        self.elastic.l = 3599.3664

        self.Vp = Vp(self.elastic.mu, self.elastic.l, self.elastic.density)
        self.Vs = Vs(self.elastic.mu, self.elastic.density)
        info("P-wave velocity: %f" % self.Vp)
        info("S-wave velocity: %f" % self.Vs)

        self.dx = h
        self.courant_number = cn
        self.elastic.dt = cfl_dt(self.dx, self.Vp, self.courant_number)
        info("Using a timestep of %f" % self.elastic.dt)

        # Source: a Ricker-like time pulse confined to a small rectangle
        # near the top boundary.
        exp_area = (44.5, 45.5, Ly - 1.5, Ly - 0.5)
        if poly_order == 1:
            # Adjust explosion area
            exp_area = (149.5, 150.5, Ly - 1.5, Ly - 0.5)
        a = 159.42
        self.elastic.source_expression = Expression((("x[0] >= %f && x[0] <= %f && x[1] >= %f && x[1] <= %f ? (-1.0 + 2*a*pow(t - 0.3, 2))*exp(-a*pow(t - 0.3, 2)) : 0.0" % exp_area, "0.0"),
                                                     ("0.0", "x[0] >= %f && x[0] <= %f && x[1] >= %f && x[1] <= %f ? (-1.0 + 2*a*pow(t - 0.3, 2))*exp(-a*pow(t - 0.3, 2)) : 0.0" % exp_area)), a=a, t=0)
        self.elastic.source_function = Function(self.elastic.S)
        self.elastic.source = self.elastic.source_expression

        # Absorption: damping layer along the left, right and bottom edges.
        F = FunctionSpace(mesh, "DG", poly_order, name='F')
        self.elastic.absorption_function = Function(F)
        self.elastic.absorption = Expression("x[0] <= 20 || x[0] >= %f || x[1] <= 20.0 ? 1000 : 0" % (Lx - 20,))

        # Initial conditions: velocity and stress both start at zero.
        uic = Expression(('0.0', '0.0'))
        self.elastic.u0.assign(Function(self.elastic.U).interpolate(uic))
        sic = Expression((('0', '0'), ('0', '0')))
        self.elastic.s0.assign(Function(self.elastic.S).interpolate(sic))

        # Run the simulation
        start, end, ntimesteps, u1, s1 = self.elastic.run(T, TS=TS)

        # Print runtime summary
        output_time(start, end,
                    tofile=params['tofile'],
                    verbose=params['verbose'],
                    meshid=("h%s" % h).replace('.', ''),
                    ntimesteps=ntimesteps,
                    nloops=ElasticLF4.loop_chain_length*num_unroll,
                    partitioning=part_mode,
                    tile_size=tile_size,
                    extra_halo=extra_halo,
                    explicit_mode=explicit_mode,
                    glb_maps=params['use_glb_maps'],
                    prefetch=params['use_prefetch'],
                    coloring=params['coloring'],
                    poly_order=poly_order,
                    domain=os.path.splitext(os.path.basename(mesh.name))[0],
                    function_spaces=[self.elastic.S, self.elastic.U])

        return u1, s1
if __name__ == '__main__':
    set_log_level(INFO)

    # Parse the input
    args = parser()
    params = {
        'num_unroll': args.num_unroll,
        'tile_size': args.tile_size,
        'mode': args.fusion_mode,
        'partitioning': args.part_mode,
        'coloring': args.coloring,
        'extra_halo': args.extra_halo,
        'explicit_mode': args.explicit_mode,
        'explicit_mode_id': args.explicit_mode,
        'use_glb_maps': args.glb_maps,
        'use_prefetch': args.prefetch,
        'log': args.log,
        'tofile': args.tofile,
        'verbose': args.verbose
    }

    # Set the kernel optimization level (default: O2)
    parameters['coffee']['optlevel'] = args.coffee_opt

    # Is it just a run to check correctness?
    if args.check:
        Lx, Ly, h, time_max, tolerance = 20, 20, 2.5, 0.01, 1e-10
        info("Checking correctness of original and tiled versions, with:")
        info("    (Lx, Ly, T, tolerance)=%s" % str((Lx, Ly, time_max, tolerance)))
        info("    %s" % params)
        # Run the tiled variant.
        # Fixed: the arguments are now passed by keyword.  The previous
        # positional call bound TS=Lx, Lx=Ly, Ly=h, h=sys.maxint and
        # cn=params (a dict!), so the check ran a different problem
        # entirely.  output=sys.maxint suppresses intermediate output.
        u1, s1 = ExplosiveSourceLF4().explosive_source_lf4(T=time_max, Lx=Lx, Ly=Ly, h=h,
                                                           output=sys.maxint, params=params)
        # Run the original (untiled) code.  Fixed: start from a copy of
        # ``params`` so the keys ElasticLF4 and output_time read (tofile,
        # log, coloring, use_glb_maps, ...) are present, then switch off
        # tiling/fusion.  The old hand-written dict raised KeyError.
        original = dict(params, num_unroll=0, tile_size=0, mode=None,
                        partitioning='chunk', extra_halo=0, explicit_mode=None)
        u1_orig, s1_orig = ExplosiveSourceLF4().explosive_source_lf4(T=time_max, Lx=Lx, Ly=Ly, h=h,
                                                                     output=sys.maxint, params=original)
        # Check output
        info("Checking output...")
        assert np.allclose(u1.dat.data, u1_orig.dat.data, rtol=1e-10)
        assert np.allclose(s1.dat.data, s1_orig.dat.data, rtol=1e-10)
        info("Results OK!")
        sys.exit(0)

    # Set the input mesh
    if args.mesh_file:
        info("Using the unstructured mesh %s" % args.mesh_file)
        kwargs = {'T': args.time_max, 'TS': args.timesteps_max, 'mesh_file': args.mesh_file,
                  'h': args.ms, 'cn': args.cn, 'output': args.output, 'poly_order': args.poly_order,
                  'params': params}
    else:
        Lx, Ly = eval(args.mesh_size)
        info("Using the structured mesh with values (Lx,Ly,h)=%s" % str((Lx, Ly, args.ms)))
        kwargs = {'T': args.time_max, 'TS': args.timesteps_max, 'Lx': Lx, 'Ly': Ly, 'h': args.ms,
                  'output': args.output, 'poly_order': args.poly_order, 'params': params}
    info("h=%f, courant number=%f" % (args.ms, args.cn))

    if args.profile:
        cProfile.run('ExplosiveSourceLF4().explosive_source_lf4(**kwargs)',
                     'log_rank%d.cprofile' % op2.MPI.COMM_WORLD.rank)
    else:
        u1, s1 = ExplosiveSourceLF4().explosive_source_lf4(**kwargs)
| 40.683748 | 318 | 0.575557 |
from math import *
import mpi4py
import numpy as np
from time import time
import sys
import os
import cProfile
from firedrake import *
from firedrake.petsc import PETSc
from pyop2.utils import cached_property
from pyop2.profiling import timed_region
from pyop2.base import _trace, Dat, DataSet
from pyop2.fusion.interface import loop_chain
from pyop2.logger import info, set_log_level, INFO
import coffee.base as ast
from utils import parser, output_time, calculate_sdepth, FusionSchemes
class ElasticLF4(object):
loop_chain_length = 28
num_solves = 8
def __init__(self, mesh, family, degree, dimension, output=1, params=None):
self.degree = degree
self.mesh = mesh
self.dimension = dimension
self.output = output
self.tofile = params['tofile']
self.S = TensorFunctionSpace(mesh, family, degree, name='S')
self.U = VectorFunctionSpace(mesh, family, degree, name='U')
self.S_tot_dofs = op2.MPI.COMM_WORLD.allreduce(self.S.dof_count, op=mpi4py.MPI.SUM)
self.U_tot_dofs = op2.MPI.COMM_WORLD.allreduce(self.U.dof_count, op=mpi4py.MPI.SUM)
info("Number of degrees of freedom (Velocity): %d" % self.U_tot_dofs)
info("Number of degrees of freedom (Stress): %d" % self.S_tot_dofs)
self.s = TrialFunction(self.S)
self.v = TestFunction(self.S)
self.u = TrialFunction(self.U)
self.w = TestFunction(self.U)
self.s0 = Function(self.S, name="StressOld")
self.sh1 = Function(self.S, name="StressHalf1")
self.stemp = Function(self.S, name="StressTemp")
self.sh2 = Function(self.S, name="StressHalf2")
self.s1 = Function(self.S, name="StressNew")
self.u0 = Function(self.U, name="VelocityOld")
self.uh1 = Function(self.U, name="VelocityHalf1")
self.utemp = Function(self.U, name="VelocityTemp")
self.uh2 = Function(self.U, name="VelocityHalf2")
self.u1 = Function(self.U, name="VelocityNew")
self.absorption_function = None
self.source_function = None
self.source_expression = None
self._dt = None
self._density = None
self._mu = None
self._l = None
self.n = FacetNormal(self.mesh)
self.I = Identity(self.dimension)
self.tiling_size = params['tile_size']
self.tiling_uf = params['num_unroll']
self.tiling_mode = params['mode']
self.tiling_halo = params['extra_halo']
self.tiling_explicit = params['explicit_mode']
self.tiling_explicit_id = params['explicit_mode_id']
self.tiling_log = params['log']
self.tiling_sdepth = params['s_depth']
self.tiling_part = params['partitioning']
self.tiling_coloring = params['coloring']
self.tiling_glb_maps = params['use_glb_maps']
self.tiling_prefetch = params['use_prefetch']
self.asts = {}
if self.tofile:
platform = os.environ.get('NODENAME', 'unknown')
tmpdir = os.environ['TMPDIR']
base = os.path.join(tmpdir, 'output', platform,
'p%d' % self.degree, 'uf%d' % self.tiling_uf)
if op2.MPI.COMM_WORLD.rank == 0:
if not os.path.exists(base):
os.makedirs(base)
sub_dirs = [d for d in os.listdir(base)
if os.path.isdir(os.path.join(base, d))]
sub_dir = "%d_em%d_part%s_tile%s" % (len(sub_dirs),
self.tiling_explicit_id,
self.tiling_size if self.tiling_uf else 0,
self.tiling_part if self.tiling_uf else 'None')
base = os.path.join(base, sub_dir)
os.makedirs(base)
op2.MPI.COMM_WORLD.barrier()
base = op2.MPI.COMM_WORLD.bcast(base, root=0)
self.u_stream = File(os.path.join(base, 'velocity.pvd'))
self.s_stream = File(os.path.join(base, 'stress.pvd'))
@property
def absorption(self):
return self.absorption_function
@absorption.setter
def absorption(self, expression):
self.absorption_function.interpolate(expression)
@property
def source(self):
return self.source_function
@source.setter
def source(self, expression):
self.source_function.interpolate(expression)
def assemble_inverse_mass(self):
self.inverse_mass_velocity = assemble(inner(self.w, self.u)*dx, inverse=True)
self.inverse_mass_velocity.assemble()
self.imass_velocity = self.inverse_mass_velocity.M
self.inverse_mass_stress = assemble(inner(self.v, self.s)*dx, inverse=True)
self.inverse_mass_stress.assemble()
self.imass_stress = self.inverse_mass_stress.M
def copy_massmatrix_into_dat(self):
vmat = self.imass_velocity.handle
dofs_per_entity = self.U.fiat_element.entity_dofs()
dofs_per_entity = sum(self.mesh.make_dofs_per_plex_entity(dofs_per_entity))
arity = dofs_per_entity*self.U.topological.dim
self.velocity_mass_asdat = Dat(DataSet(self.mesh.cell_set, arity*arity), dtype='double')
istart, iend = vmat.getOwnershipRange()
idxs = [PETSc.IS().createGeneral(np.arange(i, i+arity, dtype=np.int32),
comm=PETSc.COMM_SELF)
for i in range(istart, iend, arity)]
submats = vmat.getSubMatrices(idxs, idxs)
for i, m in enumerate(submats):
self.velocity_mass_asdat.data[i] = m[:, :].flatten()
info("Computed velocity mass matrix")
smat = self.imass_stress.handle
dofs_per_entity = self.S.fiat_element.entity_dofs()
dofs_per_entity = sum(self.mesh.make_dofs_per_plex_entity(dofs_per_entity))
arity = dofs_per_entity*self.S.topological.dim
self.stress_mass_asdat = Dat(DataSet(self.mesh.cell_set, arity*arity), dtype='double')
istart, iend = smat.getOwnershipRange()
idxs = [PETSc.IS().createGeneral(np.arange(i, i+arity, dtype=np.int32),
comm=PETSc.COMM_SELF)
for i in range(istart, iend, arity)]
submats = smat.getSubMatrices(idxs, idxs)
for i, m in enumerate(submats):
self.stress_mass_asdat.data[i] = m[:, :].flatten()
info("Computed stress mass matrix")
@property
def form_uh1(self):
F = inner(self.w, self.u)*dx - self.f(self.w, self.s0, self.u0, self.n, self.absorption)
return F
@cached_property
def rhs_uh1(self):
return rhs(self.form_uh1)
@property
def form_stemp(self):
F = inner(self.v, self.s)*dx - self.g(self.v, self.uh1, self.I, self.n, self.l, self.mu, self.source)
return F
@cached_property
def rhs_stemp(self):
return rhs(self.form_stemp)
@property
def form_uh2(self):
F = inner(self.w, self.u)*dx - self.f(self.w, self.stemp, self.u0, self.n, self.absorption)
return F
@cached_property
def rhs_uh2(self):
return rhs(self.form_uh2)
@property
def form_u1(self):
F = self.density*inner(self.w, self.u)*dx - self.density*inner(self.w, self.u0)*dx - self.dt*inner(self.w, self.uh1)*dx - ((self.dt**3)/24.0)*inner(self.w, self.uh2)*dx
return F
@cached_property
def rhs_u1(self):
return rhs(self.form_u1)
@property
def form_sh1(self):
F = inner(self.v, self.s)*dx - self.g(self.v, self.u1, self.I, self.n, self.l, self.mu, self.source)
return F
@cached_property
def rhs_sh1(self):
return rhs(self.form_sh1)
@property
def form_utemp(self):
F = inner(self.w, self.u)*dx - self.f(self.w, self.sh1, self.u1, self.n, self.absorption)
return F
@cached_property
def rhs_utemp(self):
return rhs(self.form_utemp)
@property
def form_sh2(self):
F = inner(self.v, self.s)*dx - self.g(self.v, self.utemp, self.I, self.n, self.l, self.mu, self.source)
return F
@cached_property
def rhs_sh2(self):
return rhs(self.form_sh2)
@property
def form_s1(self):
F = inner(self.v, self.s)*dx - inner(self.v, self.s0)*dx - self.dt*inner(self.v, self.sh1)*dx - ((self.dt**3)/24.0)*inner(self.v, self.sh2)*dx
return F
@cached_property
def rhs_s1(self):
return rhs(self.form_s1)
def f(self, w, s0, u0, n, absorption=None):
f = -inner(grad(w), s0)*dx + inner(avg(s0)*n('+'), w('+'))*dS + inner(avg(s0)*n('-'), w('-'))*dS
if(absorption):
f += -inner(w, absorption*u0)*dx
return f
def g(self, v, u1, I, n, l, mu, source=None):
g = - l*(v[i, j]*I[i, j]).dx(k)*u1[k]*dx + l*(jump(v[i, j], n[k])*I[i, j]*avg(u1[k]))*dS + l*(v[i, j]*I[i, j]*u1[k]*n[k])*ds - mu*inner(div(v), u1)*dx + mu*inner(avg(u1), jump(v, n))*dS - mu*inner(div(v.T), u1)*dx + mu*inner(avg(u1), jump(v.T, n))*dS + mu*inner(u1, dot(v, n))*ds + mu*inner(u1, dot(v.T, n))*ds
if(source):
g += inner(v, source)*dx
return g
def ast_matmul(self, F_a, implementation='optimized'):
F_a_fs = F_a.function_space()
ndofs = F_a_fs.fiat_element.entity_dofs()
ndofs = sum(self.mesh.make_dofs_per_plex_entity(ndofs))
cdim = F_a_fs.dim
name = 'mat_vec_mul_kernel_%s' % F_a_fs.name
identifier = (ndofs, cdim, name, implementation)
if identifier in self.asts:
return self.asts[identifier]
from coffee import isa, options
if cdim and cdim % isa['dp_reg'] == 0:
simd_pragma = '#pragma simd reduction(+:sum)'
else:
simd_pragma = ''
if implementation == 'optimized' and cdim >= 4:
body = ast.Incr(ast.Symbol('sum'),
ast.Prod(ast.Symbol('A', ('i',), ((ndofs*cdim, 'j*%d + k' % cdim),)),
ast.Symbol('B', ('j', 'k'))))
body = ast.c_for('k', cdim, body, simd_pragma).children[0]
body = [ast.Decl('const int', ast.Symbol('index'), init=ast.Symbol('i%%%d' % cdim)),
ast.Decl('double', ast.Symbol('sum'), init=ast.Symbol('0.0')),
ast.c_for('j', ndofs, body).children[0],
ast.Assign(ast.Symbol('C', ('i/%d' % cdim, 'index')), 'sum')]
body = ast.Block([ast.c_for('i', ndofs*cdim, body).children[0]])
funargs = [ast.Decl('double* restrict', 'A'),
ast.Decl('double *restrict *restrict', 'B'),
ast.Decl('double *restrict *', 'C')]
fundecl = ast.FunDecl('void', name, funargs, body, ['static', 'inline'])
else:
body = ast.Incr(ast.Symbol('C', ('i/%d' % cdim, 'index')),
ast.Prod(ast.Symbol('A', ('i',), ((ndofs*cdim, 'j*%d + k' % cdim),)),
ast.Symbol('B', ('j', 'k'))))
body = ast.c_for('k', cdim, body).children[0]
body = [ast.Decl('const int', ast.Symbol('index'), init=ast.Symbol('i%%%d' % cdim)),
ast.Assign(ast.Symbol('C', ('i/%d' % cdim, 'index' % cdim)), '0.0'),
ast.c_for('j', ndofs, body).children[0]]
body = ast.Block([ast.c_for('i', ndofs*cdim, body).children[0]])
funargs = [ast.Decl('double* restrict', 'A'),
ast.Decl('double *restrict *restrict', 'B'),
ast.Decl('double *restrict *', 'C')]
fundecl = ast.FunDecl('void', name, funargs, body, ['static', 'inline'])
self.asts[identifier] = fundecl
return fundecl
def solve(self, rhs, matrix_asdat, result):
F_a = assemble(rhs)
ast_matmul = self.ast_matmul(F_a)
kernel = op2.Kernel(ast_matmul, ast_matmul.name)
op2.par_loop(kernel, self.mesh.cell_set,
matrix_asdat(op2.READ),
F_a.dat(op2.READ, F_a.cell_node_map()),
result.dat(op2.WRITE, result.cell_node_map()))
def write(self, u=None, s=None, output=True):
_trace.evaluate_all()
if output:
with timed_region('i/o'):
if(u):
self.u_stream.write(u)
if(s):
pass
def run(self, T, TS=0):
self.write(self.u1, self.s1, self.tofile)
info("Generating inverse mass matrix")
start = time()
self.assemble_inverse_mass()
end = time()
info("DONE! (Elapsed: %f s)" % round(end - start, 3))
op2.MPI.COMM_WORLD.barrier()
info("Copying inverse mass matrix into a dat...")
start = time()
self.copy_massmatrix_into_dat()
end = time()
info("DONE! (Elapsed: %f s)" % round(end - start, 3))
op2.MPI.COMM_WORLD.barrier()
start = time()
t = self.dt
timestep = 0
ntimesteps = sys.maxint if TS == 0 else TS
while t <= T + 1e-12 and timestep < ntimesteps:
if op2.MPI.COMM_WORLD.rank == 0 and timestep % self.output == 0:
info("t = %f, (timestep = %d)" % (t, timestep))
with loop_chain("main1",
tile_size=self.tiling_size,
num_unroll=self.tiling_uf,
mode=self.tiling_mode,
extra_halo=self.tiling_halo,
explicit=self.tiling_explicit,
use_glb_maps=self.tiling_glb_maps,
use_prefetch=self.tiling_prefetch,
coloring=self.tiling_coloring,
ignore_war=True,
log=self.tiling_log):
if(self.source):
with timed_region('source term update'):
self.source_expression.t = t
self.source = self.source_expression
self.solve(self.rhs_uh1, self.velocity_mass_asdat, self.uh1)
self.solve(self.rhs_stemp, self.stress_mass_asdat, self.stemp)
self.solve(self.rhs_uh2, self.velocity_mass_asdat, self.uh2)
self.solve(self.rhs_u1, self.velocity_mass_asdat, self.u1)
self.solve(self.rhs_sh1, self.stress_mass_asdat, self.sh1)
self.solve(self.rhs_utemp, self.velocity_mass_asdat, self.utemp)
self.solve(self.rhs_sh2, self.stress_mass_asdat, self.sh2)
self.solve(self.rhs_s1, self.stress_mass_asdat, self.s1)
self.u0.assign(self.u1)
self.s0.assign(self.s1)
self.write(self.u1, self.s1, self.tofile and timestep % self.output == 0)
t += self.dt
timestep += 1
self.write(self.u1, self.s1, self.tofile)
end = time()
return start, end, timestep, self.u1, self.s1
def Vp(mu, l, density):
return sqrt((l + 2*mu)/density)
def Vs(mu, density):
return sqrt(mu/density)
def cfl_dt(dx, Vp, courant_number):
return (courant_number*dx)/Vp
class ExplosiveSourceLF4(object):
def explosive_source_lf4(self, T=2.5, TS=0, Lx=300.0, Ly=150.0, h=2.5, cn=0.05,
mesh_file=None, output=1, poly_order=2, params=None):
tile_size = params['tile_size']
num_unroll = params['num_unroll']
extra_halo = params['extra_halo']
part_mode = params['partitioning']
explicit_mode = params['explicit_mode']
if explicit_mode:
fusion_scheme = FusionSchemes.get(explicit_mode, part_mode, tile_size)
num_solves, params['explicit_mode'] = fusion_scheme
else:
num_solves = ElasticLF4.num_solves
if mesh_file:
mesh = Mesh(mesh_file)
else:
mesh = RectangleMesh(int(Lx/h), int(Ly/h), Lx, Ly)
set_log_level(INFO)
kwargs = {}
if params['mode'] in ['tile', 'only_tile']:
s_depth = calculate_sdepth(num_solves, num_unroll, extra_halo)
if part_mode == 'metis':
kwargs['reorder'] = ('metis-rcm', mesh.num_cells() / tile_size)
else:
s_depth = 1
params['s_depth'] = s_depth
mesh.topology.init(**kwargs)
slope(mesh, debug=True)
self.elastic = ElasticLF4(mesh, "DG", poly_order, 2, output, params)
info("S-depth used: %d" % s_depth)
info("Polynomial order: %d" % poly_order)
self.elastic.density = 1.0
self.elastic.mu = 3600.0
self.elastic.l = 3599.3664
self.Vp = Vp(self.elastic.mu, self.elastic.l, self.elastic.density)
self.Vs = Vs(self.elastic.mu, self.elastic.density)
info("P-wave velocity: %f" % self.Vp)
info("S-wave velocity: %f" % self.Vs)
self.dx = h
self.courant_number = cn
self.elastic.dt = cfl_dt(self.dx, self.Vp, self.courant_number)
info("Using a timestep of %f" % self.elastic.dt)
exp_area = (44.5, 45.5, Ly - 1.5, Ly - 0.5)
if poly_order == 1:
exp_area = (149.5, 150.5, Ly - 1.5, Ly - 0.5)
a = 159.42
self.elastic.source_expression = Expression((("x[0] >= %f && x[0] <= %f && x[1] >= %f && x[1] <= %f ? (-1.0 + 2*a*pow(t - 0.3, 2))*exp(-a*pow(t - 0.3, 2)) : 0.0" % exp_area, "0.0"),
("0.0", "x[0] >= %f && x[0] <= %f && x[1] >= %f && x[1] <= %f ? (-1.0 + 2*a*pow(t - 0.3, 2))*exp(-a*pow(t - 0.3, 2)) : 0.0" % exp_area)), a=a, t=0)
self.elastic.source_function = Function(self.elastic.S)
self.elastic.source = self.elastic.source_expression
F = FunctionSpace(mesh, "DG", poly_order, name='F')
self.elastic.absorption_function = Function(F)
self.elastic.absorption = Expression("x[0] <= 20 || x[0] >= %f || x[1] <= 20.0 ? 1000 : 0" % (Lx - 20,))
uic = Expression(('0.0', '0.0'))
self.elastic.u0.assign(Function(self.elastic.U).interpolate(uic))
sic = Expression((('0', '0'), ('0', '0')))
self.elastic.s0.assign(Function(self.elastic.S).interpolate(sic))
start, end, ntimesteps, u1, s1 = self.elastic.run(T, TS=TS)
output_time(start, end,
tofile=params['tofile'],
verbose=params['verbose'],
meshid=("h%s" % h).replace('.', ''),
ntimesteps=ntimesteps,
nloops=ElasticLF4.loop_chain_length*num_unroll,
partitioning=part_mode,
tile_size=tile_size,
extra_halo=extra_halo,
explicit_mode=explicit_mode,
glb_maps=params['use_glb_maps'],
prefetch=params['use_prefetch'],
coloring=params['coloring'],
poly_order=poly_order,
domain=os.path.splitext(os.path.basename(mesh.name))[0],
function_spaces=[self.elastic.S, self.elastic.U])
return u1, s1
if __name__ == '__main__':
set_log_level(INFO)
args = parser()
params = {
'num_unroll': args.num_unroll,
'tile_size': args.tile_size,
'mode': args.fusion_mode,
'partitioning': args.part_mode,
'coloring': args.coloring,
'extra_halo': args.extra_halo,
'explicit_mode': args.explicit_mode,
'explicit_mode_id': args.explicit_mode,
'use_glb_maps': args.glb_maps,
'use_prefetch': args.prefetch,
'log': args.log,
'tofile': args.tofile,
'verbose': args.verbose
}
parameters['coffee']['optlevel'] = args.coffee_opt
if args.check:
Lx, Ly, h, time_max, tolerance = 20, 20, 2.5, 0.01, 1e-10
info("Checking correctness of original and tiled versions, with:")
info(" (Lx, Ly, T, tolerance)=%s" % str((Lx, Ly, time_max, tolerance)))
info(" %s" % params)
u1, s1 = ExplosiveSourceLF4().explosive_source_lf4(time_max, Lx, Ly, h,
sys.maxint, params)
original = {'num_unroll': 0, 'tile_size': 0, 'mode': None,
'partitioning': 'chunk', 'extra_halo': 0}
u1_orig, s1_orig = ExplosiveSourceLF4().explosive_source_lf4(time_max, Lx, Ly, h,
sys.maxint, original)
info("Checking output...")
assert np.allclose(u1.dat.data, u1_orig.dat.data, rtol=1e-10)
assert np.allclose(s1.dat.data, s1_orig.dat.data, rtol=1e-10)
info("Results OK!")
sys.exit(0)
if args.mesh_file:
info("Using the unstructured mesh %s" % args.mesh_file)
kwargs = {'T': args.time_max, 'TS': args.timesteps_max, 'mesh_file': args.mesh_file,
'h': args.ms, 'cn': args.cn, 'output': args.output, 'poly_order': args.poly_order,
'params': params}
else:
Lx, Ly = eval(args.mesh_size)
info("Using the structured mesh with values (Lx,Ly,h)=%s" % str((Lx, Ly, args.ms)))
kwargs = {'T': args.time_max, 'TS': args.timesteps_max, 'Lx': Lx, 'Ly': Ly, 'h': args.ms,
'output': args.output, 'poly_order': args.poly_order, 'params': params}
info("h=%f, courant number=%f" % (args.ms, args.cn))
if args.profile:
cProfile.run('ExplosiveSourceLF4().explosive_source_lf4(**kwargs)',
'log_rank%d.cprofile' % op2.MPI.COMM_WORLD.rank)
else:
u1, s1 = ExplosiveSourceLF4().explosive_source_lf4(**kwargs)
| true | true |
f728d2d5b0214e9814e6ffac8ee3bd2b253d502e | 1,789 | py | Python | src/test/parser/template/graph/test_basics.py | ajit2688/program-y-chatbot | f0a7eb33be2ec8de630644a6393296ddd2576eee | [
"MIT"
] | null | null | null | src/test/parser/template/graph/test_basics.py | ajit2688/program-y-chatbot | f0a7eb33be2ec8de630644a6393296ddd2576eee | [
"MIT"
] | null | null | null | src/test/parser/template/graph/test_basics.py | ajit2688/program-y-chatbot | f0a7eb33be2ec8de630644a6393296ddd2576eee | [
"MIT"
] | null | null | null | import unittest
import xml.etree.ElementTree as ET
from programy.bot import Bot
from programy.brain import Brain
from programy.config import ClientConfiguration, BrainConfiguration
from programy.dialog import Question, Sentence
from programy.parser.template.graph import TemplateGraph
from programy.parser.template.nodes import *
class TemplateGraphBasicTests(unittest.TestCase):
    """Basic parsing tests for ``TemplateGraph.parse_template_expression``:
    an empty <template> must raise, and a simple word template must parse
    into word nodes."""

    def setUp(self):
        """Build a parser plus a minimal bot/conversation fixture holding
        one question, so template nodes that consult conversation state can
        resolve."""
        self.parser = TemplateGraph()
        self.assertIsNotNone(self.parser)

        self.test_brain = None
        self.test_sentence = Sentence("test sentence")

        # Pre-populate the private star/thatstar/topicstar lists directly;
        # the tests bypass the matching step that would normally fill them.
        self.test_sentence._stars = ['one', 'two', 'three', 'four', 'five', 'six']
        self.test_sentence._thatstars = ["*"]
        self.test_sentence._topicstars = ["*"]

        test_config = ClientConfiguration()

        self.test_bot = Bot(Brain(BrainConfiguration()), config=test_config.bot_configuration)
        self.test_clientid = "testid"

        # Attach one question to the client's conversation history.
        conversation = self.test_bot.get_conversation(self.test_clientid)
        question = Question.create_from_sentence(self.test_sentence)
        conversation._questions.append(question)

    def test_template_no_content(self):
        """A <template> with no children must raise ParserException."""
        template = ET.fromstring("""
            <template>
            </template>
            """)
        with self.assertRaises(ParserException):
            ast = self.parser.parse_template_expression(template)

    def test_base_template(self):
        """A plain two-word template parses into two TemplateWordNode children."""
        template = ET.fromstring("""
            <template>HELLO WORLD</template>
            """)
        ast = self.parser.parse_template_expression(template)
        self.assertIsNotNone(ast)
        self.assertEqual(2, len(ast.children))
        self.assertIsInstance(ast.children[0], TemplateWordNode)
        self.assertIsInstance(ast.children[1], TemplateWordNode)
# Allow the suite to be run directly: ``python test_basics.py``.
if __name__ == '__main__':
    unittest.main()
| 32.527273 | 94 | 0.704304 | import unittest
import xml.etree.ElementTree as ET
from programy.bot import Bot
from programy.brain import Brain
from programy.config import ClientConfiguration, BrainConfiguration
from programy.dialog import Question, Sentence
from programy.parser.template.graph import TemplateGraph
from programy.parser.template.nodes import *
class TemplateGraphBasicTests(unittest.TestCase):
def setUp(self):
self.parser = TemplateGraph()
self.assertIsNotNone(self.parser)
self.test_brain = None
self.test_sentence = Sentence("test sentence")
self.test_sentence._stars = ['one', 'two', 'three', 'four', 'five', 'six']
self.test_sentence._thatstars = ["*"]
self.test_sentence._topicstars = ["*"]
test_config = ClientConfiguration()
self.test_bot = Bot(Brain(BrainConfiguration()), config=test_config.bot_configuration)
self.test_clientid = "testid"
conversation = self.test_bot.get_conversation(self.test_clientid)
question = Question.create_from_sentence(self.test_sentence)
conversation._questions.append(question)
def test_template_no_content(self):
template = ET.fromstring("""
<template>
</template>
""")
with self.assertRaises(ParserException):
ast = self.parser.parse_template_expression(template)
def test_base_template(self):
template = ET.fromstring("""
<template>HELLO WORLD</template>
""")
ast = self.parser.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertEqual(2, len(ast.children))
self.assertIsInstance(ast.children[0], TemplateWordNode)
self.assertIsInstance(ast.children[1], TemplateWordNode)
if __name__ == '__main__':
unittest.main()
| true | true |
f728d2dec5e64da29b8b087fc4f90f8e4c08de12 | 5,641 | py | Python | formcorp/api.py | formcorp/python-formcorp | ed8d3767c260d45d084a7df8c25e485bb62752dc | [
"Apache-2.0"
] | null | null | null | formcorp/api.py | formcorp/python-formcorp | ed8d3767c260d45d084a7df8c25e485bb62752dc | [
"Apache-2.0"
] | null | null | null | formcorp/api.py | formcorp/python-formcorp | ed8d3767c260d45d084a7df8c25e485bb62752dc | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict
import hashlib
import base64
import hmac
import httplib
import json
import random
import string
import time
import urllib
"""
API configuration.
"""
API_HOSTNAME = 'api.formcorp.com.au'
# Module constants
_constants = {
'SIGNATURE_ENCODING': "utf-8",
'REQUEST_TYPE_POST': 'POST',
'REQUEST_TYPE_GET': 'GET',
'REQUEST_TYPE_PUT': 'PUT',
'HEADER_PARAM_AUTHORIZATION': 'Authorization',
'HEADER_PARAM_SIGNATURE': 'Signature',
'HEADER_PARAM_ACCEPT': 'Accept',
'HEADER_PARAM_BEARER': 'Bearer {0}',
'HEADER_APPLICATION_JSON': 'application/json',
'HEADER_PARAM_CONTENT_TYPE': 'Content-type',
'HEADER_URL_FORM_ENCODED_TYPE': 'application/x-www-form-urlencoded'
}
# Configuration client can alter
_config = dict(private_key=None, public_key=None, form_id=None, use_ssl=True)
def init(private_key, public_key):
    """
    Initialise the module with the account's API key pair.

    :type private_key: basestring
    :type public_key: basestring
    """
    assert isinstance(private_key, basestring)
    assert isinstance(public_key, basestring)
    _config.update(private_key=private_key, public_key=public_key)
def use_ssl(ssl=True):
    """
    Select HTTPS (True, the default) or plain HTTP for API traffic.

    :type ssl: boolean
    """
    _config.update(use_ssl=ssl)
def set_form_id(form_id):
    """
    Register the id of the form subsequent API calls operate on.

    :type form_id: int
    """
    assert isinstance(form_id, int)
    _config.update(form_id=form_id)
def call(uri, request_method=None, data=None, headers=None):
    """
    Shoot off a call to the remote API.

    :param uri: endpoint path, e.g. ``v1/auth/token`` (a leading slash is
        prepended when missing)
    :param request_method: HTTP verb; defaults to POST
    :param data: dict of request parameters (form-encoded for POST)
    :param headers: dict of extra HTTP headers to send
    :return: the JSON-decoded response body
    :raises Exception: when the server does not answer 200, or the body is
        not valid JSON
    """
    # Set default values
    if request_method is None:
        request_method = _constants['REQUEST_TYPE_POST']
    if data is None:
        data = {}
    if headers is None:
        headers = {}

    # Check to make sure first character is a forward slash
    if uri[0:1] != '/':
        uri = '/' + uri

    # Set base headers: bearer token (public key) plus an HMAC signature of
    # the method/uri/data triple.
    headers[_constants['HEADER_PARAM_AUTHORIZATION']] = _constants['HEADER_PARAM_BEARER'].format(_config['public_key'])
    headers[_constants['HEADER_PARAM_SIGNATURE']] = _generate_signature(request_method, uri, data)
    headers[_constants['HEADER_PARAM_ACCEPT']] = _constants['HEADER_APPLICATION_JSON']

    # Initialise the connection (HTTPS unless use_ssl(False) was called)
    if _config['use_ssl']:
        connection = httplib.HTTPSConnection(API_HOSTNAME)
    else:
        connection = httplib.HTTPConnection(API_HOSTNAME)

    if request_method.upper() == _constants['REQUEST_TYPE_POST']:
        # Send a POST request
        headers[_constants['HEADER_PARAM_CONTENT_TYPE']] = _constants['HEADER_URL_FORM_ENCODED_TYPE']
        connection.request(_constants['REQUEST_TYPE_POST'], uri, urllib.urlencode(data), headers)
    elif request_method.upper() == _constants['REQUEST_TYPE_GET']:
        # Send a GET request
        # NOTE(review): headers is passed positionally where httplib expects
        # the request body -- confirm GET headers are actually transmitted.
        connection.request(_constants['REQUEST_TYPE_GET'], uri, headers)

    # Retrieve the results of the request
    result = connection.getresponse()

    # Ensure a valid response was received from the server
    if result.status != 200:
        raise Exception("Unable to connect to remote API")

    # Attempt to decode json result
    res = result.read()
    try:
        data = json.loads(res)
    except ValueError:
        raise Exception("Unable to decode server result")

    return data
def get_token():
    """
    Retrieve an authentication token from the server.

    :return: the token string, or False when the module has not been
        initialised or the response contained no token.
    """
    if not _initialised():
        return False

    payload = {
        'timestamp': int(time.time()),
        'nonce': _generate_nonce()
    }
    response = call('v1/auth/token', 'POST', payload)

    # Attempt to return the result from the server
    try:
        return response['token']
    except KeyError:
        return False
def _initialised():
    """
    Check whether both API keys and the form id have been configured.

    :return: boolean
    """
    try:
        assert isinstance(_config['private_key'], basestring)
        assert isinstance(_config['public_key'], basestring)
        assert isinstance(_config['form_id'], int)
    except AssertionError:
        return False
    return True
def _generate_nonce(length=40):
"""
Generates a random nonce string
:type length: int
"""
return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))
def _api_url():
    """
    Build the base URL of the remote API.

    Fixed: the computed URL is now returned; previously the function built
    the string and implicitly returned None.  (The old docstring was also a
    copy-paste from the signature helper.)

    :return: string, e.g. ``https://api.formcorp.com.au``
    """
    url = "http"
    if _config["use_ssl"]:
        url += "s"
    url += '://{0}'.format(API_HOSTNAME)
    return url
def _generate_signature(request_method, uri, data=None):
    """
    Generate an HMAC-SHA1 signature to send with the request.

    The plaintext is a JSON document of the method, URI and data whose
    formatting is massaged to match the encoding the server verifies
    against (presumably PHP json_encode output: compact separators and
    escaped slashes -- confirm against the server implementation).

    Note: Python 2 only (uses dict.iteritems and unicode), and the
    caller's `data` dict is mutated in place (values are cast to str).

    :param request_method: string
    :param uri: string
    :param data: dict
    :return: string
    """
    if data is None:
        data = {}
    # Typecast each dictionary value to string so the json.dumps output is
    # stable regardless of the original value types (note: mutates `data`).
    if len(data) > 0:
        for attr, value in data.iteritems():
            data[attr] = str(value)
    # Create an ordered dict (python sorts by key hashes, need to sort in order elements were added)
    obj = OrderedDict()
    obj['method'] = request_method.upper()
    obj['uri'] = uri
    obj['data'] = data
    plaintext = json.dumps(obj)
    # Rewrite json.dumps output to the exact byte layout the server signs:
    # no spaces after ':' or ',', and '/' escaped as '\/'. The replacement
    # order matters; do not reorder these calls.
    plaintext = plaintext.replace('": "', '":"').replace('": {', '":{').replace('": [', '":[').replace('/', '\/').replace('", "', '","')
    # Decode to unicode before HMAC'ing, per the configured signature encoding.
    encoded = unicode(plaintext, _constants['SIGNATURE_ENCODING'])
    hash = base64.b64encode(hmac.new(_config['private_key'], encoded, hashlib.sha1).hexdigest())
    return hash
| 26.608491 | 136 | 0.655735 | from collections import OrderedDict
import hashlib
import base64
import hmac
import httplib
import json
import random
import string
import time
import urllib
API_HOSTNAME = 'api.formcorp.com.au'
_constants = {
'SIGNATURE_ENCODING': "utf-8",
'REQUEST_TYPE_POST': 'POST',
'REQUEST_TYPE_GET': 'GET',
'REQUEST_TYPE_PUT': 'PUT',
'HEADER_PARAM_AUTHORIZATION': 'Authorization',
'HEADER_PARAM_SIGNATURE': 'Signature',
'HEADER_PARAM_ACCEPT': 'Accept',
'HEADER_PARAM_BEARER': 'Bearer {0}',
'HEADER_APPLICATION_JSON': 'application/json',
'HEADER_PARAM_CONTENT_TYPE': 'Content-type',
'HEADER_URL_FORM_ENCODED_TYPE': 'application/x-www-form-urlencoded'
}
_config = dict(private_key=None, public_key=None, form_id=None, use_ssl=True)
def init(private_key, public_key):
assert isinstance(private_key, basestring)
assert isinstance(public_key, basestring)
_config['private_key'] = private_key
_config['public_key'] = public_key
def use_ssl(ssl=True):
_config['use_ssl'] = ssl
def set_form_id(form_id):
assert isinstance(form_id, int)
_config['form_id'] = form_id
def call(uri, request_method=None, data=None, headers=None):
if request_method is None:
request_method = _constants['REQUEST_TYPE_POST']
if data is None:
data = {}
if headers is None:
headers = {}
if uri[0:1] != '/':
uri = '/' + uri
headers[_constants['HEADER_PARAM_AUTHORIZATION']] = _constants['HEADER_PARAM_BEARER'].format(_config['public_key'])
headers[_constants['HEADER_PARAM_SIGNATURE']] = _generate_signature(request_method, uri, data)
headers[_constants['HEADER_PARAM_ACCEPT']] = _constants['HEADER_APPLICATION_JSON']
if _config['use_ssl']:
connection = httplib.HTTPSConnection(API_HOSTNAME)
else:
connection = httplib.HTTPConnection(API_HOSTNAME)
if request_method.upper() == _constants['REQUEST_TYPE_POST']:
headers[_constants['HEADER_PARAM_CONTENT_TYPE']] = _constants['HEADER_URL_FORM_ENCODED_TYPE']
connection.request(_constants['REQUEST_TYPE_POST'], uri, urllib.urlencode(data), headers)
elif request_method.upper() == _constants['REQUEST_TYPE_GET']:
connection.request(_constants['REQUEST_TYPE_GET'], uri, headers)
result = connection.getresponse()
if result.status != 200:
raise Exception("Unable to connect to remote API")
res = result.read()
try:
data = json.loads(res)
except ValueError:
raise Exception("Unable to decode server result")
return data
def get_token():
if not _initialised():
return False
post_data = {
'timestamp': int(time.time()),
'nonce': _generate_nonce()
}
result = call('v1/auth/token', 'POST', post_data)
try:
return result['token']
except KeyError:
return False
def _initialised():
try:
assert isinstance(_config['private_key'], basestring)
assert isinstance(_config['public_key'], basestring)
assert isinstance(_config['form_id'], int)
return True
except AssertionError:
return False
def _generate_nonce(length=40):
return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))
def _api_url():
url = "http"
if _config["use_ssl"]:
url += "s"
url += '://{0}'.format(API_HOSTNAME)
def _generate_signature(request_method, uri, data=None):
if data is None:
data = {}
if len(data) > 0:
for attr, value in data.iteritems():
data[attr] = str(value)
obj = OrderedDict()
obj['method'] = request_method.upper()
obj['uri'] = uri
obj['data'] = data
plaintext = json.dumps(obj)
plaintext = plaintext.replace('": "', '":"').replace('": {', '":{').replace('": [', '":[').replace('/', '\/').replace('", "', '","')
encoded = unicode(plaintext, _constants['SIGNATURE_ENCODING'])
hash = base64.b64encode(hmac.new(_config['private_key'], encoded, hashlib.sha1).hexdigest())
return hash
| true | true |
f728d3c7782a5500e0e5be7cb059e077cdfa6300 | 16,000 | py | Python | webmap/migrations/0001_initial.py | tmszi/django-webmap-corpus | 9c6046dd1d2750fa140cb817504bf9603b16e7fa | [
"MIT"
] | 3 | 2015-11-05T01:29:32.000Z | 2018-03-26T10:48:15.000Z | webmap/migrations/0001_initial.py | tmszi/django-webmap-corpus | 9c6046dd1d2750fa140cb817504bf9603b16e7fa | [
"MIT"
] | 1 | 2017-02-11T12:22:31.000Z | 2017-02-12T21:34:47.000Z | webmap/migrations/0001_initial.py | tmszi/django-webmap-corpus | 9c6046dd1d2750fa140cb817504bf9603b16e7fa | [
"MIT"
] | 3 | 2017-02-11T12:08:45.000Z | 2021-02-23T11:37:00.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import colorful.fields
import django.contrib.gis.db.models.fields
from django.conf import settings
import webmap.utils
class Migration(migrations.Migration):
    """Initial schema for the webmap application.

    Auto-generated by Django: creates the map models (Layer with its
    BaseLayer/OverlayLayer subclasses, Legend, License, Marker, Photo, Poi,
    Property, Sector, Status) and then wires up the cross-model foreign
    keys. This is a historical record -- do not hand-edit the field
    definitions (typos in help_text/verbose_name are preserved on purpose).
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Layer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(default='', help_text='Name of the layer', max_length=255, verbose_name='name')),
                ('slug', models.SlugField(unique=True, verbose_name='name in URL')),
                ('desc', models.TextField(help_text='Layer description.', null=True, verbose_name='description', blank=True)),
                ('order', models.IntegerField(default=0, verbose_name='order')),
                ('remark', models.TextField(help_text='Internal information about layer.', null=True, verbose_name='internal remark', blank=True)),
                ('enabled', models.BooleanField(default=True, help_text='True = the layer is enabled on map load', verbose_name='Enabled by defalut')),
            ],
            options={
                'ordering': ['order'],
                'verbose_name': 'layer',
                'verbose_name_plural': 'layers',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='BaseLayer',
            fields=[
                ('layer_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='webmap.Layer', on_delete=models.CASCADE)),
                ('url', models.URLField(help_text='Base layer tiles url. e.g. ', null=True, verbose_name='URL', blank=True)),
            ],
            options={
                'verbose_name': 'base layer',
                'verbose_name_plural': 'base layers',
            },
            bases=('webmap.layer',),
        ),
        migrations.CreateModel(
            name='Legend',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=255, verbose_name='name')),
                ('slug', models.SlugField(unique=True, verbose_name='name in URL')),
                ('desc', models.TextField(null=True, verbose_name='description', blank=True)),
                ('image', models.ImageField(upload_to='ikony', storage=webmap.utils.SlugifyFileSystemStorage(), verbose_name='image')),
            ],
            options={
                'verbose_name': 'legend item',
                'verbose_name_plural': 'legend items',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='License',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='License name', max_length=255, verbose_name='name')),
                ('desc', models.TextField(help_text='License description.', null=True, verbose_name='description', blank=True)),
            ],
            options={
                'verbose_name': 'license',
                'verbose_name_plural': 'licenses',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Marker',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Name of the marker.', unique=True, max_length=255, verbose_name='name')),
                ('slug', models.SlugField(unique=True, null=True, verbose_name='name in URL')),
                ('desc', models.TextField(help_text='Detailed marker descrption.', null=True, verbose_name='description', blank=True)),
                ('remark', models.TextField(help_text='Internal information about layer.', null=True, verbose_name='internal remark', blank=True)),
                ('default_icon', models.ImageField(storage=webmap.utils.SlugifyFileSystemStorage(), upload_to='icons', null=True, verbose_name='default icon', blank=True)),
                ('menu_icon', models.ImageField(storage=webmap.utils.SlugifyFileSystemStorage(), upload_to='icons/marker/menu', null=True, verbose_name='menu icon', blank=True)),
                ('minzoom', models.PositiveIntegerField(default=1, help_text='Minimal zoom in which the POIs of this marker will be shown on the map.', verbose_name='Minimal zoom')),
                ('maxzoom', models.PositiveIntegerField(default=10, help_text='Maximal zoom in which the POIs of this marker will be shown on the map.', verbose_name='Maximal zoom')),
                ('line_width', models.FloatField(default=2, verbose_name='line width')),
                ('line_color', colorful.fields.RGBColorField(default='#ffc90e', verbose_name='line color')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
                ('last_modification', models.DateTimeField(auto_now=True, verbose_name='last modification at')),
            ],
            options={
                'ordering': ['-layer__order', 'name'],
                'verbose_name': 'marker',
                'verbose_name_plural': 'markers',
                'permissions': [('can_only_view', 'Can only view')],
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='OverlayLayer',
            fields=[
                ('layer_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='webmap.Layer', on_delete=models.CASCADE)),
            ],
            options={
                'verbose_name': 'overlay layer',
                'verbose_name_plural': 'overlay layers',
            },
            bases=('webmap.layer',),
        ),
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Photo name', max_length=255, verbose_name='name', blank=True)),
                ('desc', models.TextField(help_text='Photo description.', null=True, verbose_name='description', blank=True)),
                ('order', models.IntegerField(default=0, verbose_name='order')),
                ('photographer', models.CharField(help_text='Full name of the author of the photography', max_length=255, verbose_name='Photography author', blank=True)),
                ('photo', models.ImageField(help_text='Upload photo in full resolution.', upload_to='photo', storage=webmap.utils.SlugifyFileSystemStorage(), verbose_name='photo')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at', null=True)),
                ('last_modification', models.DateTimeField(auto_now=True, verbose_name='last modification at', null=True)),
                ('author', models.ForeignKey(related_name='photo_create', verbose_name='author', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
                ('license', models.ForeignKey(verbose_name='license', to='webmap.License', on_delete=models.CASCADE)),
            ],
            options={
                'ordering': ['order'],
                'verbose_name': 'photo',
                'verbose_name_plural': 'photographies',
                'permissions': [('can_view_photo_list', 'Can view photo list')],
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Poi',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Exact place name', max_length=255, verbose_name='name')),
                ('importance', models.SmallIntegerField(default=0, help_text='Minimal zoom modificator (use 20+ to show always).<br/>', verbose_name='importance')),
                ('geom', django.contrib.gis.db.models.fields.GeometryField(help_text='Add point: Select pencil with plus sign icon and place your point to the map.<br/>\n                Add line: Select line icon and by clicking to map draw the line. Finish drawing with double click.<br/>\n                Add area: Select area icon and by clicking to mapy draw the area. Finish drawing with double click.<br/>\n                Object edition: Select the first icon and then select object in map. Draw points in map to move them, use points in the middle of sections to add new edges.', srid=4326, verbose_name='place geometry')),
                ('desc', models.TextField(help_text='Text that will be shown after selecting POI.', null=True, verbose_name='description', blank=True)),
                ('desc_extra', models.TextField(help_text='Text that extends the description.', null=True, verbose_name='detailed description', blank=True)),
                ('url', models.URLField(help_text='Link to the web page of the place.', null=True, verbose_name='URL', blank=True)),
                ('address', models.CharField(help_text='Poi address (street, house number)', max_length=255, null=True, verbose_name='adress', blank=True)),
                ('remark', models.TextField(help_text='Internal information about POI.', null=True, verbose_name='Internal remark', blank=True)),
                ('properties_cache', models.CharField(max_length=255, null=True, blank=True)),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
                ('last_modification', models.DateTimeField(auto_now=True, verbose_name='last modification at')),
                ('author', models.ForeignKey(related_name='poi_create', verbose_name='author', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
                ('marker', models.ForeignKey(related_name='pois', verbose_name='marker', to='webmap.Marker', help_text='Select icon, that will be shown in map', on_delete=models.CASCADE)),
            ],
            options={
                'verbose_name': 'place',
                'verbose_name_plural': 'places',
                'permissions': [('can_only_own_data_only', 'Can only edit his own data'), ('can_edit_advanced_fields', 'Can edit importance status')],
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Property',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Status name', max_length=255, verbose_name='name')),
                ('as_filter', models.BooleanField(default=False, help_text='Show as a filter in right map menu?', verbose_name='as filter?')),
                ('order', models.IntegerField(default=0, verbose_name='order')),
                ('slug', models.SlugField(unique=True, verbose_name='Name in URL')),
                ('desc', models.TextField(help_text='Property description.', null=True, verbose_name='description', blank=True)),
                ('remark', models.TextField(help_text='Internal information about the property.', null=True, verbose_name='Internal remark', blank=True)),
                ('default_icon', models.ImageField(storage=webmap.utils.SlugifyFileSystemStorage(), upload_to='icons', null=True, verbose_name='default icon', blank=True)),
            ],
            options={
                'ordering': ['order'],
                'verbose_name': 'property',
                'verbose_name_plural': 'properties',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Sector',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255, verbose_name='name')),
                ('slug', models.SlugField(unique=True, verbose_name='name in URL')),
                ('geom', django.contrib.gis.db.models.fields.PolygonField(help_text='Sector area', srid=4326, verbose_name='area')),
            ],
            options={
                'verbose_name': 'sector',
                'verbose_name_plural': 'sectors',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Status',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Status name', unique=True, max_length=255, verbose_name='name')),
                ('desc', models.TextField(help_text='Status description.', null=True, verbose_name='description', blank=True)),
                ('show', models.BooleanField(default=False, help_text='Show to map user', verbose_name='show')),
                ('show_to_mapper', models.BooleanField(default=False, help_text='Show to mapper', verbose_name='show to mapper')),
            ],
            options={
                'verbose_name': 'status',
                'verbose_name_plural': 'statuses',
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='property',
            name='status',
            field=models.ForeignKey(verbose_name='status', to='webmap.Status', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='poi',
            name='properties',
            field=models.ManyToManyField(help_text='POI properties', to='webmap.Property', null=True, verbose_name='properties', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='poi',
            name='status',
            field=models.ForeignKey(default=0, verbose_name='status', to='webmap.Status', help_text='POI status, determinse if it will be shown in map', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='poi',
            name='updated_by',
            field=models.ForeignKey(related_name='poi_update', verbose_name='last updated by', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='photo',
            name='poi',
            field=models.ForeignKey(related_name='photos', verbose_name='poi', to='webmap.Poi', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='photo',
            name='updated_by',
            field=models.ForeignKey(related_name='photo_update', verbose_name='last updated by', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='marker',
            name='layer',
            field=models.ForeignKey(verbose_name='layer', to='webmap.Layer', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='marker',
            name='status',
            field=models.ForeignKey(verbose_name='status', to='webmap.Status', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='layer',
            name='status',
            field=models.ForeignKey(verbose_name='status', to='webmap.Status', on_delete=models.CASCADE),
            preserve_default=True,
        ),
    ]
| 61.068702 | 619 | 0.605375 |
from __future__ import unicode_literals
from django.db import models, migrations
import colorful.fields
import django.contrib.gis.db.models.fields
from django.conf import settings
import webmap.utils
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Layer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default='', help_text='Name of the layer', max_length=255, verbose_name='name')),
('slug', models.SlugField(unique=True, verbose_name='name in URL')),
('desc', models.TextField(help_text='Layer description.', null=True, verbose_name='description', blank=True)),
('order', models.IntegerField(default=0, verbose_name='order')),
('remark', models.TextField(help_text='Internal information about layer.', null=True, verbose_name='internal remark', blank=True)),
('enabled', models.BooleanField(default=True, help_text='True = the layer is enabled on map load', verbose_name='Enabled by defalut')),
],
options={
'ordering': ['order'],
'verbose_name': 'layer',
'verbose_name_plural': 'layers',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='BaseLayer',
fields=[
('layer_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='webmap.Layer', on_delete=models.CASCADE)),
('url', models.URLField(help_text='Base layer tiles url. e.g. ', null=True, verbose_name='URL', blank=True)),
],
options={
'verbose_name': 'base layer',
'verbose_name_plural': 'base layers',
},
bases=('webmap.layer',),
),
migrations.CreateModel(
name='Legend',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=255, verbose_name='name')),
('slug', models.SlugField(unique=True, verbose_name='name in URL')),
('desc', models.TextField(null=True, verbose_name='description', blank=True)),
('image', models.ImageField(upload_to='ikony', storage=webmap.utils.SlugifyFileSystemStorage(), verbose_name='image')),
],
options={
'verbose_name': 'legend item',
'verbose_name_plural': 'legend items',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='License',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='License name', max_length=255, verbose_name='name')),
('desc', models.TextField(help_text='License description.', null=True, verbose_name='description', blank=True)),
],
options={
'verbose_name': 'license',
'verbose_name_plural': 'licenses',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Marker',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='Name of the marker.', unique=True, max_length=255, verbose_name='name')),
('slug', models.SlugField(unique=True, null=True, verbose_name='name in URL')),
('desc', models.TextField(help_text='Detailed marker descrption.', null=True, verbose_name='description', blank=True)),
('remark', models.TextField(help_text='Internal information about layer.', null=True, verbose_name='internal remark', blank=True)),
('default_icon', models.ImageField(storage=webmap.utils.SlugifyFileSystemStorage(), upload_to='icons', null=True, verbose_name='default icon', blank=True)),
('menu_icon', models.ImageField(storage=webmap.utils.SlugifyFileSystemStorage(), upload_to='icons/marker/menu', null=True, verbose_name='menu icon', blank=True)),
('minzoom', models.PositiveIntegerField(default=1, help_text='Minimal zoom in which the POIs of this marker will be shown on the map.', verbose_name='Minimal zoom')),
('maxzoom', models.PositiveIntegerField(default=10, help_text='Maximal zoom in which the POIs of this marker will be shown on the map.', verbose_name='Maximal zoom')),
('line_width', models.FloatField(default=2, verbose_name='line width')),
('line_color', colorful.fields.RGBColorField(default='#ffc90e', verbose_name='line color')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('last_modification', models.DateTimeField(auto_now=True, verbose_name='last modification at')),
],
options={
'ordering': ['-layer__order', 'name'],
'verbose_name': 'marker',
'verbose_name_plural': 'markers',
'permissions': [('can_only_view', 'Can only view')],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverlayLayer',
fields=[
('layer_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='webmap.Layer', on_delete=models.CASCADE)),
],
options={
'verbose_name': 'overlay layer',
'verbose_name_plural': 'overlay layers',
},
bases=('webmap.layer',),
),
migrations.CreateModel(
name='Photo',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='Photo name', max_length=255, verbose_name='name', blank=True)),
('desc', models.TextField(help_text='Photo description.', null=True, verbose_name='description', blank=True)),
('order', models.IntegerField(default=0, verbose_name='order')),
('photographer', models.CharField(help_text='Full name of the author of the photography', max_length=255, verbose_name='Photography author', blank=True)),
('photo', models.ImageField(help_text='Upload photo in full resolution.', upload_to='photo', storage=webmap.utils.SlugifyFileSystemStorage(), verbose_name='photo')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at', null=True)),
('last_modification', models.DateTimeField(auto_now=True, verbose_name='last modification at', null=True)),
('author', models.ForeignKey(related_name='photo_create', verbose_name='author', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
('license', models.ForeignKey(verbose_name='license', to='webmap.License', on_delete=models.CASCADE)),
],
options={
'ordering': ['order'],
'verbose_name': 'photo',
'verbose_name_plural': 'photographies',
'permissions': [('can_view_photo_list', 'Can view photo list')],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Poi',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='Exact place name', max_length=255, verbose_name='name')),
('importance', models.SmallIntegerField(default=0, help_text='Minimal zoom modificator (use 20+ to show always).<br/>', verbose_name='importance')),
('geom', django.contrib.gis.db.models.fields.GeometryField(help_text='Add point: Select pencil with plus sign icon and place your point to the map.<br/>\n Add line: Select line icon and by clicking to map draw the line. Finish drawing with double click.<br/>\n Add area: Select area icon and by clicking to mapy draw the area. Finish drawing with double click.<br/>\n Object edition: Select the first icon and then select object in map. Draw points in map to move them, use points in the middle of sections to add new edges.', srid=4326, verbose_name='place geometry')),
('desc', models.TextField(help_text='Text that will be shown after selecting POI.', null=True, verbose_name='description', blank=True)),
('desc_extra', models.TextField(help_text='Text that extends the description.', null=True, verbose_name='detailed description', blank=True)),
('url', models.URLField(help_text='Link to the web page of the place.', null=True, verbose_name='URL', blank=True)),
('address', models.CharField(help_text='Poi address (street, house number)', max_length=255, null=True, verbose_name='adress', blank=True)),
('remark', models.TextField(help_text='Internal information about POI.', null=True, verbose_name='Internal remark', blank=True)),
('properties_cache', models.CharField(max_length=255, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('last_modification', models.DateTimeField(auto_now=True, verbose_name='last modification at')),
('author', models.ForeignKey(related_name='poi_create', verbose_name='author', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
('marker', models.ForeignKey(related_name='pois', verbose_name='marker', to='webmap.Marker', help_text='Select icon, that will be shown in map', on_delete=models.CASCADE)),
],
options={
'verbose_name': 'place',
'verbose_name_plural': 'places',
'permissions': [('can_only_own_data_only', 'Can only edit his own data'), ('can_edit_advanced_fields', 'Can edit importance status')],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Property',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='Status name', max_length=255, verbose_name='name')),
('as_filter', models.BooleanField(default=False, help_text='Show as a filter in right map menu?', verbose_name='as filter?')),
('order', models.IntegerField(default=0, verbose_name='order')),
('slug', models.SlugField(unique=True, verbose_name='Name in URL')),
('desc', models.TextField(help_text='Property description.', null=True, verbose_name='description', blank=True)),
('remark', models.TextField(help_text='Internal information about the property.', null=True, verbose_name='Internal remark', blank=True)),
('default_icon', models.ImageField(storage=webmap.utils.SlugifyFileSystemStorage(), upload_to='icons', null=True, verbose_name='default icon', blank=True)),
],
options={
'ordering': ['order'],
'verbose_name': 'property',
'verbose_name_plural': 'properties',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Sector',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255, verbose_name='name')),
('slug', models.SlugField(unique=True, verbose_name='name in URL')),
('geom', django.contrib.gis.db.models.fields.PolygonField(help_text='Sector area', srid=4326, verbose_name='area')),
],
options={
'verbose_name': 'sector',
'verbose_name_plural': 'sectors',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Status',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='Status name', unique=True, max_length=255, verbose_name='name')),
('desc', models.TextField(help_text='Status description.', null=True, verbose_name='description', blank=True)),
('show', models.BooleanField(default=False, help_text='Show to map user', verbose_name='show')),
('show_to_mapper', models.BooleanField(default=False, help_text='Show to mapper', verbose_name='show to mapper')),
],
options={
'verbose_name': 'status',
'verbose_name_plural': 'statuses',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='property',
name='status',
field=models.ForeignKey(verbose_name='status', to='webmap.Status', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='poi',
name='properties',
field=models.ManyToManyField(help_text='POI properties', to='webmap.Property', null=True, verbose_name='properties', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='poi',
name='status',
field=models.ForeignKey(default=0, verbose_name='status', to='webmap.Status', help_text='POI status, determinse if it will be shown in map', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='poi',
name='updated_by',
field=models.ForeignKey(related_name='poi_update', verbose_name='last updated by', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='photo',
name='poi',
field=models.ForeignKey(related_name='photos', verbose_name='poi', to='webmap.Poi', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='photo',
name='updated_by',
field=models.ForeignKey(related_name='photo_update', verbose_name='last updated by', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='marker',
name='layer',
field=models.ForeignKey(verbose_name='layer', to='webmap.Layer', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='marker',
name='status',
field=models.ForeignKey(verbose_name='status', to='webmap.Status', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='layer',
name='status',
field=models.ForeignKey(verbose_name='status', to='webmap.Status', on_delete=models.CASCADE),
preserve_default=True,
),
]
| true | true |
f728d42a9fd315f1f87a7ea0ded8196af22dec63 | 5,396 | py | Python | ros/src/twist_controller/dbw_node.py | roopakingole/CarND-Capstone | e286df9e1388a46240705df5175e46520082a3a0 | [
"MIT"
] | null | null | null | ros/src/twist_controller/dbw_node.py | roopakingole/CarND-Capstone | e286df9e1388a46240705df5175e46520082a3a0 | [
"MIT"
] | null | null | null | ros/src/twist_controller/dbw_node.py | roopakingole/CarND-Capstone | e286df9e1388a46240705df5175e46520082a3a0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
from geometry_msgs.msg import TwistStamped
import math
from twist_controller import Controller
'''
You can build this node only after you have built (or partially built) the `waypoint_updater` node.
You will subscribe to `/twist_cmd` message which provides the proposed linear and angular velocities.
You can subscribe to any other message that you find important or refer to the document for list
of messages subscribed to by the reference implementation of this node.
One thing to keep in mind while building this node and the `twist_controller` class is the status
of `dbw_enabled`. While in the simulator, its enabled all the time, in the real car, that will
not be the case. This may cause your PID controller to accumulate error because the car could
temporarily be driven by a human instead of your controller.
We have provided two launch files with this node. Vehicle specific values (like vehicle_mass,
wheel_base) etc should not be altered in these files.
We have also provided some reference implementations for PID controller and other utility classes.
You are free to use them or build your own.
Once you have the proposed throttle, brake, and steer values, publish it on the various publishers
that we have created in the `__init__` function.
'''
class DBWNode(object):
    """Drive-by-wire ROS node.

    Subscribes to the target twist (`/twist_cmd`), the measured velocity
    (`/current_velocity`) and the DBW-engaged flag, feeds them through the
    project `Controller`, and publishes throttle/brake/steering commands at
    50 Hz. Commands are published only while drive-by-wire is engaged so the
    controller does not fight a human safety driver.
    """

    def __init__(self):
        rospy.init_node('dbw_node')

        # Vehicle-specific parameters; set by the provided launch files and
        # not to be altered there.
        vehicle_mass = rospy.get_param('~vehicle_mass', 1736.35)
        fuel_capacity = rospy.get_param('~fuel_capacity', 13.5)
        brake_deadband = rospy.get_param('~brake_deadband', .1)
        decel_limit = rospy.get_param('~decel_limit', -5)
        accel_limit = rospy.get_param('~accel_limit', 1.)
        wheel_radius = rospy.get_param('~wheel_radius', 0.2413)
        wheel_base = rospy.get_param('~wheel_base', 2.8498)
        steer_ratio = rospy.get_param('~steer_ratio', 14.8)
        max_lat_accel = rospy.get_param('~max_lat_accel', 3.)
        max_steer_angle = rospy.get_param('~max_steer_angle', 8.)

        self.steer_pub = rospy.Publisher('/vehicle/steering_cmd',
                                         SteeringCmd, queue_size=1)
        self.throttle_pub = rospy.Publisher('/vehicle/throttle_cmd',
                                            ThrottleCmd, queue_size=1)
        self.brake_pub = rospy.Publisher('/vehicle/brake_cmd',
                                         BrakeCmd, queue_size=1)

        self.controller = Controller(vehicle_mass=vehicle_mass,
                                     fuel_capacity=fuel_capacity,
                                     brake_deadband=brake_deadband,
                                     decel_limit=decel_limit,
                                     accel_limit=accel_limit,
                                     wheel_radius=wheel_radius,
                                     wheel_base=wheel_base,
                                     steer_ratio=steer_ratio,
                                     max_lat_accel=max_lat_accel,
                                     max_steer_angle=max_steer_angle)

        rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb)
        rospy.Subscriber('/twist_cmd', TwistStamped, self.twist_cb)
        rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)

        # Latest state received via the subscriptions above.
        self.current_vel = None
        self.curr_ang_vel = None
        self.dbw_enabled = None
        self.linear_vel = None
        self.angular_vel = None
        self.throttle = self.steering = self.brake = 0

        self.loop()

    def loop(self):
        """Run the 50 Hz control loop until ROS shutdown."""
        rate = rospy.Rate(50)  # 50Hz
        while not rospy.is_shutdown():
            # Only compute commands once all required inputs have arrived.
            if None not in (self.current_vel, self.linear_vel, self.angular_vel):
                self.throttle, self.brake, self.steering = self.controller.control(self.current_vel,
                                                                                   self.dbw_enabled,
                                                                                   self.linear_vel,
                                                                                   self.angular_vel)
            if self.dbw_enabled:
                self.publish(self.throttle, self.brake, self.steering)
            rate.sleep()

    def dbw_enabled_cb(self, msg):
        """Record whether drive-by-wire is engaged.

        Bug fix: store the boolean payload rather than the std_msgs/Bool
        message object itself — a message object is always truthy, so the
        ``if self.dbw_enabled:`` gate in loop() could never disengage.
        """
        self.dbw_enabled = msg.data

    def twist_cb(self, msg):
        """Cache the commanded linear and angular velocity."""
        self.linear_vel = msg.twist.linear.x
        self.angular_vel = msg.twist.angular.z

    def velocity_cb(self, msg):
        """Cache the measured forward velocity."""
        self.current_vel = msg.twist.linear.x

    def publish(self, throttle, brake, steer):
        """Publish one throttle, steering and brake command triple."""
        tcmd = ThrottleCmd()
        tcmd.enable = True
        tcmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT
        tcmd.pedal_cmd = throttle
        self.throttle_pub.publish(tcmd)

        scmd = SteeringCmd()
        scmd.enable = True
        scmd.steering_wheel_angle_cmd = steer
        self.steer_pub.publish(scmd)

        bcmd = BrakeCmd()
        bcmd.enable = True
        bcmd.pedal_cmd_type = BrakeCmd.CMD_TORQUE
        bcmd.pedal_cmd = brake
        self.brake_pub.publish(bcmd)
# Start the node when run as a ROS executable; the constructor does not
# return because it enters the 50 Hz control loop.
if __name__ == '__main__':
    DBWNode()
| 42.488189 | 101 | 0.625093 |
import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
from geometry_msgs.msg import TwistStamped
import math
from twist_controller import Controller
class DBWNode(object):
    """Drive-by-wire ROS node: converts twist commands into throttle,
    brake and steering commands, publishing only while DBW is engaged."""
    def __init__(self):
        rospy.init_node('dbw_node')
        # Vehicle-specific parameters supplied by the launch files.
        vehicle_mass = rospy.get_param('~vehicle_mass', 1736.35)
        fuel_capacity = rospy.get_param('~fuel_capacity', 13.5)
        brake_deadband = rospy.get_param('~brake_deadband', .1)
        decel_limit = rospy.get_param('~decel_limit', -5)
        accel_limit = rospy.get_param('~accel_limit', 1.)
        wheel_radius = rospy.get_param('~wheel_radius', 0.2413)
        wheel_base = rospy.get_param('~wheel_base', 2.8498)
        steer_ratio = rospy.get_param('~steer_ratio', 14.8)
        max_lat_accel = rospy.get_param('~max_lat_accel', 3.)
        max_steer_angle = rospy.get_param('~max_steer_angle', 8.)
        self.steer_pub = rospy.Publisher('/vehicle/steering_cmd',
                                         SteeringCmd, queue_size=1)
        self.throttle_pub = rospy.Publisher('/vehicle/throttle_cmd',
                                            ThrottleCmd, queue_size=1)
        self.brake_pub = rospy.Publisher('/vehicle/brake_cmd',
                                         BrakeCmd, queue_size=1)
        self.controller = Controller(vehicle_mass=vehicle_mass,
                                     fuel_capacity=fuel_capacity,
                                     brake_deadband=brake_deadband,
                                     decel_limit=decel_limit,
                                     accel_limit=accel_limit,
                                     wheel_radius=wheel_radius,
                                     wheel_base=wheel_base,
                                     steer_ratio=steer_ratio,
                                     max_lat_accel=max_lat_accel,
                                     max_steer_angle=max_steer_angle)
        rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb)
        rospy.Subscriber('/twist_cmd', TwistStamped, self.twist_cb)
        rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)
        # Latest state received via the subscriptions above.
        self.current_vel = None
        self.curr_ang_vel = None
        self.dbw_enabled = None
        self.linear_vel = None
        self.angular_vel = None
        self.throttle = self.steering = self.brake = 0
        self.loop()
    def loop(self):
        """Run the 50 Hz control loop until ROS shutdown."""
        rate = rospy.Rate(50)
        while not rospy.is_shutdown():
            # Only compute commands once all required inputs have arrived.
            if not None in (self.current_vel, self.linear_vel, self.angular_vel):
                self.throttle, self.brake, self.steering = self.controller.control(self.current_vel,
                                                                                   self.dbw_enabled,
                                                                                   self.linear_vel,
                                                                                   self.angular_vel)
            if self.dbw_enabled:
                self.publish(self.throttle, self.brake, self.steering)
            rate.sleep()
    def dbw_enabled_cb(self, msg):
        # NOTE(review): this stores the std_msgs/Bool message object, not
        # msg.data; a message object is always truthy, so the gate in
        # loop() would never disengage — confirm whether msg.data is meant.
        self.dbw_enabled = msg
    def twist_cb(self, msg):
        """Cache the commanded linear and angular velocity."""
        self.linear_vel = msg.twist.linear.x
        self.angular_vel = msg.twist.angular.z
    def velocity_cb(self, msg):
        """Cache the measured forward velocity."""
        self.current_vel = msg.twist.linear.x
    def publish(self, throttle, brake, steer):
        """Publish one throttle, steering and brake command triple."""
        tcmd = ThrottleCmd()
        tcmd.enable = True
        tcmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT
        tcmd.pedal_cmd = throttle
        self.throttle_pub.publish(tcmd)
        scmd = SteeringCmd()
        scmd.enable = True
        scmd.steering_wheel_angle_cmd = steer
        self.steer_pub.publish(scmd)
        bcmd = BrakeCmd()
        bcmd.enable = True
        bcmd.pedal_cmd_type = BrakeCmd.CMD_TORQUE
        bcmd.pedal_cmd = brake
        self.brake_pub.publish(bcmd)
# Start the node when run as a ROS executable; the constructor does not
# return because it enters the control loop.
if __name__ == '__main__':
    DBWNode()
| true | true |
f728d4a4b2bf15aa63f77688d6594a23123c0835 | 1,301 | py | Python | Dev/Tests/Whiskers_Roam_Expanded.py | parallaxinc/cyberbot | f7c4d355ee0310dcfef81027802cc41ac6ce90e1 | [
"MIT"
] | 4 | 2019-03-18T20:49:41.000Z | 2022-03-24T01:44:36.000Z | Dev/Tests/Whiskers_Roam_Expanded.py | parallaxinc/cyberbot | f7c4d355ee0310dcfef81027802cc41ac6ce90e1 | [
"MIT"
] | 5 | 2019-06-07T18:09:27.000Z | 2021-04-08T17:16:55.000Z | Dev/Tests/Whiskers_Roam_Expanded.py | parallaxinc/cyberbot | f7c4d355ee0310dcfef81027802cc41ac6ce90e1 | [
"MIT"
] | null | null | null | # Whiskers_Roam_Expanded.py
from cyberbot import *
# Start-up beep so the user knows the program is running.
bot(22).tone(2000, 300)
# Poll both whisker switches forever and steer away from obstacles.
# A reading of 1 means the whisker is NOT pressed (see the go-forward case).
while True:
    left = bot(7).read_digital()
    right = bot(9).read_digital()
    if left == 1 and right == 1:        # no contact: go forward
        bot(18).servo_speed(75)
        bot(19).servo_speed(-75)
        display.show(Image.ARROW_S)
        bot(15).write_digital(0)        # indicator pins off
        bot(0).write_digital(0)
    elif left == 1 and right == 0:      # obstacle on right
        bot(18).servo_speed(-75)        # back up for 1 s ...
        bot(19).servo_speed(75)
        display.show(Image.ARROW_N)
        bot(15).write_digital(0)
        bot(0).write_digital(1)
        sleep(1000)
        bot(18).servo_speed(-75)        # ... then turn left for 0.6 s
        bot(19).servo_speed(-75)
        display.show(Image.ARROW_E)
        sleep(600)
    elif left == 0 and right ==1:       # obstacle on left
        bot(18).servo_speed(-75)        # back up for 1 s ...
        bot(19).servo_speed(75)
        display.show(Image.ARROW_N)
        bot(15).write_digital(1)
        bot(0).write_digital(0)
        sleep(1000)
        bot(18).servo_speed(75)         # ... then turn right for 0.6 s
        bot(19).servo_speed(75)
        display.show(Image.ARROW_W)
        sleep(600)
    elif left == 0 and right == 0:      # obstacle on both sides
        bot(18).servo_speed(-75)        # back up for 1 s ...
        bot(19).servo_speed(75)
        display.show(Image.ARROW_N)
        bot(15).write_digital(1)        # both indicator pins on
        bot(0).write_digital(1)
        sleep(1000)
        bot(18).servo_speed(75)         # ... then turn for 1 s
        bot(19).servo_speed(75)
        display.show(Image.ARROW_W)
sleep(1000) | 26.55102 | 57 | 0.693313 |
from cyberbot import *
# Start-up beep, then roam: read both whisker switches and steer away
# from whatever they touch. A reading of 1 means "not pressed".
bot(22).tone(2000, 300)
while True:
    left = bot(7).read_digital()
    right = bot(9).read_digital()
    if left == 1 and right == 1:        # no contact: drive forward
        bot(18).servo_speed(75)
        bot(19).servo_speed(-75)
        display.show(Image.ARROW_S)
        bot(15).write_digital(0)
        bot(0).write_digital(0)
    elif left == 1 and right == 0:      # obstacle on right: back up, turn left
        bot(18).servo_speed(-75)
        bot(19).servo_speed(75)
        display.show(Image.ARROW_N)
        bot(15).write_digital(0)
        bot(0).write_digital(1)
        sleep(1000)
        bot(18).servo_speed(-75)
        bot(19).servo_speed(-75)
        display.show(Image.ARROW_E)
        sleep(600)
    elif left == 0 and right ==1:       # obstacle on left: back up, turn right
        bot(18).servo_speed(-75)
        bot(19).servo_speed(75)
        display.show(Image.ARROW_N)
        bot(15).write_digital(1)
        bot(0).write_digital(0)
        sleep(1000)
        bot(18).servo_speed(75)
        bot(19).servo_speed(75)
        display.show(Image.ARROW_W)
        sleep(600)
    elif left == 0 and right == 0:      # obstacle on both sides: back up, turn
        bot(18).servo_speed(-75)
        bot(19).servo_speed(75)
        display.show(Image.ARROW_N)
        bot(15).write_digital(1)
        bot(0).write_digital(1)
        sleep(1000)
        bot(18).servo_speed(75)
        bot(19).servo_speed(75)
        display.show(Image.ARROW_W)
sleep(1000) | true | true |
f728d5be0024cc7a2e17aae74076499dffb5852e | 3,971 | py | Python | py/gps_building_blocks/analysis/exp_design/ab_testing_design_test.py | isabella232/gps_building_blocks | 86ef8be60a42cd12e27696007589388b7b053f4f | [
"Apache-2.0"
] | null | null | null | py/gps_building_blocks/analysis/exp_design/ab_testing_design_test.py | isabella232/gps_building_blocks | 86ef8be60a42cd12e27696007589388b7b053f4f | [
"Apache-2.0"
] | 1 | 2021-06-18T14:42:25.000Z | 2021-06-18T14:42:25.000Z | py/gps_building_blocks/analysis/exp_design/ab_testing_design_test.py | isabella232/gps_building_blocks | 86ef8be60a42cd12e27696007589388b7b053f4f | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for gps_building_blocks.analysis.exp_design.ab_testing_design."""
from absl.testing import absltest
import numpy as np
from gps_building_blocks.analysis.exp_design import ab_testing_design
# Shared fixtures for the tests below: a baseline conversion rate and
# expected uplift (both in percent), plus 20 binary labels with matching
# model probability predictions.
BASELINE_CONVERSION_RATE_PERCENTAGE = 5
EXPECTED_UPLIFT_PERCENTAGE = 10
LABELS = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
PREDICTIONS = np.array([
    0.7, 0.63, 0.4, 0.77, 0.45, 0.8, 0.41, 0.82, 0.7, 0.6, 0.5, 0.45, 0.74,
    0.11, 0.21, 0.05, 0.67, 0.79, 0.60, 0.10
])
class ABTestingExperimentalDesignTest(absltest.TestCase):
  """Golden-value regression tests for the ab_testing_design helpers."""

  def test_calc_chisquared_sample_size_returns_correct_values(self):
    """Default power/confidence yield the documented sample size."""
    result_sample_size = ab_testing_design.calc_chisquared_sample_size(
        baseline_conversion_rate_percentage=BASELINE_CONVERSION_RATE_PERCENTAGE,
        expected_uplift_percentage=EXPECTED_UPLIFT_PERCENTAGE)
    self.assertEqual(result_sample_size, 14913.0)

  def test_calc_chisquared_sample_size_change_power_and_confidence(self):
    """Higher power/confidence require a larger sample."""
    result_sample_size = ab_testing_design.calc_chisquared_sample_size(
        baseline_conversion_rate_percentage=BASELINE_CONVERSION_RATE_PERCENTAGE,
        expected_uplift_percentage=EXPECTED_UPLIFT_PERCENTAGE,
        power_percentage=90,
        confidence_level_percentage=99)
    self.assertEqual(result_sample_size, 28271.0)

  def test_calc_chisquared_sample_sizes_for_bins_returns_correct_values(self):
    """Per-bin result frame has the expected shape, columns and values."""
    results = ab_testing_design.calc_chisquared_sample_sizes_for_bins(
        labels=LABELS, probability_predictions=PREDICTIONS, number_bins=3)
    self.assertEqual(results.shape, (24, 7))
    self.assertListEqual(
        list(results.columns), [
            'bin_number', 'bin_size', 'conv_rate_percentage',
            'uplift_percentage', 'power_percentage',
            'confidence_level_percentage', 'sample_size'
        ])
    self.assertListEqual(
        list(results['sample_size']), [
            248.0, 314.0, 343.0, 421.0, 62.0, 79.0, 86.0, 106.0, 928.0, 1178.0,
            1285.0, 1577.0, 232.0, 295.0, 322.0, 395.0, 1031.0, 1309.0, 1428.0,
            1752.0, 258.0, 328.0, 357.0, 438.0
        ])

  def test_resulted_bin_metrics_does_not_contain_nas(self):
    """No NaNs anywhere in the per-bin metrics."""
    results = ab_testing_design.calc_chisquared_sample_sizes_for_bins(
        labels=LABELS, probability_predictions=PREDICTIONS, number_bins=3)
    self.assertFalse(results.isna().values.any())

  def test_calc_chisquared_sample_sizes_for_cumulative_bins_returns_right_vals(
      self):
    """Cumulative-bin result frame has the expected shape, columns, values."""
    results = ab_testing_design.calc_chisquared_sample_sizes_for_cumulative_bins(
        labels=LABELS, probability_predictions=PREDICTIONS, number_bins=5)
    self.assertEqual(results.shape, (40, 8))
    self.assertListEqual(
        list(results.columns), [
            'cumulative_bin_number', 'bin_size', 'bin_size_percentage',
            'conv_rate_percentage', 'uplift_percentage', 'power_percentage',
            'confidence_level_percentage', 'sample_size'
        ])
    self.assertListEqual(
        list(results['sample_size']), [
            207.0, 262.0, 286.0, 351.0, 52.0, 66.0, 72.0, 88.0, 371.0, 471.0,
            514.0, 631.0, 93.0, 118.0, 129.0, 158.0, 442.0, 561.0, 612.0, 751.0,
            111.0, 141.0, 153.0, 188.0, 371.0, 471.0, 514.0, 631.0, 93.0, 118.0,
            129.0, 158.0, 619.0, 785.0, 857.0, 1051.0, 155.0, 197.0, 215.0,
            263.0
        ])
# Allow running this test module directly with the absl test runner.
if __name__ == '__main__':
  absltest.main()
| 41.8 | 81 | 0.706875 |
from absl.testing import absltest
import numpy as np
from gps_building_blocks.analysis.exp_design import ab_testing_design
# Shared fixtures: baseline conversion rate and expected uplift (percent),
# plus 20 binary labels with matching probability predictions.
BASELINE_CONVERSION_RATE_PERCENTAGE = 5
EXPECTED_UPLIFT_PERCENTAGE = 10
LABELS = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
PREDICTIONS = np.array([
    0.7, 0.63, 0.4, 0.77, 0.45, 0.8, 0.41, 0.82, 0.7, 0.6, 0.5, 0.45, 0.74,
    0.11, 0.21, 0.05, 0.67, 0.79, 0.60, 0.10
])
class ABTestingExperimentalDesignTest(absltest.TestCase):
  """Golden-value regression tests for the ab_testing_design helpers."""
  def test_calc_chisquared_sample_size_returns_correct_values(self):
    """Default power/confidence yield the documented sample size."""
    result_sample_size = ab_testing_design.calc_chisquared_sample_size(
        baseline_conversion_rate_percentage=BASELINE_CONVERSION_RATE_PERCENTAGE,
        expected_uplift_percentage=EXPECTED_UPLIFT_PERCENTAGE)
    self.assertEqual(result_sample_size, 14913.0)
  def test_calc_chisquared_sample_size_change_power_and_confidence(self):
    """Higher power/confidence require a larger sample."""
    result_sample_size = ab_testing_design.calc_chisquared_sample_size(
        baseline_conversion_rate_percentage=BASELINE_CONVERSION_RATE_PERCENTAGE,
        expected_uplift_percentage=EXPECTED_UPLIFT_PERCENTAGE,
        power_percentage=90,
        confidence_level_percentage=99)
    self.assertEqual(result_sample_size, 28271.0)
  def test_calc_chisquared_sample_sizes_for_bins_returns_correct_values(self):
    """Per-bin result frame has the expected shape, columns and values."""
    results = ab_testing_design.calc_chisquared_sample_sizes_for_bins(
        labels=LABELS, probability_predictions=PREDICTIONS, number_bins=3)
    self.assertEqual(results.shape, (24, 7))
    self.assertListEqual(
        list(results.columns), [
            'bin_number', 'bin_size', 'conv_rate_percentage',
            'uplift_percentage', 'power_percentage',
            'confidence_level_percentage', 'sample_size'
        ])
    self.assertListEqual(
        list(results['sample_size']), [
            248.0, 314.0, 343.0, 421.0, 62.0, 79.0, 86.0, 106.0, 928.0, 1178.0,
            1285.0, 1577.0, 232.0, 295.0, 322.0, 395.0, 1031.0, 1309.0, 1428.0,
            1752.0, 258.0, 328.0, 357.0, 438.0
        ])
  def test_resulted_bin_metrics_does_not_contain_nas(self):
    """No NaNs anywhere in the per-bin metrics."""
    results = ab_testing_design.calc_chisquared_sample_sizes_for_bins(
        labels=LABELS, probability_predictions=PREDICTIONS, number_bins=3)
    self.assertFalse(results.isna().values.any())
  def test_calc_chisquared_sample_sizes_for_cumulative_bins_returns_right_vals(
      self):
    """Cumulative-bin result frame has the expected shape, columns, values."""
    results = ab_testing_design.calc_chisquared_sample_sizes_for_cumulative_bins(
        labels=LABELS, probability_predictions=PREDICTIONS, number_bins=5)
    self.assertEqual(results.shape, (40, 8))
    self.assertListEqual(
        list(results.columns), [
            'cumulative_bin_number', 'bin_size', 'bin_size_percentage',
            'conv_rate_percentage', 'uplift_percentage', 'power_percentage',
            'confidence_level_percentage', 'sample_size'
        ])
    self.assertListEqual(
        list(results['sample_size']), [
            207.0, 262.0, 286.0, 351.0, 52.0, 66.0, 72.0, 88.0, 371.0, 471.0,
            514.0, 631.0, 93.0, 118.0, 129.0, 158.0, 442.0, 561.0, 612.0, 751.0,
            111.0, 141.0, 153.0, 188.0, 371.0, 471.0, 514.0, 631.0, 93.0, 118.0,
            129.0, 158.0, 619.0, 785.0, 857.0, 1051.0, 155.0, 197.0, 215.0,
            263.0
        ])
# Allow running this test module directly with the absl test runner.
if __name__ == '__main__':
  absltest.main()
| true | true |
f728d6871fb41255044213a71f7eb015371377f1 | 4,096 | py | Python | tempest/api/compute/admin/test_security_groups.py | vmahuli/tempest | f70319f5eda72b8c8a913ae1002ec531324e4116 | [
"Apache-2.0"
] | null | null | null | tempest/api/compute/admin/test_security_groups.py | vmahuli/tempest | f70319f5eda72b8c8a913ae1002ec531324e4116 | [
"Apache-2.0"
] | null | null | null | tempest/api/compute/admin/test_security_groups.py | vmahuli/tempest | f70319f5eda72b8c8a913ae1002ec531324e4116 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 NTT Data
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
# Module-level shorthand for the global tempest configuration object.
CONF = config.CONF
class SecurityGroupsTestAdminJSON(base.BaseV2ComputeAdminTest):
    """Admin-scoped security-group tests exercised via the JSON interface."""

    @classmethod
    def setUpClass(cls):
        # Bind both an admin and a non-admin security-groups client.
        super(SecurityGroupsTestAdminJSON, cls).setUpClass()
        cls.adm_client = cls.os_adm.security_groups_client
        cls.client = cls.security_groups_client

    def _delete_security_group(self, securitygroup_id, admin=True):
        """Delete a security group with the admin or tenant client."""
        if admin:
            resp, _ = self.adm_client.delete_security_group(securitygroup_id)
        else:
            resp, _ = self.client.delete_security_group(securitygroup_id)
        self.assertEqual(202, resp.status)

    @testtools.skipIf(CONF.service_available.neutron,
                      "Skipped because neutron do not support all_tenants"
                      "search filter.")
    @test.attr(type='smoke')
    def test_list_security_groups_list_all_tenants_filter(self):
        # Admin can list security groups of all tenants
        # List of all security groups created
        security_group_list = []
        # Create two security groups for a non-admin tenant
        for i in range(2):
            name = data_utils.rand_name('securitygroup-')
            description = data_utils.rand_name('description-')
            resp, securitygroup = (self.client
                                   .create_security_group(name, description))
            self.assertEqual(200, resp.status)
            self.addCleanup(self._delete_security_group,
                            securitygroup['id'], admin=False)
            security_group_list.append(securitygroup)
        client_tenant_id = securitygroup['tenant_id']
        # Create two security groups for admin tenant
        for i in range(2):
            name = data_utils.rand_name('securitygroup-')
            description = data_utils.rand_name('description-')
            resp, adm_securitygroup = (self.adm_client
                                       .create_security_group(name,
                                                              description))
            self.assertEqual(200, resp.status)
            self.addCleanup(self._delete_security_group,
                            adm_securitygroup['id'])
            security_group_list.append(adm_securitygroup)
        # Fetch all security groups based on 'all_tenants' search filter
        param = {'all_tenants': 'true'}
        resp, fetched_list = self.adm_client.list_security_groups(params=param)
        self.assertEqual(200, resp.status)
        # NOTE(review): under Python 3 map() returns a one-shot iterator,
        # so repeated assertIn calls would exhaust it after the first pass;
        # fine under Python 2 where map() returns a list — confirm target.
        sec_group_id_list = map(lambda sg: sg['id'], fetched_list)
        # Now check if all created Security Groups are present in fetched list
        for sec_group in security_group_list:
            self.assertIn(sec_group['id'], sec_group_id_list)
        # Fetch all security groups for non-admin user with 'all_tenants'
        # search filter
        resp, fetched_list = self.client.list_security_groups(params=param)
        self.assertEqual(200, resp.status)
        # Now check if all created Security Groups are present in fetched list
        for sec_group in fetched_list:
            self.assertEqual(sec_group['tenant_id'], client_tenant_id,
                             "Failed to get all security groups for "
                             "non admin user.")
class SecurityGroupsTestAdminXML(SecurityGroupsTestAdminJSON):
    # Re-run the inherited tests over the XML API interface.
    _interface = 'xml'
| 42.666667 | 79 | 0.654541 |
import testtools
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
# Module-level shorthand for the global tempest configuration object.
CONF = config.CONF
class SecurityGroupsTestAdminJSON(base.BaseV2ComputeAdminTest):
    """Admin-scoped security-group tests exercised via the JSON interface."""
    @classmethod
    def setUpClass(cls):
        # Bind both an admin and a non-admin security-groups client.
        super(SecurityGroupsTestAdminJSON, cls).setUpClass()
        cls.adm_client = cls.os_adm.security_groups_client
        cls.client = cls.security_groups_client
    def _delete_security_group(self, securitygroup_id, admin=True):
        """Delete a security group with the admin or tenant client."""
        if admin:
            resp, _ = self.adm_client.delete_security_group(securitygroup_id)
        else:
            resp, _ = self.client.delete_security_group(securitygroup_id)
        self.assertEqual(202, resp.status)
    @testtools.skipIf(CONF.service_available.neutron,
                      "Skipped because neutron do not support all_tenants"
                      "search filter.")
    @test.attr(type='smoke')
    def test_list_security_groups_list_all_tenants_filter(self):
        """Admin sees every tenant's groups; non-admin only its own."""
        security_group_list = []
        # Two groups owned by the non-admin tenant (cleaned up afterwards).
        for i in range(2):
            name = data_utils.rand_name('securitygroup-')
            description = data_utils.rand_name('description-')
            resp, securitygroup = (self.client
                                   .create_security_group(name, description))
            self.assertEqual(200, resp.status)
            self.addCleanup(self._delete_security_group,
                            securitygroup['id'], admin=False)
            security_group_list.append(securitygroup)
        client_tenant_id = securitygroup['tenant_id']
        # Two groups owned by the admin tenant.
        for i in range(2):
            name = data_utils.rand_name('securitygroup-')
            description = data_utils.rand_name('description-')
            resp, adm_securitygroup = (self.adm_client
                                       .create_security_group(name,
                                                              description))
            self.assertEqual(200, resp.status)
            self.addCleanup(self._delete_security_group,
                            adm_securitygroup['id'])
            security_group_list.append(adm_securitygroup)
        # Admin listing with all_tenants must contain every created group.
        param = {'all_tenants': 'true'}
        resp, fetched_list = self.adm_client.list_security_groups(params=param)
        self.assertEqual(200, resp.status)
        # NOTE(review): map() is a one-shot iterator under Python 3 — the
        # repeated assertIn calls would exhaust it; fine under Python 2.
        sec_group_id_list = map(lambda sg: sg['id'], fetched_list)
        for sec_group in security_group_list:
            self.assertIn(sec_group['id'], sec_group_id_list)
        # Non-admin listing must only return the non-admin tenant's groups.
        resp, fetched_list = self.client.list_security_groups(params=param)
        self.assertEqual(200, resp.status)
        for sec_group in fetched_list:
            self.assertEqual(sec_group['tenant_id'], client_tenant_id,
                             "Failed to get all security groups for "
                             "non admin user.")
class SecurityGroupsTestAdminXML(SecurityGroupsTestAdminJSON):
    # Re-run the inherited tests over the XML API interface.
    _interface = 'xml'
| true | true |
f728d7023209795915e43579b9727a3dcfc2c293 | 1,702 | py | Python | config/settings/test.py | tigrinustrade/humantech_prueba | 065a3a8cf4eca079a781c88322854fae2c9dcda8 | [
"MIT"
] | null | null | null | config/settings/test.py | tigrinustrade/humantech_prueba | 065a3a8cf4eca079a781c88322854fae2c9dcda8 | [
"MIT"
] | null | null | null | config/settings/test.py | tigrinustrade/humantech_prueba | 065a3a8cf4eca079a781c88322854fae2c9dcda8 | [
"MIT"
] | null | null | null | """
With these settings, tests run faster.
"""
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
"DJANGO_SECRET_KEY",
default="UwkbY3fgeFHTlZrfDxa8T2C1sL8GY1HfTMmZarA4SXAdF1Q3TwsCt7bsNMNEaRyn",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "",
}
}
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
# TEMPLATES
# ------------------------------------------------------------------------------
TEMPLATES[-1]["OPTIONS"]["loaders"] = [ # type: ignore[index] # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# Your stuff...
# ------------------------------------------------------------------------------
| 32.730769 | 80 | 0.49765 |
from .base import *
from .base import env
= env(
"DJANGO_SECRET_KEY",
default="UwkbY3fgeFHTlZrfDxa8T2C1sL8GY1HfTMmZarA4SXAdF1Q3TwsCt7bsNMNEaRyn",
)
= "django.test.runner.DiscoverRunner"
= {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "",
}
}
= ["django.contrib.auth.hashers.MD5PasswordHasher"]
TEMPLATES[-1]["OPTIONS"]["loaders"] = [ "django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
= "django.core.mail.backends.locmem.EmailBackend"
| true | true |
f728d9b17682bd804bb3615a11c5e2ce2f618a85 | 11,026 | py | Python | mealpy/fake/RHO.py | ashishpatel26/mealpy | 62160e61b8bd4b084e44b80fda720e6bd6332e03 | [
"MIT"
] | 1 | 2021-05-20T06:53:08.000Z | 2021-05-20T06:53:08.000Z | mealpy/fake/RHO.py | chenyuxiang0425/mealpy | 69e8dc727e15527e31ac5ace1debe92a0bc7d828 | [
"MIT"
] | null | null | null | mealpy/fake/RHO.py | chenyuxiang0425/mealpy | 69e8dc727e15527e31ac5ace1debe92a0bc7d828 | [
"MIT"
] | 1 | 2020-09-30T21:14:33.000Z | 2020-09-30T21:14:33.000Z | #!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 14:53, 17/03/2020 %
# %
# Email: nguyenthieu2102@gmail.com %
# Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 %
# Github: https://github.com/thieu1995 %
#-------------------------------------------------------------------------------------------------------%
from numpy.random import uniform, normal
from numpy.linalg import norm
from numpy import exp, power, pi, zeros, array, mean, ones, dot
from math import gamma
from copy import deepcopy
from mealpy.root import Root
class OriginalRHO(Root):
    """
    The original version of: Rhino Herd Optimization (RHO)
        (A Novel Metaheuristic Algorithm inspired by Rhino Herd Behavior)
    Link:
        https://doi.org/10.3384/ecp171421026
    """

    def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True,
                 epoch=750, pop_size=100, c=0.53, a=2831, r=0.04, A=1):
        Root.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose)
        self.epoch = epoch
        self.pop_size = pop_size
        self.c = c      # shape parameter - default = 0.53 > 0
        self.a = a      # scale parameter - default = 2831 > 0
        self.r = r      # default = 0.04 (unused by this original variant)
        self.A = A      # the area of each grid cell - default = 1

    def train(self):
        """Run the optimization and return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)

        # Epoch loop
        for epoch in range(self.epoch):
            pos_list = array([item[self.ID_POS] for item in pop])
            fit_list = array([item[self.ID_FIT] for item in pop])
            fx_list = deepcopy(fit_list)
            pos_center = mean(pos_list, axis=0)

            ## Density value f(x) of each individual around the herd center - Eq. 1
            for i in range(0, self.pop_size):
                exp_component = -1 * power(norm(pop[i][self.ID_POS] - pos_center) / self.a, 2.0 / self.c)
                fx = 2 * exp(exp_component) / (self.c ** 2 * pi * self.a ** 2 * gamma(self.c))
                fx_list[i] = fx

            # Per-dimension normalization term - Eq. 7
            # NOTE(review): this numerator uses pos / (eps + fit) while s_x
            # below uses (1 + fit * pos) — asymmetry kept as in the source;
            # confirm against the paper.
            s_component = ones(self.problem_size)
            for j in range(0, self.problem_size):
                sum_temp = 0
                for i in range(0, self.pop_size):
                    sum_temp += fx_list[i] * (1 + pop[i][self.ID_POS][j] / (self.EPSILON + pop[i][self.ID_FIT]))
                s_component[j] = self.A * sum_temp

            for i in range(0, self.pop_size):
                # Bug fix: copy the position. The previous code aliased x_new
                # to pop[i][ID_POS], so the in-place element writes below
                # mutated the individual even when the candidate fitness was
                # rejected, leaving position and fitness out of sync.
                x_new = deepcopy(pop[i][self.ID_POS])
                for j in range(0, self.problem_size):
                    # Eq. 7
                    s_x = fx_list[i] * (1 + pop[i][self.ID_FIT] * pop[i][self.ID_POS][j]) / s_component[j]
                    # Eq. 9: step away from / toward the current coordinate with equal probability
                    if uniform() <= 0.5:
                        x_new[j] = pop[i][self.ID_POS][j] - uniform() * s_x * pop[i][self.ID_POS][j]
                    else:
                        x_new[j] = pop[i][self.ID_POS][j] + uniform() * s_x * pop[i][self.ID_POS][j]
                x_new = self.amend_position_faster(x_new)
                fit = self.get_fitness_position(x_new)
                # Greedy selection: keep the candidate only if it improves.
                if fit < pop[i][self.ID_FIT]:
                    pop[i] = [x_new, fit]
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class BaseRHO(Root):
    """
    My version of: Rhino Herd Optimization (RHO)
        (A Novel Metaheuristic Algorithm inspired by Rhino Herd Behavior)
    Notes:
        + Remove third loop
    """

    def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True,
                 epoch=750, pop_size=100, c=0.53, a=2831, r=0.04, A=1):
        Root.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose)
        self.epoch = epoch
        self.pop_size = pop_size
        self.c = c      # shape parameter - default = 0.53 > 0
        self.a = a      # scale parameter - default = 2831 > 0
        self.r = r      # herd growth rate per epoch - default = 0.04
        self.A = A      # the area of each grid cell - default = 1

    def train(self):
        """Run the optimization and return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        # pop_size grows by r each epoch and is reset every 100th epoch below.
        pop_size = self.pop_size
        # Epoch loop
        for epoch in range(self.epoch):
            pop_new = deepcopy(pop)
            pos_list = array([item[self.ID_POS] for item in pop])
            fit_list = array([item[self.ID_FIT] for item in pop])
            fx_list = deepcopy(fit_list)
            pos_center = mean(pos_list, axis=0)

            ## Calculate the fx for each individual (density around center - Eq. 1)
            for i in range(0, pop_size):
                # Eq. 1
                exp_component = -1 * power(norm(pop[i][self.ID_POS] - pos_center) / self.a , 2.0/self.c )
                fx = 2 * exp(exp_component) / (self.c ** 2 * pi * self.a ** 2 * gamma(self.c))
                fx_list[i] = fx
            # print(fx_list)

            # Vectorized normalization term over all dimensions - Eq. 7
            sum_temp = zeros(self.problem_size)
            for i in range(0, pop_size):
                sum_temp += fx_list[i] * (1 + pop[i][self.ID_POS] * pop[i][self.ID_FIT])
            sum_temp = self.A * sum_temp

            for i in range(0, pop_size):
                # NOTE(review): divides by pop[i][ID_FIT] without the EPSILON
                # guard used elsewhere in this file — zero fitness would raise;
                # also the numerator here (pos/fit) differs from the sum above
                # (pos*fit). Confirm against the paper.
                s_x = fx_list[i] * (1 + pop[i][self.ID_POS]/pop[i][self.ID_FIT]) / sum_temp
                if uniform() <= 0.5:
                    x_new = pop[i][self.ID_POS] - uniform() * dot(s_x, pop[i][self.ID_POS])
                else:
                    x_new = pop[i][self.ID_POS] + uniform() * dot(s_x, pop[i][self.ID_POS])
                x_new = self.amend_position_faster(x_new)
                fit = self.get_fitness_position(x_new)
                # Greedy selection into the working copy of the population.
                if fit < pop[i][self.ID_FIT]:
                    pop_new[i] = [x_new, fit]
            if epoch % 100 == 0:
                # Periodic cull: keep only the best pop_size individuals.
                pop_size = self.pop_size
                pop_new = sorted(pop_new, key=lambda item: item[self.ID_FIT])
                pop = deepcopy(pop_new[:pop_size])
            else:
                # Herd growth: add r * pop_size fresh random individuals.
                pop_size = pop_size + int(self.r * pop_size)
                n_new = pop_size - len(pop)
                for i in range(0, n_new):
                    pop_new.extend([self.create_solution()])
                pop = deepcopy(pop_new)
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class LevyRHO(BaseRHO):
    """
    Modified version of Rhino Herd Optimization (RHO), a population-based
    metaheuristic (A Novel Metaheuristic Algorithm inspired by Rhino Herd Behavior).
    Differences from BaseRHO:
        + Changed flow of the algorithm (duplicate removal each epoch)
        + Uses normal() instead of uniform() for the step size in the update equation
        + Uses levy-flight instead of the uniform-based equation for the second branch
    """
    def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True,
                 epoch=750, pop_size=100, c=0.53, a=2831, r=0.04, A=1):
        # c, a: shape/scale constants of the density function fx (Eq. 1);
        # r: population growth rate per epoch; A: scaling factor used in Eq. 7.
        BaseRHO.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size, c, a, r, A)
    def train(self):
        """Run the optimization loop.

        Returns:
            tuple: (best_position, best_fitness, loss_train history list).
        """
        pop = [self.create_solution(minmax=0) for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        # Current (dynamic) population size: grows by rate r each epoch and is
        # culled back to self.pop_size every 100 epochs.
        pop_size = self.pop_size
        # Epoch loop
        for epoch in range(self.epoch):
            pop_new = deepcopy(pop)
            pos_list = array([item[self.ID_POS] for item in pop])
            pos_center = mean(pos_list, axis=0)  # herd center (mean of all positions)
            fx_list = zeros(pop_size)
            ## Calculate the fx (density value) for each individual
            for i in range(0, pop_size):
                # Eq. 1: density depends on the distance of the rhino to the herd center
                exp_component = -1 * power( norm(pop[i][self.ID_POS] - pos_center) / self.a , 2.0/self.c )
                fx = 2 * exp(exp_component) / (self.c ** 2 * pi * self.a ** 2 * gamma(self.c))
                fx_list[i] = fx
            #print(fx_list)
            # Eq. 7: aggregate term shared by all individuals this epoch.
            # NOTE(review): EPSILON is added to the whole term rather than to the
            # denominator pop[i][self.ID_FIT] — presumably meant as a guard against
            # division by a zero fitness; confirm the intended placement.
            sum_temp = zeros(self.problem_size)
            for i in range(0, self.pop_size):
                sum_temp += fx_list[i] * (1 + pop[i][self.ID_POS] / pop[i][self.ID_FIT] + self.EPSILON)
            sum_temp = self.A * sum_temp
            for i in range(0, pop_size):
                s_x = fx_list[i] * (1 + pop[i][self.ID_FIT] * pop[i][self.ID_POS]) / sum_temp
                if uniform() < 0.5:
                    # Move using a normally distributed step size (instead of uniform)
                    x_new = pop[i][self.ID_POS] - normal() * dot(s_x, pop[i][self.ID_POS])
                else:
                    # Levy-flight jump biased towards the current global best
                    x_new = self.levy_flight(epoch+1, pop[i][self.ID_POS], g_best[self.ID_POS])
                x_new = self.amend_position_faster(x_new)
                fit = self.get_fitness_position(x_new)
                if fit < pop[i][self.ID_FIT]:  # greedy selection (minimization)
                    pop_new[i] = [x_new, fit]
            if epoch % 100 == 0:
                # Every 100 epochs: cull the herd back to the base size, keeping the fittest
                pop_size = self.pop_size
                pop_new = sorted(pop_new, key=lambda item: item[self.ID_FIT])
                pop = deepcopy(pop_new[:pop_size])
            else:
                # Herd grows by rate r; newcomers are fresh random solutions
                pop_size = pop_size + int(self.r * pop_size)
                n_new = pop_size - len(pop)
                for i in range(0, n_new):
                    pop_new.extend([self.create_solution()])
                pop = deepcopy(pop_new)
            ## Make sure the population does not have duplicates.
            new_set = set()
            for idx, obj in enumerate(pop):
                if tuple(obj[self.ID_POS].tolist()) in new_set:
                    # Replace a duplicate position with a fresh random solution
                    pop[idx] = self.create_solution()
                else:
                    new_set.add(tuple(obj[self.ID_POS].tolist()))
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Pop Size: {}, Best Fit: {}".format(epoch+1, pop_size, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
| 45.561983 | 112 | 0.519137 |
from numpy.random import uniform, normal
from numpy.linalg import norm
from numpy import exp, power, pi, zeros, array, mean, ones, dot
from math import gamma
from copy import deepcopy
from mealpy.root import Root
class OriginalRHO(Root):
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True,
epoch=750, pop_size=100, c=0.53, a=2831, r=0.04, A=1):
Root.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose)
self.epoch = epoch
self.pop_size = pop_size
self.c = c
self.a = a
self.r = r
self.A = A
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
for epoch in range(self.epoch):
pos_list = array([item[self.ID_POS] for item in pop])
fit_list = array([item[self.ID_FIT] for item in pop])
fx_list = deepcopy(fit_list)
pos_center = mean(pos_list, axis=0)
range(0, self.pop_size):
exp_component = -1 * power(norm(pop[i][self.ID_POS] - pos_center) / self.a, 2.0 / self.c)
fx = 2 * exp(exp_component) / (self.c ** 2 * pi * self.a ** 2 * gamma(self.c))
fx_list[i] = fx
s_component = ones(self.problem_size)
for j in range(0, self.problem_size):
sum_temp = 0
for i in range(0, self.pop_size):
sum_temp += fx_list[i] * (1 + pop[i][self.ID_POS][j] / (self.EPSILON + pop[i][self.ID_FIT]))
s_component[j] = self.A * sum_temp
for i in range(0, self.pop_size):
x_new = pop[i][self.ID_POS]
for j in range(0, self.problem_size):
s_x = fx_list[i] * (1 + pop[i][self.ID_FIT] * pop[i][self.ID_POS][j]) / s_component[j]
if uniform() <= 0.5:
x_new[j] = pop[i][self.ID_POS][j] - uniform() * s_x * pop[i][self.ID_POS][j]
else:
x_new[j] = pop[i][self.ID_POS][j] + uniform() * s_x * pop[i][self.ID_POS][j]
x_new = self.amend_position_faster(x_new)
fit = self.get_fitness_position(x_new)
if fit < pop[i][self.ID_FIT]:
pop[i] = [x_new, fit]
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class BaseRHO(Root):
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True,
epoch=750, pop_size=100, c=0.53, a=2831, r=0.04, A=1):
Root.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose)
self.epoch = epoch
self.pop_size = pop_size
self.c = c
self.a = a
self.r = r
self.A = A
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
pop_size = self.pop_size
for epoch in range(self.epoch):
pop_new = deepcopy(pop)
pos_list = array([item[self.ID_POS] for item in pop])
fit_list = array([item[self.ID_FIT] for item in pop])
fx_list = deepcopy(fit_list)
pos_center = mean(pos_list, axis=0)
e):
exp_component = -1 * power(norm(pop[i][self.ID_POS] - pos_center) / self.a , 2.0/self.c )
fx = 2 * exp(exp_component) / (self.c ** 2 * pi * self.a ** 2 * gamma(self.c))
fx_list[i] = fx
sum_temp = zeros(self.problem_size)
for i in range(0, pop_size):
sum_temp += fx_list[i] * (1 + pop[i][self.ID_POS] * pop[i][self.ID_FIT])
sum_temp = self.A * sum_temp
for i in range(0, pop_size):
s_x = fx_list[i] * (1 + pop[i][self.ID_POS]/pop[i][self.ID_FIT]) / sum_temp
if uniform() <= 0.5:
x_new = pop[i][self.ID_POS] - uniform() * dot(s_x, pop[i][self.ID_POS])
else:
x_new = pop[i][self.ID_POS] + uniform() * dot(s_x, pop[i][self.ID_POS])
x_new = self.amend_position_faster(x_new)
fit = self.get_fitness_position(x_new)
if fit < pop[i][self.ID_FIT]:
pop_new[i] = [x_new, fit]
if epoch % 100 == 0:
pop_size = self.pop_size
pop_new = sorted(pop_new, key=lambda item: item[self.ID_FIT])
pop = deepcopy(pop_new[:pop_size])
else:
pop_size = pop_size + int(self.r * pop_size)
n_new = pop_size - len(pop)
for i in range(0, n_new):
pop_new.extend([self.create_solution()])
pop = deepcopy(pop_new)
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class LevyRHO(BaseRHO):
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True,
epoch=750, pop_size=100, c=0.53, a=2831, r=0.04, A=1):
BaseRHO.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size, c, a, r, A)
def train(self):
pop = [self.create_solution(minmax=0) for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
pop_size = self.pop_size
for epoch in range(self.epoch):
pop_new = deepcopy(pop)
pos_list = array([item[self.ID_POS] for item in pop])
pos_center = mean(pos_list, axis=0)
fx_list = zeros(pop_size)
e):
exp_component = -1 * power( norm(pop[i][self.ID_POS] - pos_center) / self.a , 2.0/self.c )
fx = 2 * exp(exp_component) / (self.c ** 2 * pi * self.a ** 2 * gamma(self.c))
fx_list[i] = fx
sum_temp = zeros(self.problem_size)
for i in range(0, self.pop_size):
sum_temp += fx_list[i] * (1 + pop[i][self.ID_POS] / pop[i][self.ID_FIT] + self.EPSILON)
sum_temp = self.A * sum_temp
for i in range(0, pop_size):
s_x = fx_list[i] * (1 + pop[i][self.ID_FIT] * pop[i][self.ID_POS]) / sum_temp
if uniform() < 0.5:
x_new = pop[i][self.ID_POS] - normal() * dot(s_x, pop[i][self.ID_POS])
else:
x_new = self.levy_flight(epoch+1, pop[i][self.ID_POS], g_best[self.ID_POS])
x_new = self.amend_position_faster(x_new)
fit = self.get_fitness_position(x_new)
if fit < pop[i][self.ID_FIT]:
pop_new[i] = [x_new, fit]
if epoch % 100 == 0:
pop_size = self.pop_size
pop_new = sorted(pop_new, key=lambda item: item[self.ID_FIT])
pop = deepcopy(pop_new[:pop_size])
else:
pop_size = pop_size + int(self.r * pop_size)
n_new = pop_size - len(pop)
for i in range(0, n_new):
pop_new.extend([self.create_solution()])
pop = deepcopy(pop_new)
j in enumerate(pop):
if tuple(obj[self.ID_POS].tolist()) in new_set:
pop[idx] = self.create_solution()
else:
new_set.add(tuple(obj[self.ID_POS].tolist()))
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print("> Epoch: {}, Pop Size: {}, Best Fit: {}".format(epoch+1, pop_size, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
| true | true |
f728da1527fd632c7835a634ccc21648be53f1a4 | 13,704 | py | Python | statuscake/model/monitoring_location.py | StatusCakeDev/statuscake-py | 267f485e6c18a6ed6e17667ceaddfce6271ec4af | [
"MIT"
] | null | null | null | statuscake/model/monitoring_location.py | StatusCakeDev/statuscake-py | 267f485e6c18a6ed6e17667ceaddfce6271ec4af | [
"MIT"
] | null | null | null | statuscake/model/monitoring_location.py | StatusCakeDev/statuscake-py | 267f485e6c18a6ed6e17667ceaddfce6271ec4af | [
"MIT"
] | null | null | null | """
StatusCake API
Copyright (c) 2022
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
API Version: 1.0.0-beta.3
Contact: support@statuscake.com
Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
"""
import re # noqa: F401
import sys # noqa: F401
from statuscake.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from statuscake.exceptions import ApiAttributeError
def lazy_import():
    """Import MonitoringLocationStatus lazily to avoid a circular import at module load time."""
    from statuscake.model.monitoring_location_status import MonitoringLocationStatus
    globals()['MonitoringLocationStatus'] = MonitoringLocationStatus
class MonitoringLocation(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # This model has no enum-restricted or validated fields.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'description': (str,),  # noqa: E501
            'region': (str,),  # noqa: E501
            'region_code': (str,),  # noqa: E501
            'status': (MonitoringLocationStatus,),  # noqa: E501
            'ipv4': (str,),  # noqa: E501
            'ipv6': (str,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # Not a polymorphic schema: no discriminator.
        return None
    # Python attribute name -> JSON key in the API payload (identical here).
    attribute_map = {
        'description': 'description',  # noqa: E501
        'region': 'region',  # noqa: E501
        'region_code': 'region_code',  # noqa: E501
        'status': 'status',  # noqa: E501
        'ipv4': 'ipv4',  # noqa: E501
        'ipv6': 'ipv6',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, description, region, region_code, status, *args, **kwargs):  # noqa: E501
        """MonitoringLocation - a model defined in OpenAPI
        Args:
            description (str): Server description
            region (str): Server region
            region_code (str): Server region code
            status (MonitoringLocationStatus):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            ipv4 (str): Server IPv4 address. [optional]  # noqa: E501
            ipv6 (str): Server IPv6 address. [optional]  # noqa: E501
        """
        # Pop framework keyword arguments before treating the rest as model fields.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ dispatch: allocate a plain instance directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.description = description
        self.region = region
        self.region_code = region_code
        self.status = status
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that are always set on an instance (used by the
    # base model to distinguish framework state from schema properties).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, description, region, region_code, status, *args, **kwargs):  # noqa: E501
        """MonitoringLocation - a model defined in OpenAPI
        Args:
            description (str): Server description
            region (str): Server region
            region_code (str): Server region code
            status (MonitoringLocationStatus):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            ipv4 (str): Server IPv4 address. [optional]  # noqa: E501
            ipv6 (str): Server IPv6 address. [optional]  # noqa: E501
        """
        # Same framework-kwarg handling as _from_openapi_data, but read-only
        # attributes are rejected here (user-constructed instances).
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.description = description
        self.region = region
        self.region_code = region_code
        self.status = status
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be populated via _from_openapi_data
            # (i.e. during response deserialization), never by the caller.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
| 43.782748 | 121 | 0.587858 |
import re
import sys
from statuscake.model_utils import (
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from statuscake.exceptions import ApiAttributeError
def lazy_import():
from statuscake.model.monitoring_location_status import MonitoringLocationStatus
globals()['MonitoringLocationStatus'] = MonitoringLocationStatus
class MonitoringLocation(ModelNormal):
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,)
_nullable = False
@cached_property
def openapi_types():
lazy_import()
return {
'description': (str,),
'region': (str,),
'region_code': (str,),
'status': (MonitoringLocationStatus,),
'ipv4': (str,),
'ipv6': (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {
'description': 'description',
'region': 'region',
'region_code': 'region_code',
'status': 'status',
'ipv4': 'ipv4',
'ipv6': 'ipv6',
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, description, region, region_code, status, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.description = description
self.region = region
self.region_code = region_code
self.status = status
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, description, region, region_code, status, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.description = description
self.region = region
self.region_code = region_code
self.status = status
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| true | true |
f728dc2a946afc1b5d06e688400b56b32399c964 | 1,251 | py | Python | maml_zoo/baselines/zero_baseline.py | intel-isl/MetaLearningTradeoffs | bb1b849742a959310f3b9b630bb76ae3509a5d4a | [
"MIT"
] | 2 | 2021-04-26T15:09:46.000Z | 2021-04-28T11:58:28.000Z | maml_zoo/baselines/zero_baseline.py | intel-isl/MetaLearningTradeoffs | bb1b849742a959310f3b9b630bb76ae3509a5d4a | [
"MIT"
] | null | null | null | maml_zoo/baselines/zero_baseline.py | intel-isl/MetaLearningTradeoffs | bb1b849742a959310f3b9b630bb76ae3509a5d4a | [
"MIT"
] | null | null | null | from maml_zoo.baselines.base import Baseline
import numpy as np
class ZeroBaseline(Baseline):
"""
Dummy baseline
"""
def __init__(self):
super(ZeroBaseline, self).__init__()
def get_param_values(self, **kwargs):
"""
Returns the parameter values of the baseline object
Returns:
(None): coefficients of the baseline
"""
return None
def set_param_values(self, value, **kwargs):
"""
Sets the parameter values of the baseline object
Args:
value (None): coefficients of the baseline
"""
pass
def fit(self, paths, **kwargs):
"""
Improves the quality of zeroes output by baseline
Args:
paths: list of paths
"""
pass
def predict(self, path):
"""
Produces some zeroes
Args:
path (dict): dict of lists/numpy array containing trajectory / path information
such as "observations", "rewards", ...
Returns:
(np.ndarray): numpy array of the same length as paths["observations"] specifying the reward baseline
"""
return np.zeros_like(path["rewards"]) | 22.745455 | 113 | 0.56275 | from maml_zoo.baselines.base import Baseline
import numpy as np
class ZeroBaseline(Baseline):
def __init__(self):
super(ZeroBaseline, self).__init__()
def get_param_values(self, **kwargs):
return None
def set_param_values(self, value, **kwargs):
pass
def fit(self, paths, **kwargs):
pass
def predict(self, path):
return np.zeros_like(path["rewards"]) | true | true |
f728dd857698ca686c726ad6857773bcc957646e | 1,204 | py | Python | extras/kclibar.20s.py | flloreda/kcli | 0f0ee3bad058882dd5db1faa257e05270933cc93 | [
"Apache-2.0"
] | null | null | null | extras/kclibar.20s.py | flloreda/kcli | 0f0ee3bad058882dd5db1faa257e05270933cc93 | [
"Apache-2.0"
] | null | null | null | extras/kclibar.20s.py | flloreda/kcli | 0f0ee3bad058882dd5db1faa257e05270933cc93 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# <bitbar.title>kcli list</bitbar.title>
# <bitbar.version>v1.0</bitbar.version>
# <bitbar.author>Karim Boumedhel</bitbar.author>
# <bitbar.author.github>karmab</bitbar.author.github>
# <bitbar.desc>Lists running vms using kcli</bitbar.desc>
# <bitbar.image>https://raw.githubusercontent.com/karmab/kcli/master/kcli-small.png</bitbar.image>
# <bitbar.abouturl>https://github.com/karmab/kcli</bitbar.abouturl>
import sys
try:
from kvirt.config import Kconfig
except:
warning = u"\u26A0"
warning = warning.encode('utf-8')
print("%s\n" % warning)
print("---\n")
print("Kcli could not be found in your path. Is it installed?")
sys.exit(1)
config = Kconfig(quiet=True)
k = config.k
vms = k.list()
running = [vm[0] for vm in vms if vm[1] == 'up']
off = [vm[0] for vm in vms if vm[1] == 'down']
print("Kcli %s/%s Running" % (len(running), len(vms)))
print('---')
for vm in sorted(running):
print("%s| color=green" % vm)
# print("%s| color=green bash=kcli param1=ssh param2=%s" % (vm, vm))
print('---')
for vm in sorted(off):
print(vm)
# print("%s| terminal=false refresh=true bash=kcli param1=start param2=%s" % (vm, vm))
| 32.540541 | 98 | 0.657807 |
import sys
try:
from kvirt.config import Kconfig
except:
warning = u"\u26A0"
warning = warning.encode('utf-8')
print("%s\n" % warning)
print("---\n")
print("Kcli could not be found in your path. Is it installed?")
sys.exit(1)
config = Kconfig(quiet=True)
k = config.k
vms = k.list()
running = [vm[0] for vm in vms if vm[1] == 'up']
off = [vm[0] for vm in vms if vm[1] == 'down']
print("Kcli %s/%s Running" % (len(running), len(vms)))
print('---')
for vm in sorted(running):
print("%s| color=green" % vm)
print('---')
for vm in sorted(off):
print(vm)
| true | true |
f728deb54d5e8380553f699e1124d7ca227b1e24 | 2,736 | py | Python | app/__init__.py | pndemo/yummy-recipes-api | ae6729bd1c886ce9872d83488a6eaa99e92be513 | [
"MIT"
] | null | null | null | app/__init__.py | pndemo/yummy-recipes-api | ae6729bd1c886ce9872d83488a6eaa99e92be513 | [
"MIT"
] | 3 | 2019-12-20T23:17:20.000Z | 2022-03-21T22:16:25.000Z | app/__init__.py | pndemo/yummy-recipes-api | ae6729bd1c886ce9872d83488a6eaa99e92be513 | [
"MIT"
] | 1 | 2017-12-13T12:44:11.000Z | 2017-12-13T12:44:11.000Z | """ Initial application specifications """
from flask import Flask, redirect
from flask_sqlalchemy import SQLAlchemy
from instance.config import app_config
from flasgger import Swagger
# pylint: disable=C0103
# Shared SQLAlchemy handle; created unbound here and attached to the app
# inside create_app() (application-factory pattern).
db = SQLAlchemy()
def create_app(config_name):
    """Application factory: build and configure a Flask app.

    Args:
        config_name (str): key into ``app_config`` selecting the
            configuration class to load (e.g. development/testing).

    Returns:
        Flask: the configured application with Swagger UI, the shared
        SQLAlchemy instance and all v1 auth/category/recipe routes wired up.
    """
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(app_config[config_name])
    # Flasgger/Swagger UI metadata, plus the JWT bearer-token security scheme.
    app.config['SWAGGER'] = {
        "swagger": "2.0",
        "title": "Yummy Recipes API",
        "info": {
            "title": "Yummy Recipes API",
            "description": "This app enables you to access Yummy Recipes resources, a platform \
                    for users to keep track of their awesome recipes and share with others if \
                    they so wish. The API functionalities include: creation of new user \
                    accounts, user login, password reset, creation of new recipe categories, \
                    viewing of recipe categories, updating of recipe categories, deletion of \
                    recipe categories, creation of new recipes, viewing of recipes, updating of \
                    recipes and deletion of recipes.",
            "contact": {
                "responsibleOrganization": "Yummy Recipes Inc.",
                "responsibleDeveloper": "Paul Ndemo Oroko",
                "url": "https://github.com/pndemo/yummy-recipes-api",
            }
        },
        "schemes": ["http", "https"],
        'securityDefinitions': {
            "Bearer": {
                "type": "apiKey",
                "name": "Authorization",
                "in": "header"
            }
        }
    }
    Swagger(app)
    db.init_app(app)
    def index():
        """ Yummy Recipes API home page: redirect to the Swagger UI. """
        return redirect('/apidocs')
    # Views are imported here (not at module level) to avoid circular imports
    # with modules that themselves import ``db``/``create_app``.
    from app.v1.views import auth_blueprint
    app.register_blueprint(auth_blueprint)
    from app.v1.views.category_views import category_view, category_specific_view, \
        category_search_view
    app.add_url_rule('/', view_func=index)
    app.add_url_rule('/api/v1/category/', view_func=category_view)
    app.add_url_rule('/api/v1/category/<int:category_id>', view_func=category_specific_view)
    app.add_url_rule('/api/v1/category/search', view_func=category_search_view)
    from app.v1.views.recipe_views import recipe_view, recipe_specific_view, recipe_search_view
    app.add_url_rule('/api/v1/recipe/<int:category_id>/', view_func=recipe_view)
    app.add_url_rule('/api/v1/recipe/<int:category_id>/<int:recipe_id>', view_func= \
        recipe_specific_view)
    app.add_url_rule('/api/v1/recipe/<int:category_id>/search', view_func=recipe_search_view)
    return app
| 38.535211 | 97 | 0.640351 |
from flask import Flask, redirect
from flask_sqlalchemy import SQLAlchemy
from instance.config import app_config
from flasgger import Swagger
db = SQLAlchemy()
def create_app(config_name):
    """Application factory: build and configure the Flask app.

    Args:
        config_name: key into ``app_config`` selecting which configuration
            object to load (e.g. testing vs. production).

    Returns:
        The fully configured :class:`flask.Flask` application.
    """
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(app_config[config_name])
    # Swagger/flasgger template rendered at /apidocs.
    # NOTE: the backslash-continued "description" string is layout-sensitive
    # (the leading spaces of each continuation line are part of the string);
    # do not re-indent these lines.
    app.config['SWAGGER'] = {
        "swagger": "2.0",
        "title": "Yummy Recipes API",
        "info": {
            "title": "Yummy Recipes API",
            "description": "This app enables you to access Yummy Recipes resources, a platform \
                for users to keep track of their awesome recipes and share with others if \
                they so wish. The API functionalities include: creation of new user \
                accounts, user login, password reset, creation of new recipe categories, \
                viewing of recipe categories, updating of recipe categories, deletion of \
                recipe categories, creation of new recipes, viewing of recipes, updating of \
                recipes and deletion of recipes.",
            "contact": {
                "responsibleOrganization": "Yummy Recipes Inc.",
                "responsibleDeveloper": "Paul Ndemo Oroko",
                "url": "https://github.com/pndemo/yummy-recipes-api",
            }
        },
        "schemes": ["http", "https"],
        'securityDefinitions': {
            "Bearer": {
                "type": "apiKey",
                "name": "Authorization",
                "in": "header"
            }
        }
    }
    Swagger(app)
    db.init_app(app)
    # Root view: forward visitors straight to the generated API docs.
    def index():
        return redirect('/apidocs')
    # View imports are deferred until after ``db`` is initialised —
    # presumably to avoid circular imports with the views package (verify).
    from app.v1.views import auth_blueprint
    app.register_blueprint(auth_blueprint)
    from app.v1.views.category_views import category_view, category_specific_view, \
        category_search_view
    app.add_url_rule('/', view_func=index)
    app.add_url_rule('/api/v1/category/', view_func=category_view)
    app.add_url_rule('/api/v1/category/<int:category_id>', view_func=category_specific_view)
    app.add_url_rule('/api/v1/category/search', view_func=category_search_view)
    from app.v1.views.recipe_views import recipe_view, recipe_specific_view, recipe_search_view
    app.add_url_rule('/api/v1/recipe/<int:category_id>/', view_func=recipe_view)
    app.add_url_rule('/api/v1/recipe/<int:category_id>/<int:recipe_id>', view_func= \
        recipe_specific_view)
    app.add_url_rule('/api/v1/recipe/<int:category_id>/search', view_func=recipe_search_view)
    return app
| true | true |
f728decc932ad7aa29280e8e97286815f1eb87a0 | 3,230 | py | Python | app/tests/v1/test_meetup.py | BILLthebuilder/meetup-query | 0c2ab5b3d8c9b97518e047f2e6030eea49916371 | [
"MIT"
] | null | null | null | app/tests/v1/test_meetup.py | BILLthebuilder/meetup-query | 0c2ab5b3d8c9b97518e047f2e6030eea49916371 | [
"MIT"
] | 2 | 2019-01-09T08:19:47.000Z | 2019-01-15T03:42:32.000Z | app/tests/v1/test_meetup.py | BILLthebuilder/meetup-query | 0c2ab5b3d8c9b97518e047f2e6030eea49916371 | [
"MIT"
] | null | null | null | # # Third party imports
# import unittest
# import json
# # Local imports
# from app import create_app
# class TestMeetups(unittest.TestCase):
# def setUp(self):
# self.app = create_app("testing")
# self.client = self.app.test_client()
# self.meetup_incomplete ={
# "topic" : "Programming"
# }
# self.meetup_complete ={
# "id": "1",
# "topic" : "Udacity welcom",
# "location" : "San Fransisco or remotely via zoom",
# "happeningOn" : "Tommorow"
# }
# # Test validity of json data in request
# def test_post_meetup(self):
# response = self.client.post('api/v1/meetups')
# result = json.loads(response.data)
# self.assertEqual(result["message"],"Only Application/JSON input expected")
# self.assertEqual(response.status_code, 400)
# # Test empty fields
# def test_post_empty_meetup(self):
# response = self.client.post('api/v1/meetups',data=json.dumps(self.meetup_incomplete),
# content_type="application/json")
# result = json.loads(response.data)
# self.assertEqual(result["message"],"All fields must be populated with data")
# self.assertEqual(response.status_code, 400)
# # Test valid input for meetup creation
# def test_post_meetup_success(self):
# response = self.client.post('api/v1/meetups', data=json.dumps(self.meetup_complete),
# content_type="application/json")
# result = json.loads(response.data)
# self.assertEqual(result["message"],"Meetup created succesfully")
# self.assertEqual(response.status_code, 201)
import unittest
import json
from app import create_app
class TestMeetups(unittest.TestCase):
    """Integration tests for the /api/v1/meetups endpoints."""

    def setUp(self):
        """Create a fresh application and test client for every test."""
        self.app = create_app("testing")
        self.client = self.app.test_client()

    def create_record(self):
        """POST one sample meetup and return the raw response."""
        payload = {
            "title": "Football",
            "description": "Playing football on 25th",
            "date": "25th of November",
            "location": "Kasarani"
        }
        return self.client.post('/api/v1/meetups',
                                data=json.dumps(payload),
                                headers={"content-type": "application/json"})

    def test_01_post_meetups(self):
        """Creating a meetup responds with 201 Created."""
        self.assertEqual(self.create_record().status_code, 201)

    def test_02_get_all(self):
        """Listing all meetups responds with 200 OK."""
        response = self.client.get('/api/v1/meetups',
                                   headers={"content-type": "application/json"})
        self.assertEqual(response.status_code, 200)

    def test_03_get_specific(self):
        """Fetching the record created above by id responds with 200, not 404."""
        self.create_record()
        response = self.client.get('/api/v1/meetups/1',
                                   headers={"content-type": "application/json"})
        self.assertEqual(response.status_code, 200)
        self.assertNotEqual(response.status_code, 404)
| 36.292135 | 95 | 0.580186 | TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
def create_record(self):
response = self.client.post('/api/v1/meetups',
data=json.dumps({
"title": "Football",
"description": "Playing football on 25th",
"date": "25th of November",
"location": "Kasarani"
}),
headers={"content-type": "application/json"})
return response
def test_01_post_meetups(self):
response = self.create_record()
self.assertEqual(response.status_code, 201)
def test_02_get_all(self):
response = self.client.get('/api/v1/meetups',
headers={"content-type": "application/json"})
self.assertEqual(response.status_code, 200)
def test_03_get_specific(self):
self.create_record()
response = self.client.get('/api/v1/meetups/1',
headers={"content-type": "application/json"})
self.assertEqual(response.status_code, 200)
self.assertNotEqual(response.status_code, 404)
| true | true |
f728df115e09cdf67172d2fb6c3518a298f4ff9c | 156 | py | Python | sqla_wrapper/version.py | gustavopp93/sqla-wrapper | 4e30534b67ebe83b40a95e1e3f342b132fedb6a6 | [
"Apache-2.0"
] | null | null | null | sqla_wrapper/version.py | gustavopp93/sqla-wrapper | 4e30534b67ebe83b40a95e1e3f342b132fedb6a6 | [
"Apache-2.0"
] | null | null | null | sqla_wrapper/version.py | gustavopp93/sqla-wrapper | 4e30534b67ebe83b40a95e1e3f342b132fedb6a6 | [
"Apache-2.0"
] | null | null | null | import pkg_resources
try:
__version__ = pkg_resources.require("sqla-wrapper")[0].version
except Exception: # pragma: no cover
__version__ = None
| 19.5 | 66 | 0.737179 | import pkg_resources
try:
__version__ = pkg_resources.require("sqla-wrapper")[0].version
except Exception:
__version__ = None
| true | true |
f728df9e68bfc20a2756b0819774695d041fcdb0 | 610 | py | Python | test_summary_index.py | derenlei/bert-extractive-summarizer | 64f73851d541bab40e62f09c799cbd7cce70cc69 | [
"MIT"
] | null | null | null | test_summary_index.py | derenlei/bert-extractive-summarizer | 64f73851d541bab40e62f09c799cbd7cce70cc69 | [
"MIT"
] | null | null | null | test_summary_index.py | derenlei/bert-extractive-summarizer | 64f73851d541bab40e62f09c799cbd7cce70cc69 | [
"MIT"
] | 1 | 2021-02-15T07:07:10.000Z | 2021-02-15T07:07:10.000Z | from summarizer import Summarizer
from summarizer import Summarizer
from transformers import AutoConfig, AutoTokenizer, AutoModel

# Input article and pretrained checkpoint.
test_file = 'tests/test_news_source.txt'
model_spec = "distilbert-base-uncased"

# Read the source text.  The context manager guarantees the handle is
# closed even if reading fails (the original relied on a manual close()).
with open(test_file, 'r') as f:
    body = f.read()

# Expose hidden states so the summarizer can use the model's embeddings.
custom_config = AutoConfig.from_pretrained(model_spec)
custom_config.output_hidden_states = True
custom_tokenizer = AutoTokenizer.from_pretrained(model_spec)
custom_model = AutoModel.from_pretrained(model_spec, config=custom_config)

# Extractive summarization: return the chosen sentences plus their indices.
model = Summarizer(custom_model=custom_model, return_list=True, return_index=True)
result, index = model(body)
print('\n'.join(result))
print(index)
from transformers import AutoConfig, AutoTokenizer, AutoModel
test_file = 'tests/test_news_source.txt'
model_spec = "distilbert-base-uncased"
f = open(test_file, 'r')
body = f.read()
f.close()
custom_config = AutoConfig.from_pretrained(model_spec)
custom_config.output_hidden_states=True
custom_tokenizer = AutoTokenizer.from_pretrained(model_spec)
custom_model = AutoModel.from_pretrained(model_spec, config=custom_config)
model = Summarizer(custom_model=custom_model, return_list=True, return_index=True)
result, index = model(body)
print('\n'.join(result))
print(index) | true | true |
f728e0077701397b6084ad1b88f21a79f54414ab | 403 | py | Python | app/run/migrations/0006_alter_run_start_time.py | Masado/django-app-api-3 | 88def27f1cd8974c62dead282cd04d1384054888 | [
"MIT"
] | null | null | null | app/run/migrations/0006_alter_run_start_time.py | Masado/django-app-api-3 | 88def27f1cd8974c62dead282cd04d1384054888 | [
"MIT"
] | null | null | null | app/run/migrations/0006_alter_run_start_time.py | Masado/django-app-api-3 | 88def27f1cd8974c62dead282cd04d1384054888 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.9 on 2021-11-17 15:36
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-declare ``Run.start_time`` with the verbose name 'date started'."""

    # Must apply after the previous alteration of the same field.
    dependencies = [
        ('run', '0005_alter_run_start_time'),
    ]

    operations = [
        # Only field metadata changes here, so no data migration is required.
        migrations.AlterField(
            model_name='run',
            name='start_time',
            field=models.DateTimeField(verbose_name='date started'),
        ),
    ]
| 21.210526 | 68 | 0.60794 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('run', '0005_alter_run_start_time'),
]
operations = [
migrations.AlterField(
model_name='run',
name='start_time',
field=models.DateTimeField(verbose_name='date started'),
),
]
| true | true |
f728e0dcd96c1b591ae5e8c1c0aaa544cd603765 | 393 | py | Python | MyMusicApp/users/migrations/0002_profile_bio.py | kells4real/MusicApp | 4e4ba065c4f472243413551f63dc4e9eddf7f4a7 | [
"bzip2-1.0.6",
"MIT"
] | null | null | null | MyMusicApp/users/migrations/0002_profile_bio.py | kells4real/MusicApp | 4e4ba065c4f472243413551f63dc4e9eddf7f4a7 | [
"bzip2-1.0.6",
"MIT"
] | null | null | null | MyMusicApp/users/migrations/0002_profile_bio.py | kells4real/MusicApp | 4e4ba065c4f472243413551f63dc4e9eddf7f4a7 | [
"bzip2-1.0.6",
"MIT"
] | null | null | null | # Generated by Django 2.2.2 on 2019-09-13 23:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``bio`` text field to the ``Profile`` model."""

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        # blank=True/null=True make the biography optional both in forms and
        # at the database level; input is capped at 500 characters.
        migrations.AddField(
            model_name='profile',
            name='bio',
            field=models.TextField(blank=True, max_length=500, null=True),
        ),
    ]
| 20.684211 | 74 | 0.590331 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='profile',
name='bio',
field=models.TextField(blank=True, max_length=500, null=True),
),
]
| true | true |
f728e12d20caee0ee30c4eb3f34608b8cca21f80 | 1,896 | py | Python | backend/app.py | eaglesemanation/CyberGarden-HackPlatform | e7bb9f7433644d3ba38c76a35595c798a30c0883 | [
"MIT"
] | null | null | null | backend/app.py | eaglesemanation/CyberGarden-HackPlatform | e7bb9f7433644d3ba38c76a35595c798a30c0883 | [
"MIT"
] | null | null | null | backend/app.py | eaglesemanation/CyberGarden-HackPlatform | e7bb9f7433644d3ba38c76a35595c798a30c0883 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import shutil
from pathlib import Path

import uvicorn
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware
from tortoise import Tortoise

# TEST_TORTOISE_ORM stays importable so config_var below can be switched to
# the throwaway test database without touching the imports.
from settings import PROD_TORTOISE_ORM, TEST_TORTOISE_ORM
from tools.db import fill_db
from crud import hacks, locations, participants, teams, users

app = FastAPI(
    version="0.0.2",
    title="CyberGarden-HackPlatform",
    description="CyberGarden Hackathon project API based on FastAPI",
)

# Wide-open CORS: the API is consumed by a separately hosted frontend.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(users.router, prefix="/users", tags=["Users"])
app.include_router(participants.router, prefix="/participants", tags=["Participants"])
app.include_router(hacks.router, prefix="/hacks", tags=["Hacks"])
app.include_router(teams.router, prefix="/teams", tags=["Teams"])
app.include_router(locations.router, prefix="/locations", tags=["Locations"])

# Reset the test database directory on import.  A missing directory is the
# normal first-run case, not an error (the old code printed a misleading
# "Error during delete" message here).
try:
    shutil.rmtree("db/test")
except FileNotFoundError:
    pass

# Ensure both database directories exist (the old code created db/test twice
# in two separate loops).
for path in ("db/test", "db/prod"):
    Path(path).mkdir(parents=True, exist_ok=True)

# Swap in TEST_TORTOISE_ORM to run against the test database.
config_var = PROD_TORTOISE_ORM


@app.on_event("startup")
async def startup():
    """Initialise the ORM, create any missing tables and seed demo data."""
    await Tortoise.init(config=config_var)
    await Tortoise.generate_schemas(safe=True)
    await fill_db()


@app.on_event("shutdown")
async def shutdown():
    """Close all open database connections."""
    await Tortoise.close_connections()


if __name__ == "__main__":
    uvicorn.run("app:app", reload=True, use_colors=True)
| 25.28 | 86 | 0.737869 |
import shutil
from pathlib import Path
import uvicorn
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware
from settings import PROD_TORTOISE_ORM, TEST_TORTOISE_ORM
from tortoise import Tortoise
from crud import hacks, locations, participants, teams, users
from settings import PROD_TORTOISE_ORM
from tools.db import fill_db
from tools.db import fill_db
from crud import users, hacks, teams, locations
app = FastAPI(
version="0.0.2",
title="CyberGarden-HackPlatform",
description="CyberGarden Hackathon project API based on FastAPI",
)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(users.router, prefix="/users", tags=["Users"])
app.include_router(participants.router, prefix="/participants", tags=["Participants"])
app.include_router(hacks.router, prefix="/hacks", tags=["Hacks"])
app.include_router(teams.router, prefix="/teams", tags=["Teams"])
app.include_router(locations.router, prefix="/locations", tags=["Locations"])
try:
shutil.rmtree(
"db/test"
)
except FileNotFoundError:
print("Error during delete")
pass
for path in ["db/test"]:
Path(path).mkdir(parents=True, exist_ok=True)
config_var = PROD_TORTOISE_ORM
for path in ["db/test", "db/prod"]:
Path(path).mkdir(parents=True, exist_ok=True)
@app.on_event("startup")
async def startup():
await Tortoise.init(config=config_var)
await Tortoise.generate_schemas(safe=True)
await fill_db()
@app.on_event("shutdown")
async def shutdown():
await Tortoise.close_connections()
if __name__ == "__main__":
uvicorn.run("app:app", reload=True, use_colors=True)
| true | true |
f728e28fd3d47867ca96ea9647907f197fd83ece | 1,001 | py | Python | logistic regression/logistic_adam/adam_train_loss .py | ouyangyike/Inference-Algorithm | ac3470e2fbc4415174b32ecc2e2f3f101da1ca38 | [
"MIT"
] | null | null | null | logistic regression/logistic_adam/adam_train_loss .py | ouyangyike/Inference-Algorithm | ac3470e2fbc4415174b32ecc2e2f3f101da1ca38 | [
"MIT"
] | null | null | null | logistic regression/logistic_adam/adam_train_loss .py | ouyangyike/Inference-Algorithm | ac3470e2fbc4415174b32ecc2e2f3f101da1ca38 | [
"MIT"
] | null | null | null | import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from logistic_adam import *

# Train logistic regression with Adam at several learning rates
# (batch_size=500, 15 epochs, weight decay lamda=0.01) and plot the training
# cross-entropy curves.  Column 0 of the returned log is plotted as the
# train cross-entropy per iteration.  The original script repeated the same
# train/plot stanza once per learning rate; one loop is equivalent (DRY).
for rate, marker in [(1, '+'), (0.1, '*'), (0.01, 'h'), (0.001, 'd')]:
    logging = runLogistic(rate, 500, 15, 0.01)
    plt.plot(logging[:, 0], marker=marker, label='learning rate = %g' % rate)

plt.legend(loc='upper right')
plt.title('Plot of Train_CrossEntropy vs. Iterations with batch_size=500')
plt.xlabel('Iterations')
plt.ylabel('Train_CrossEntropy')
plt.show()
| 28.6 | 74 | 0.712288 | import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from logistic_adam import *
logging = runLogistic(1,500,15,0.01)
plt.plot(logging[:,0],marker='+',label='learning rate = 1')
logging = runLogistic(0.1,500,15,0.01)
plt.plot(logging[:,0],marker='*',label='learning rate = 0.1')
logging = runLogistic(0.01,500,15,0.01)
plt.plot(logging[:,0],marker='h',label='learning rate = 0.01')
logging = runLogistic(0.001,500,15,0.01)
plt.plot(logging[:,0],marker='d',label='learning rate = 0.001')
plt.legend(loc='upper right')
plt.title('Plot of Train_CrossEntropy vs. Iterations with batch_size=500')
plt.xlabel('Iterations')
plt.ylabel('Train_CrossEntropy')
plt.show()
| true | true |
f728e2a3b8c71f6b3b84c50cab4c2551ffed73f2 | 5,983 | py | Python | ibmsecurity/isam/base/network/felb/ha.py | zone-zero/ibmsecurity | 7d3e38104b67e1b267e18a44845cb756a5302c3d | [
"Apache-2.0"
] | 46 | 2017-03-21T21:08:59.000Z | 2022-02-20T22:03:46.000Z | ibmsecurity/isam/base/network/felb/ha.py | zone-zero/ibmsecurity | 7d3e38104b67e1b267e18a44845cb756a5302c3d | [
"Apache-2.0"
] | 201 | 2017-03-21T21:25:52.000Z | 2022-03-30T21:38:20.000Z | ibmsecurity/isam/base/network/felb/ha.py | zone-zero/ibmsecurity | 7d3e38104b67e1b267e18a44845cb756a5302c3d | [
"Apache-2.0"
] | 91 | 2017-03-22T16:25:36.000Z | 2022-02-04T04:36:29.000Z | import ibmsecurity.utilities.tools
import logging
from ibmsecurity.utilities import tools
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/ha"
requires_modules = None
requires_version = None
requires_model = "Appliance"
def get(isamAppliance, check_mode=False, force=False):
    """
    Retrieving HA configuration
    """
    # check_mode/force are accepted for API symmetry; a read is always safe.
    return isamAppliance.invoke_get(
        "Retrieving HA configuration",
        module_uri,
        requires_version=requires_version,
        requires_modules=requires_modules,
        requires_model=requires_model,
    )
def disable(isamAppliance, check_mode=False, force=False):
    """
    Disabling HA
    """
    currently_enabled, warnings = _check_enable(isamAppliance)
    if not (force is True or currently_enabled is True):
        # Already disabled (or state unknown): nothing to change.
        return isamAppliance.create_return_object(warnings=warnings)
    if check_mode is True:
        return isamAppliance.create_return_object(changed=True, warnings=warnings)
    return isamAppliance.invoke_delete(
        "Disabling HA",
        module_uri,
        requires_version=requires_version,
        requires_modules=requires_modules,
        requires_model=requires_model,
    )
def enable(isamAppliance, is_primary, interface, remote, port, health_check_interval,
           health_check_timeout, check_mode=False, force=False):
    """
    Enabling HA
    """
    desired = {
        "is_primary": is_primary,
        "interface": interface,
        "remote": remote,
        "port": port,
        "health_check_interval": health_check_interval,
        "health_check_timeout": health_check_timeout
    }
    currently_enabled, warnings = _check_enable(isamAppliance)
    if not (force is True or currently_enabled is False):
        # HA is already enabled (or state unknown): leave it untouched.
        return isamAppliance.create_return_object(warnings=warnings)
    if check_mode is True:
        return isamAppliance.create_return_object(changed=True, warnings=warnings)
    return isamAppliance.invoke_post(
        "Enabling HA",
        module_uri,
        desired,
        requires_version=requires_version,
        requires_modules=requires_modules,
        requires_model=requires_model,
    )
def update(isamAppliance, is_primary, interface, remote, port, health_check_interval,
           health_check_timeout, check_mode=False, force=False):
    """
    Updating HA configuration

    Bug fix: the check-mode return previously passed the warnings list via
    the misspelled keyword ``warnigns``, so the collected warnings were
    never attached to the returned object.
    """
    json_data = {
        "is_primary": is_primary,
        "interface": interface,
        "remote": remote,
        "port": port,
        "health_check_interval": health_check_interval,
        "health_check_timeout": health_check_timeout
    }
    # Call to check function to see if configuration already exist
    update_required, warnings = _check_update(isamAppliance, json_data)
    if force is True or update_required is True:
        if check_mode is True:
            return isamAppliance.create_return_object(changed=True, warnings=warnings)
        else:
            return isamAppliance.invoke_put("Updating HA configuration", module_uri,
                                            json_data,
                                            requires_modules=requires_modules,
                                            requires_version=requires_version,
                                            requires_model=requires_model)
    else:
        return isamAppliance.create_return_object(warnings=warnings)
def set(isamAppliance, is_primary, interface, remote, port, health_check_interval,
        health_check_timeout, check_mode=False, force=False):
    """
    determines if add or update is used.
    """
    is_enabled, warnings = _check_enable(isamAppliance)
    if is_enabled is False:
        action = enable
    elif is_enabled is True:
        action = update
    else:
        # State could not be determined (warnings-only response): do nothing.
        return isamAppliance.create_return_object(warnings=warnings)
    return action(isamAppliance, is_primary=is_primary, interface=interface,
                  remote=remote, port=port,
                  health_check_interval=health_check_interval,
                  health_check_timeout=health_check_timeout,
                  check_mode=check_mode, force=force)
def _check_update(isamAppliance, json_data):
    """
    idempotency test for each parameter
    """
    ret_obj = get(isamAppliance)
    warnings = ret_obj['warnings']
    current = ret_obj['data']
    if 'enabled' in current:
        if current['enabled'] is False:
            # HA is switched off entirely; an update would be meaningless.
            return False, warnings
        # Drop the state flag so only the tunable parameters are compared.
        del current['enabled']
    sorted_existing = tools.json_sort(current)
    sorted_desired = tools.json_sort(json_data)
    logger.debug("Sorted Existing Data:{0}".format(sorted_existing))
    logger.debug("Sorted Desired Data:{0}".format(sorted_desired))
    return sorted_existing != sorted_desired, warnings
def _check_enable(isamAppliance):
    """
    Checks delete function for idempotency

    Returns a ``(state, warnings)`` tuple where ``state`` is:
        True  - HA is currently enabled
        False - HA is currently disabled
        None  - the response carried no 'enabled' key (state unknown,
                e.g. the call only produced warnings on a non-appliance)
    """
    check_obj = get(isamAppliance)
    warnings = check_obj['warnings']
    if 'enabled' in check_obj['data']:
        # Equality (not identity / truthiness) is kept deliberately so the
        # comparison mirrors the original behaviour for the JSON boolean.
        if check_obj['data']['enabled'] == True:
            return True, warnings
        else:
            return False, warnings
    else:
        return None, warnings
def compare(isamAppliance1, isamAppliance2):
    """
    Compare FELB HA configuration between two appliances
    """
    first = get(isamAppliance1)
    second = get(isamAppliance2)
    # No keys are excluded from the comparison.
    return ibmsecurity.utilities.tools.json_compare(first, second, deleted_keys=[])
| 34.583815 | 106 | 0.638476 | import ibmsecurity.utilities.tools
import logging
from ibmsecurity.utilities import tools
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/ha"
requires_modules = None
requires_version = None
requires_model = "Appliance"
def get(isamAppliance, check_mode=False, force=False):
return isamAppliance.invoke_get("Retrieving HA configuration", module_uri,
requires_version=requires_version, requires_modules=requires_modules,
requires_model=requires_model)
def disable(isamAppliance, check_mode=False, force=False):
check_value, warnings = _check_enable(isamAppliance)
if force is True or check_value is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_delete("Disabling HA", module_uri,
requires_version=requires_version,
requires_modules=requires_modules,
requires_model=requires_model)
else:
return isamAppliance.create_return_object(warnings=warnings)
def enable(isamAppliance, is_primary, interface, remote, port, health_check_interval,
health_check_timeout, check_mode=False, force=False):
json_data = {
"is_primary": is_primary,
"interface": interface,
"remote": remote,
"port": port,
"health_check_interval": health_check_interval,
"health_check_timeout": health_check_timeout
}
check_value, warnings = _check_enable(isamAppliance)
if force is True or check_value is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_post("Enabling HA", module_uri,
json_data,
requires_version=requires_version,
requires_modules=requires_modules,
requires_model=requires_model)
else:
return isamAppliance.create_return_object(warnings=warnings)
def update(isamAppliance, is_primary, interface, remote, port, health_check_interval,
health_check_timeout, check_mode=False, force=False):
json_data = {
"is_primary": is_primary,
"interface": interface,
"remote": remote,
"port": port,
"health_check_interval": health_check_interval,
"health_check_timeout": health_check_timeout
}
update_required, warnings = _check_update(isamAppliance, json_data)
if force is True or update_required is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnigns=warnings)
else:
return isamAppliance.invoke_put("Updating HA configuration", module_uri,
json_data,
requires_modules=requires_modules,
requires_version=requires_version,
requires_model=requires_model)
else:
return isamAppliance.create_return_object(warnings=warnings)
def set(isamAppliance, is_primary, interface, remote, port, health_check_interval,
health_check_timeout, check_mode=False, force=False):
is_enabled, warnings = _check_enable(isamAppliance)
if is_enabled is False:
return enable(isamAppliance, is_primary=is_primary, interface=interface, remote=remote, port=port,
health_check_interval=health_check_interval,
health_check_timeout=health_check_timeout, check_mode=check_mode, force=force)
elif is_enabled is True:
return update(isamAppliance, is_primary=is_primary, interface=interface, remote=remote, port=port,
health_check_interval=health_check_interval,
health_check_timeout=health_check_timeout, check_mode=check_mode, force=force)
else:
return isamAppliance.create_return_object(warnings=warnings)
def _check_update(isamAppliance, json_data):
ret_obj = get(isamAppliance)
warnings = ret_obj['warnings']
if 'enabled' in ret_obj['data']:
if ret_obj['data']['enabled'] is False:
return False, warnings
else:
del ret_obj['data']['enabled']
sorted_ret_obj = tools.json_sort(ret_obj['data'])
sorted_json_data = tools.json_sort(json_data)
logger.debug("Sorted Existing Data:{0}".format(sorted_ret_obj))
logger.debug("Sorted Desired Data:{0}".format(sorted_json_data))
if sorted_ret_obj != sorted_json_data:
return True, warnings
else:
return False, warnings
def _check_enable(isamAppliance):
check_obj = get(isamAppliance)
warnings = check_obj['warnings']
if 'enabled' in check_obj['data']:
if check_obj['data']['enabled'] == True:
return True, warnings
else:
return False, warnings
else:
return None, warnings
def compare(isamAppliance1, isamAppliance2):
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
| true | true |
f728e661b01796f02639fa1d1c695a188a1df203 | 1,684 | py | Python | Code/HypoBertClas/pybert/test/predicter.py | NinaTian98369/HypoGen | 14f192ecc1ef0c6fc5864f0816ef61885dc9e864 | [
"MIT"
] | 3 | 2021-09-21T19:05:30.000Z | 2022-02-17T22:14:31.000Z | Code/HypoBertClas/pybert/test/predicter.py | NinaTian98369/HypoGen | 14f192ecc1ef0c6fc5864f0816ef61885dc9e864 | [
"MIT"
] | 2 | 2021-11-30T06:56:12.000Z | 2022-03-11T06:01:59.000Z | Code/HypoBertClas/pybert/test/predicter.py | NinaTian98369/HypoGen | 14f192ecc1ef0c6fc5864f0816ef61885dc9e864 | [
"MIT"
] | 1 | 2022-02-17T22:08:13.000Z | 2022-02-17T22:08:13.000Z | #encoding:utf-8
import torch
import numpy as np
from ..utils.utils import model_device,load_bert
class Predicter(object):
    """Runs a trained BERT classifier over a dataloader and collects the
    sigmoid-activated logits for every example."""

    def __init__(self, model, logger, n_gpu, model_path):
        self.model = model
        self.logger = logger
        self.width = 30  # character width of the textual progress bar
        self.model, self.device = model_device(n_gpu=n_gpu, model=self.model, logger=self.logger)
        self.model = load_bert(model_path=model_path, model=self.model)[0]

    def show_info(self, batch_id, n_batch):
        """Render an in-place textual progress bar for the current batch."""
        recv_per = min(100, int(100 * (batch_id + 1) / n_batch))
        show_bar = f"\r[predict]{batch_id+1}/{n_batch}[{int(self.width * recv_per / 100) * '>':<{self.width}s}]{recv_per}%"
        print(show_bar, end='')

    def predict(self, data):
        """Run inference over ``data`` and return the stacked sigmoid logits."""
        self.model.eval()
        n_batch = len(data)
        all_logits = None
        with torch.no_grad():
            for step, (input_ids, input_mask, segment_ids, label_ids) in enumerate(data):
                logits = self.model(input_ids.to(self.device),
                                    segment_ids.to(self.device),
                                    input_mask.to(self.device))
                logits = logits.sigmoid()
                self.show_info(step, n_batch)
                batch_np = logits.detach().cpu().numpy()
                if all_logits is None:
                    all_logits = batch_np
                else:
                    all_logits = np.concatenate([all_logits, batch_np], axis=0)
        return all_logits
| 33.019608 | 123 | 0.559976 |
import torch
import numpy as np
from ..utils.utils import model_device,load_bert
class Predicter(object):
def __init__(self,
model,
logger,
n_gpu,
model_path
):
self.model = model
self.logger = logger
self.width = 30
self.model, self.device = model_device(n_gpu= n_gpu, model=self.model, logger=self.logger)
loads = load_bert(model_path=model_path,model = self.model)
self.model = loads[0]
def show_info(self,batch_id,n_batch):
recv_per = int(100 * (batch_id + 1) / n_batch)
if recv_per >= 100:
recv_per = 100
show_bar = f"\r[predict]{batch_id+1}/{n_batch}[{int(self.width * recv_per / 100) * '>':<{self.width}s}]{recv_per}%"
print(show_bar,end='')
def predict(self,data):
all_logits = None
self.model.eval()
n_batch = len(data)
with torch.no_grad():
for step, (input_ids, input_mask, segment_ids, label_ids) in enumerate(data):
input_ids = input_ids.to(self.device)
input_mask = input_mask.to(self.device)
segment_ids = segment_ids.to(self.device)
logits = self.model(input_ids, segment_ids, input_mask)
logits = logits.sigmoid()
self.show_info(step,n_batch)
if all_logits is None:
all_logits = logits.detach().cpu().numpy()
else:
all_logits = np.concatenate([all_logits,logits.detach().cpu().numpy()],axis = 0)
return all_logits
| true | true |
f728e80f740d63b4f91fffd0267a79fc4be81c99 | 304 | py | Python | pyro/nn/__init__.py | HeyangGong/Pyro4CI | 28778477ce8ff3bc12b3ac8face2208e5b05ff7c | [
"MIT"
] | 2 | 2020-04-11T04:30:55.000Z | 2021-07-29T18:45:08.000Z | pyro/nn/__init__.py | HeyangGong/Pyro4CI | 28778477ce8ff3bc12b3ac8face2208e5b05ff7c | [
"MIT"
] | null | null | null | pyro/nn/__init__.py | HeyangGong/Pyro4CI | 28778477ce8ff3bc12b3ac8face2208e5b05ff7c | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function
from pyro.nn.auto_reg_nn import AutoRegressiveNN, ConditionalAutoRegressiveNN, MaskedLinear
from pyro.nn.dense_nn import DenseNN
__all__ = [
"AutoRegressiveNN",
"ConditionalAutoRegressiveNN",
"DenseNN",
"MaskedLinear",
]
| 25.333333 | 91 | 0.786184 | from __future__ import absolute_import, division, print_function
from pyro.nn.auto_reg_nn import AutoRegressiveNN, ConditionalAutoRegressiveNN, MaskedLinear
from pyro.nn.dense_nn import DenseNN
__all__ = [
"AutoRegressiveNN",
"ConditionalAutoRegressiveNN",
"DenseNN",
"MaskedLinear",
]
| true | true |
f728e89cc7e4fbbc18c7a515a87b73a0c5779a76 | 13,317 | py | Python | vtool/inspect_matches.py | WildMeOrg/wbia-vtool | a9c7e2649c625b26eb074766c1d9018f432a33c6 | [
"Apache-2.0"
] | 1 | 2017-05-13T14:06:15.000Z | 2017-05-13T14:06:15.000Z | vtool/inspect_matches.py | WildbookOrg/vtool | a9c7e2649c625b26eb074766c1d9018f432a33c6 | [
"Apache-2.0"
] | 4 | 2020-06-23T19:02:46.000Z | 2020-10-13T00:48:32.000Z | vtool/inspect_matches.py | WildbookOrg/vtool | a9c7e2649c625b26eb074766c1d9018f432a33c6 | [
"Apache-2.0"
] | 1 | 2020-06-29T03:42:39.000Z | 2020-06-29T03:42:39.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import utool as ut
import ubelt as ub
# Prefer the Qt-based guitool widgets; fall back to plain ``object`` bases so
# this module stays importable in headless / guitool-less environments.
try:
    import wbia.guitool as gt
    from wbia.guitool import mpl_widget

    INSPECT_BASE = gt.GuitoolWidget
    MatplotlibWidget = mpl_widget.MatplotlibWidget
except (ImportError, TypeError):
    import warnings

    warnings.warn('WARNING: guitool not available')
    # Degrade gracefully: the GUI classes defined below become plain objects.
    MatplotlibWidget = object
    INSPECT_BASE = object
def lazy_test_annot(key):
    """Build a lazily-evaluated annotation dict around a bundled test image.

    ``aid``/``nid`` are derived from the image filename; the remaining
    real-annotation metadata (gps, yaw, view, qual, time) is unknown for
    test data and therefore filled with NaN.
    """
    import numpy as np
    unknown = np.nan
    annot = ut.LazyDict(
        {
            'aid': key.split('.')[0],
            'nid': key[0:4],
            'rchip_fpath': ut.grab_test_imgpath(key),
            'gps': (unknown, unknown),
            'yaw': unknown,
            'view': unknown,
            'qual': unknown,
            'time': unknown,
        }
    )
    return annot
# Build the match-display configuration schema when wbia.dtool is available.
# Each ParamInfo becomes a user-tunable rendering option; the ``hideif``
# rules collapse overlay-specific options when the overlay is switched off.
try:
    import wbia.dtool

    MatchDisplayConfig = wbia.dtool.from_param_info_list(
        [
            ut.ParamInfo('overlay', True),
            ut.ParamInfo('show_all_kpts', False),
            ut.ParamInfo('mask_blend', 0.0, min_=0, max_=1),
            ut.ParamInfo('heatmask', True, hideif=':not overlay'),
            ut.ParamInfo('show_homog', False, hideif=':not overlay'),
            ut.ParamInfo('show_ori', False, hideif=':not overlay'),
            ut.ParamInfo('show_ell', False, hideif=':not overlay'),
            ut.ParamInfo('show_pts', False, hideif=':not overlay'),
            ut.ParamInfo('show_lines', False, hideif=lambda cfg: not cfg['overlay']),
            ut.ParamInfo('show_rect', False, hideif=':not overlay'),
            ut.ParamInfo('show_eig', False, hideif=':not overlay'),
            ut.ParamInfo('ell_alpha', 0.6, min_=0, max_=1, hideif=':not overlay'),
            ut.ParamInfo('line_alpha', 0.35, min_=0, max_=1, hideif=':not overlay'),
        ]
    )
except (ImportError, TypeError):
    # dtool missing: MatchDisplayConfig is simply left undefined.
    pass
class MatchInspector(INSPECT_BASE):
"""
A widget that contains
(1) a viewport that displays an annotation pair with matches overlayed.
(2) a control panel for tuning matching parameters
(3) a text area displaying information about the match vector
CommandLine:
python -m vtool.inspect_matches MatchInspector:0 --show
python -m vtool.inspect_matches MatchInspector:1 --show
python -m vtool.inspect_matches MatchInspector:1 --db GZ_Master1 --aids=1041,1045 --show
Example:
>>> # SCRIPT
>>> from vtool.inspect_matches import * # NOQA
>>> import vtool as vt
>>> gt.ensure_qapp()
>>> ut.qtensure()
>>> annot1 = lazy_test_annot('easy1.png')
>>> annot2 = lazy_test_annot('easy2.png')
>>> match = vt.PairwiseMatch(annot1, annot2)
>>> self = MatchInspector(match=match)
>>> self.show()
>>> # xdoctest: +REQUIRES(--show)
>>> #self.update()
>>> gt.qtapp_loop(qwin=self, freq=10)
Example:
>>> # SCRIPT
>>> from vtool.inspect_matches import * # NOQA
>>> import vtool as vt
>>> import wbia
>>> gt.ensure_qapp()
>>> ut.qtensure()
>>> ibs = wbia.opendb(defaultdb='PZ_MTEST')
>>> aids = ub.argval('--aids', default=[1, 2])
>>> print('aids = %r' % (aids,))
>>> annots = ibs.annots(aids)
>>> annot1 = annots[0]._make_lazy_dict()
>>> annot2 = annots[1]._make_lazy_dict()
>>> cfgdict = MatchDisplayConfig().asdict()
>>> cfgdict = ut.argparse_dict(cfgdict)
>>> match = vt.PairwiseMatch(annot1, annot2)
>>> self = MatchInspector(match=match, cfgdict=cfgdict)
>>> self.show()
>>> # xdoctest: +REQUIRES(--show)
>>> #self.update()
>>> gt.qtapp_loop(qwin=self, freq=10)
"""
def showEvent(self, event):
super(MatchInspector, self).showEvent(event)
# Fire initialize event after we show the GUI
# QtCore.QTimer.singleShot(50, self.init_inference)
self.first_show()
def first_show(self, state=None):
if self.match is not None:
# Show the match if updating is on, otherwise just draw the annot
# pair
if self.autoupdate_cb.checkState():
self.update()
else:
self.draw_pair()
def set_match(self, match=None, on_context=None, info_text=None):
self.match = match
self.info_text = info_text
self.on_context = on_context
if self.isVisible():
self.first_show()
def initialize(
self, match=None, on_context=None, autoupdate=True, info_text=None, cfgdict=None
):
from wbia.plottool import abstract_interaction
from wbia.guitool.__PYQT__ import QtCore
self.set_match(match, on_context, info_text)
self._setup_configs(cfgdict=cfgdict)
self._setup_layout(autoupdate=autoupdate)
abstract_interaction.register_interaction(self)
self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.execContextMenu)
def execContextMenu(self, qpoint):
if self.on_context:
options = self.on_context()
else:
options = [('No context set', None)]
gt.popup_menu(self, qpoint, options)
def screenshot(self):
import wbia.plottool as pt
with pt.RenderingContext() as render:
self.match.show(**self.disp_config)
fpaths = gt.newFileDialog('.', mode='save', exec_=True)
if fpaths is not None and len(fpaths) > 0:
fpath = fpaths[0]
if not fpath.endswith('.jpg'):
fpath += '.jpg'
import vtool as vt
vt.imwrite(fpath, render.image)
def embed(self):
match = self.match # NOQA
import utool
utool.embed()
def _new_config_widget(self, cfg, changed=None):
from wbia.guitool import PrefWidget2
user_mode = 0
cfg_widget = PrefWidget2.EditConfigWidget(
config=cfg, user_mode=user_mode, parent=self, changed=changed
)
return cfg_widget
def closeEvent(self, event):
from wbia.plottool import abstract_interaction
abstract_interaction.unregister_interaction(self)
super(MatchInspector, self).closeEvent(event)
def _setup_configs(self, cfgdict=None):
from vtool import matching
import wbia.dtool
# import pyhesaff
# default_dict = pyhesaff.get_hesaff_default_params()
# default_dict = vt.get_extract_features_default_params()
TmpFeatConfig = wbia.dtool.from_param_info_list(matching.VSONE_FEAT_CONFIG)
TmpNChipConfig = wbia.dtool.from_param_info_list(matching.NORM_CHIP_CONFIG)
# [
# ut.ParamInfo(key, val) for key, val in default_dict.items()
# # ut.ParamInfo('affine_invariance', True),
# # ut.ParamInfo('rotation_invariance', False),
# ])
self.featconfig = TmpFeatConfig()
self.chipconfig = TmpNChipConfig()
TmpVsOneConfig = wbia.dtool.from_param_info_list(matching.VSONE_DEFAULT_CONFIG)
self.config = TmpVsOneConfig()
self.disp_config = MatchDisplayConfig()
if cfgdict is not None:
print('[inspect_match] default cfgdict = %r' % (cfgdict,))
self.config.update(**cfgdict)
self.featconfig.update(**cfgdict)
self.chipconfig.update(**cfgdict)
self.disp_config.update(**cfgdict)
# Make config widgets after setting defaults
self.chipconfig_widget = self._new_config_widget(
self.chipconfig, changed=self.on_chip_cfg_changed
)
self.featconfig_widget = self._new_config_widget(
self.featconfig, changed=self.on_feat_cfg_changed
)
self.config_widget = self._new_config_widget(
self.config, changed=self.on_cfg_changed
)
self.disp_config_widget = self._new_config_widget(
self.disp_config, changed=self.on_cfg_changed
)
def _setup_layout(self, autoupdate=True):
from wbia.guitool.__PYQT__ import QtWidgets
self.menubar = gt.newMenubar(self)
self.menuFile = self.menubar.newMenu('Dev')
self.menuFile.newAction(triggered=self.embed)
self.menuFile.newAction(triggered=self.screenshot)
splitter1 = self.addNewSplitter(orientation='horiz')
config_vframe = splitter1.newWidget()
splitter2 = splitter1.addNewSplitter(orientation='vert')
config_vframe.addWidget(QtWidgets.QLabel('Chip Config'))
config_vframe.addWidget(self.chipconfig_widget)
config_vframe.addWidget(QtWidgets.QLabel('Feat Config'))
config_vframe.addWidget(self.featconfig_widget)
config_vframe.addWidget(QtWidgets.QLabel('Query Config'))
config_vframe.addWidget(self.config_widget)
config_vframe.addWidget(QtWidgets.QLabel('Display Config'))
config_vframe.addWidget(self.disp_config_widget)
# update_hframe = config_vframe.addNewWidget(orientation='horiz')
# update_hframe.addNewButton('Update', pressed=self.update)
self.autoupdate_cb = config_vframe.addNewCheckBox(
'auto-update', checked=autoupdate, changed=self.first_show
)
self.mpl_widget = MatplotlibWidget(parent=self)
splitter2.addWidget(self.mpl_widget)
self.infobox = splitter2.addNewTextEdit()
def execute_vsone(self):
from vtool import matching
print('[inspect_match] Execute vsone')
cfgdict = {}
cfgdict.update(self.featconfig.asdict())
cfgdict.update(self.chipconfig.asdict())
match = self.match
match.verbose = True
match._inplace_default = True
matching.ensure_metadata_vsone(match.annot1, match.annot2, cfgdict=cfgdict)
match_config = self.config.asdict()
match.apply_all(match_config)
def draw_pair(self):
if self.match is None:
return
self.mpl_widget.clf()
ax = self.mpl_widget.ax
info_html = ''
if self.info_text is not None:
info_html = '<pre>' + self.info_text + '</pre>'
self.infobox.setText(info_html)
self.match.show(ax=ax, overlay=False)
self.mpl_widget.fig.canvas.draw()
def draw_vsone(self):
match = self.match
summary = match._make_local_summary_feature_vector(summary_ops={'sum'})
info_html = ''
if self.info_text is not None:
info_html = '<pre>' + self.info_text + '</pre>'
feat_html = '<pre>' + ut.align(ub.repr2(summary), ':') + '</pre>'
self.infobox.setText(info_html + feat_html)
self.mpl_widget.clf()
ax = self.mpl_widget.ax
match.show(ax=ax, **self.disp_config)
# fig.show()
self.mpl_widget.fig.canvas.draw()
def update(self, state=None):
if self.autoupdate_cb.checkState() and self.match is not None:
self.execute_vsone()
self.draw_vsone()
def on_cfg_changed(self, *args):
self.update()
self.cfg_needs_update = True
def on_chip_cfg_changed(self, *args):
print('Update feats')
feat_keys = ['nchip', 'vecs', 'kpts', '_feats', 'flann']
self.match.annot1._mutable = True
self.match.annot2._mutable = True
for key in feat_keys:
if key in self.match.annot1:
del self.match.annot1[key]
if key in self.match.annot2:
del self.match.annot2[key]
self.update()
self.cfg_needs_update = True
def on_feat_cfg_changed(self, *args):
print('Update feats')
feat_keys = ['vecs', 'kpts', '_feats', 'flann']
self.match.annot1._mutable = True
self.match.annot2._mutable = True
for key in feat_keys:
if key in self.match.annot1:
del self.match.annot1[key]
if key in self.match.annot2:
del self.match.annot2[key]
self.update()
self.cfg_needs_update = True
def make_match_interaction(matches, metadata, type_='RAT+SV', **kwargs):
import wbia.plottool.interact_matches
# import wbia.plottool as pt
fm, fs = matches[type_][0:2]
try:
H1 = metadata['H_' + type_.split('+')[0]]
except Exception:
H1 = None
# fm, fs = matches['RAT'][0:2]
annot1 = metadata['annot1']
annot2 = metadata['annot2']
rchip1, kpts1, vecs1 = ub.dict_take(annot1, ['nchip', 'kpts', 'vecs'])
rchip2, kpts2, vecs2 = ub.dict_take(annot2, ['nchip', 'kpts', 'vecs'])
# pt.show_chipmatch2(rchip1, rchip2, kpts1, kpts2, fm=fm, fs=fs)
fsv = fs[:, None]
interact = wbia.plottool.interact_matches.MatchInteraction2(
rchip1, rchip2, kpts1, kpts2, fm, fs, fsv, vecs1, vecs2, H1=H1, **kwargs
)
return interact
def show_matching_dict(matches, metadata, *args, **kwargs):
interact = make_match_interaction(matches, metadata, *args, **kwargs)
interact.show_page()
return interact
if __name__ == '__main__':
"""
CommandLine:
xdoctest -m vtool.inspect_matches
"""
import xdoctest
xdoctest.doctest_module(__file__)
| 34.952756 | 96 | 0.619133 |
from __future__ import absolute_import, division, print_function
import utool as ut
import ubelt as ub
try:
import wbia.guitool as gt
from wbia.guitool import mpl_widget
INSPECT_BASE = gt.GuitoolWidget
MatplotlibWidget = mpl_widget.MatplotlibWidget
except (ImportError, TypeError):
import warnings
warnings.warn('WARNING: guitool not available')
MatplotlibWidget = object
INSPECT_BASE = object
def lazy_test_annot(key):
import numpy as np
rchip_fpath = ut.grab_test_imgpath(key)
annot = ut.LazyDict(
{
'aid': key.split('.')[0],
'nid': key[0:4],
'rchip_fpath': rchip_fpath,
'gps': (np.nan, np.nan),
'yaw': np.nan,
'view': np.nan,
'qual': np.nan,
'time': np.nan,
}
)
return annot
try:
import wbia.dtool
MatchDisplayConfig = wbia.dtool.from_param_info_list(
[
ut.ParamInfo('overlay', True),
ut.ParamInfo('show_all_kpts', False),
ut.ParamInfo('mask_blend', 0.0, min_=0, max_=1),
ut.ParamInfo('heatmask', True, hideif=':not overlay'),
ut.ParamInfo('show_homog', False, hideif=':not overlay'),
ut.ParamInfo('show_ori', False, hideif=':not overlay'),
ut.ParamInfo('show_ell', False, hideif=':not overlay'),
ut.ParamInfo('show_pts', False, hideif=':not overlay'),
ut.ParamInfo('show_lines', False, hideif=lambda cfg: not cfg['overlay']),
ut.ParamInfo('show_rect', False, hideif=':not overlay'),
ut.ParamInfo('show_eig', False, hideif=':not overlay'),
ut.ParamInfo('ell_alpha', 0.6, min_=0, max_=1, hideif=':not overlay'),
ut.ParamInfo('line_alpha', 0.35, min_=0, max_=1, hideif=':not overlay'),
]
)
except (ImportError, TypeError):
pass
class MatchInspector(INSPECT_BASE):
def showEvent(self, event):
super(MatchInspector, self).showEvent(event)
self.first_show()
def first_show(self, state=None):
if self.match is not None:
if self.autoupdate_cb.checkState():
self.update()
else:
self.draw_pair()
def set_match(self, match=None, on_context=None, info_text=None):
self.match = match
self.info_text = info_text
self.on_context = on_context
if self.isVisible():
self.first_show()
def initialize(
self, match=None, on_context=None, autoupdate=True, info_text=None, cfgdict=None
):
from wbia.plottool import abstract_interaction
from wbia.guitool.__PYQT__ import QtCore
self.set_match(match, on_context, info_text)
self._setup_configs(cfgdict=cfgdict)
self._setup_layout(autoupdate=autoupdate)
abstract_interaction.register_interaction(self)
self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.execContextMenu)
def execContextMenu(self, qpoint):
if self.on_context:
options = self.on_context()
else:
options = [('No context set', None)]
gt.popup_menu(self, qpoint, options)
def screenshot(self):
import wbia.plottool as pt
with pt.RenderingContext() as render:
self.match.show(**self.disp_config)
fpaths = gt.newFileDialog('.', mode='save', exec_=True)
if fpaths is not None and len(fpaths) > 0:
fpath = fpaths[0]
if not fpath.endswith('.jpg'):
fpath += '.jpg'
import vtool as vt
vt.imwrite(fpath, render.image)
def embed(self):
match = self.match
import utool
utool.embed()
def _new_config_widget(self, cfg, changed=None):
from wbia.guitool import PrefWidget2
user_mode = 0
cfg_widget = PrefWidget2.EditConfigWidget(
config=cfg, user_mode=user_mode, parent=self, changed=changed
)
return cfg_widget
def closeEvent(self, event):
from wbia.plottool import abstract_interaction
abstract_interaction.unregister_interaction(self)
super(MatchInspector, self).closeEvent(event)
def _setup_configs(self, cfgdict=None):
from vtool import matching
import wbia.dtool
TmpFeatConfig = wbia.dtool.from_param_info_list(matching.VSONE_FEAT_CONFIG)
TmpNChipConfig = wbia.dtool.from_param_info_list(matching.NORM_CHIP_CONFIG)
TmpNChipConfig()
TmpVsOneConfig = wbia.dtool.from_param_info_list(matching.VSONE_DEFAULT_CONFIG)
self.config = TmpVsOneConfig()
self.disp_config = MatchDisplayConfig()
if cfgdict is not None:
print('[inspect_match] default cfgdict = %r' % (cfgdict,))
self.config.update(**cfgdict)
self.featconfig.update(**cfgdict)
self.chipconfig.update(**cfgdict)
self.disp_config.update(**cfgdict)
self.chipconfig_widget = self._new_config_widget(
self.chipconfig, changed=self.on_chip_cfg_changed
)
self.featconfig_widget = self._new_config_widget(
self.featconfig, changed=self.on_feat_cfg_changed
)
self.config_widget = self._new_config_widget(
self.config, changed=self.on_cfg_changed
)
self.disp_config_widget = self._new_config_widget(
self.disp_config, changed=self.on_cfg_changed
)
def _setup_layout(self, autoupdate=True):
from wbia.guitool.__PYQT__ import QtWidgets
self.menubar = gt.newMenubar(self)
self.menuFile = self.menubar.newMenu('Dev')
self.menuFile.newAction(triggered=self.embed)
self.menuFile.newAction(triggered=self.screenshot)
splitter1 = self.addNewSplitter(orientation='horiz')
config_vframe = splitter1.newWidget()
splitter2 = splitter1.addNewSplitter(orientation='vert')
config_vframe.addWidget(QtWidgets.QLabel('Chip Config'))
config_vframe.addWidget(self.chipconfig_widget)
config_vframe.addWidget(QtWidgets.QLabel('Feat Config'))
config_vframe.addWidget(self.featconfig_widget)
config_vframe.addWidget(QtWidgets.QLabel('Query Config'))
config_vframe.addWidget(self.config_widget)
config_vframe.addWidget(QtWidgets.QLabel('Display Config'))
config_vframe.addWidget(self.disp_config_widget)
self.autoupdate_cb = config_vframe.addNewCheckBox(
'auto-update', checked=autoupdate, changed=self.first_show
)
self.mpl_widget = MatplotlibWidget(parent=self)
splitter2.addWidget(self.mpl_widget)
self.infobox = splitter2.addNewTextEdit()
def execute_vsone(self):
from vtool import matching
print('[inspect_match] Execute vsone')
cfgdict = {}
cfgdict.update(self.featconfig.asdict())
cfgdict.update(self.chipconfig.asdict())
match = self.match
match.verbose = True
match._inplace_default = True
matching.ensure_metadata_vsone(match.annot1, match.annot2, cfgdict=cfgdict)
match_config = self.config.asdict()
match.apply_all(match_config)
def draw_pair(self):
if self.match is None:
return
self.mpl_widget.clf()
ax = self.mpl_widget.ax
info_html = ''
if self.info_text is not None:
info_html = '<pre>' + self.info_text + '</pre>'
self.infobox.setText(info_html)
self.match.show(ax=ax, overlay=False)
self.mpl_widget.fig.canvas.draw()
def draw_vsone(self):
match = self.match
summary = match._make_local_summary_feature_vector(summary_ops={'sum'})
info_html = ''
if self.info_text is not None:
info_html = '<pre>' + self.info_text + '</pre>'
feat_html = '<pre>' + ut.align(ub.repr2(summary), ':') + '</pre>'
self.infobox.setText(info_html + feat_html)
self.mpl_widget.clf()
ax = self.mpl_widget.ax
match.show(ax=ax, **self.disp_config)
self.mpl_widget.fig.canvas.draw()
def update(self, state=None):
if self.autoupdate_cb.checkState() and self.match is not None:
self.execute_vsone()
self.draw_vsone()
def on_cfg_changed(self, *args):
self.update()
self.cfg_needs_update = True
def on_chip_cfg_changed(self, *args):
print('Update feats')
feat_keys = ['nchip', 'vecs', 'kpts', '_feats', 'flann']
self.match.annot1._mutable = True
self.match.annot2._mutable = True
for key in feat_keys:
if key in self.match.annot1:
del self.match.annot1[key]
if key in self.match.annot2:
del self.match.annot2[key]
self.update()
self.cfg_needs_update = True
def on_feat_cfg_changed(self, *args):
print('Update feats')
feat_keys = ['vecs', 'kpts', '_feats', 'flann']
self.match.annot1._mutable = True
self.match.annot2._mutable = True
for key in feat_keys:
if key in self.match.annot1:
del self.match.annot1[key]
if key in self.match.annot2:
del self.match.annot2[key]
self.update()
self.cfg_needs_update = True
def make_match_interaction(matches, metadata, type_='RAT+SV', **kwargs):
import wbia.plottool.interact_matches
fm, fs = matches[type_][0:2]
try:
H1 = metadata['H_' + type_.split('+')[0]]
except Exception:
H1 = None
annot1 = metadata['annot1']
annot2 = metadata['annot2']
rchip1, kpts1, vecs1 = ub.dict_take(annot1, ['nchip', 'kpts', 'vecs'])
rchip2, kpts2, vecs2 = ub.dict_take(annot2, ['nchip', 'kpts', 'vecs'])
fsv = fs[:, None]
interact = wbia.plottool.interact_matches.MatchInteraction2(
rchip1, rchip2, kpts1, kpts2, fm, fs, fsv, vecs1, vecs2, H1=H1, **kwargs
)
return interact
def show_matching_dict(matches, metadata, *args, **kwargs):
interact = make_match_interaction(matches, metadata, *args, **kwargs)
interact.show_page()
return interact
if __name__ == '__main__':
import xdoctest
xdoctest.doctest_module(__file__)
| true | true |
f728e8ba0cf5f206c1d37e33dba1a29723c65385 | 5,237 | py | Python | src/smart_compose/train/data_fn.py | StarWang/detext | 66f071ec2cebf5e54e7d1de40936b5f281c2a69b | [
"BSD-2-Clause"
] | 1,229 | 2020-02-07T22:17:47.000Z | 2022-03-26T06:56:52.000Z | src/smart_compose/train/data_fn.py | StarWang/detext | 66f071ec2cebf5e54e7d1de40936b5f281c2a69b | [
"BSD-2-Clause"
] | 36 | 2020-04-10T02:00:20.000Z | 2022-02-09T06:52:35.000Z | src/smart_compose/train/data_fn.py | StarWang/detext | 66f071ec2cebf5e54e7d1de40936b5f281c2a69b | [
"BSD-2-Clause"
] | 142 | 2020-04-07T01:04:07.000Z | 2022-03-31T16:22:20.000Z | import tensorflow as tf
from functools import partial
from smart_compose.utils.parsing_utils import get_input_files, InputFtrType, iterate_items_with_list_val
def _read_specified_features(inputs, feature_type2name):
"""Only reads in features specified in the DeText arguments"""
required_inputs = {}
for _, ftr_name_list in iterate_items_with_list_val(feature_type2name):
for ftr_name in ftr_name_list:
required_inputs[ftr_name] = inputs[ftr_name]
return required_inputs
_FTR_TYPE_TO_SCHEMA = {
InputFtrType.TARGET_COLUMN_NAME: tf.io.FixedLenFeature(shape=[], dtype=tf.string)
}
def _get_tfrecord_feature_parsing_schema(feature_type_2_name: dict):
"""Returns parsing schema for input TFRecord
:param feature_type_2_name: Features mapping from feature types to feature names
"""
ftr_name_2_schema = dict()
for ftr_type, ftr_name_lst in iterate_items_with_list_val(feature_type_2_name):
for ftr_name in ftr_name_lst:
ftr_name_2_schema[ftr_name] = _FTR_TYPE_TO_SCHEMA[ftr_type]
return ftr_name_2_schema
def _cast_features_to_smaller_dtype(example, feature_type_2_names: dict):
"""Casts tensor to smaller storage dtype. int64 -> int32, float64 -> float32"""
def _cast_to_dtype_of_smaller_size(t):
if t.dtype == tf.int64:
return tf.cast(t, dtype=tf.int32)
elif t.dtype == tf.float64:
return tf.cast(t, dtype=tf.float32)
else:
return t
for ftr_type, ftr_name_lst in iterate_items_with_list_val(feature_type_2_names):
for ftr_name in ftr_name_lst:
example[ftr_name] = _cast_to_dtype_of_smaller_size(example[ftr_name])
return example
_FTR_TYPE_TO_DENSE_DEFAULT_VAL = {
InputFtrType.TARGET_COLUMN_NAME: '',
}
def input_fn_tfrecord(input_pattern,
batch_size,
mode,
feature_type_2_name: dict,
block_length=100,
prefetch_size=tf.data.experimental.AUTOTUNE,
num_parallel_calls=tf.data.experimental.AUTOTUNE,
input_pipeline_context=None):
"""
Data input function for training given TFRecord
"""
output_buffer_size = 1000
input_files = get_input_files(input_pattern)
feature_type_2_name = feature_type_2_name.copy()
if len(input_files) > 1: # Multiple input files
# Preprocess files concurrently, and interleave blocks of block_length records from each file
dataset = tf.data.Dataset.from_tensor_slices(tf.constant(input_files))
# Shard input when using distributed training strategy
if mode == tf.estimator.ModeKeys.TRAIN and input_pipeline_context and input_pipeline_context.num_input_pipelines > 1:
dataset = dataset.shard(input_pipeline_context.num_input_pipelines,
input_pipeline_context.input_pipeline_id)
dataset = dataset.shuffle(buffer_size=len(input_files))
dataset = dataset.interleave(tf.data.TFRecordDataset, block_length=block_length,
num_parallel_calls=num_parallel_calls)
else:
dataset = tf.data.TFRecordDataset(input_files[0])
# Parse and preprocess data
dataset = tfrecord_transform_fn(dataset,
batch_size,
mode,
feature_type_2_name,
output_buffer_size,
prefetch_size)
return dataset
def _split_features_and_labels(example, feature_type_2_name: dict):
"""Split inputs into two parts: features and label"""
target_ftr_name = feature_type_2_name[InputFtrType.TARGET_COLUMN_NAME]
labels = {
target_ftr_name: example.pop(target_ftr_name)
}
return example, labels
def tfrecord_transform_fn(dataset,
batch_size,
mode,
feature_type_2_name,
output_buffer_size,
prefetch_size=tf.data.experimental.AUTOTUNE,
num_parallel_calls=tf.data.experimental.AUTOTUNE):
""" Preprocesses datasets including
1. dataset shuffling
2. record parsing
3. padding and batching
"""
if mode == tf.estimator.ModeKeys.TRAIN:
dataset = dataset.shuffle(output_buffer_size)
dataset = dataset.repeat()
def _process_data(record, features_schema):
example = tf.io.parse_single_example(serialized=record, features=features_schema)
example = _cast_features_to_smaller_dtype(example, feature_type_2_name)
features, labels = _split_features_and_labels(example, feature_type_2_name)
return features, labels
features_schema = _get_tfrecord_feature_parsing_schema(feature_type_2_name)
dataset = dataset.map(partial(_process_data, features_schema=features_schema),
num_parallel_calls=num_parallel_calls)
dataset = (dataset
.batch(batch_size, drop_remainder=True)
.prefetch(prefetch_size))
return dataset
| 38.507353 | 125 | 0.665266 | import tensorflow as tf
from functools import partial
from smart_compose.utils.parsing_utils import get_input_files, InputFtrType, iterate_items_with_list_val
def _read_specified_features(inputs, feature_type2name):
required_inputs = {}
for _, ftr_name_list in iterate_items_with_list_val(feature_type2name):
for ftr_name in ftr_name_list:
required_inputs[ftr_name] = inputs[ftr_name]
return required_inputs
_FTR_TYPE_TO_SCHEMA = {
InputFtrType.TARGET_COLUMN_NAME: tf.io.FixedLenFeature(shape=[], dtype=tf.string)
}
def _get_tfrecord_feature_parsing_schema(feature_type_2_name: dict):
ftr_name_2_schema = dict()
for ftr_type, ftr_name_lst in iterate_items_with_list_val(feature_type_2_name):
for ftr_name in ftr_name_lst:
ftr_name_2_schema[ftr_name] = _FTR_TYPE_TO_SCHEMA[ftr_type]
return ftr_name_2_schema
def _cast_features_to_smaller_dtype(example, feature_type_2_names: dict):
def _cast_to_dtype_of_smaller_size(t):
if t.dtype == tf.int64:
return tf.cast(t, dtype=tf.int32)
elif t.dtype == tf.float64:
return tf.cast(t, dtype=tf.float32)
else:
return t
for ftr_type, ftr_name_lst in iterate_items_with_list_val(feature_type_2_names):
for ftr_name in ftr_name_lst:
example[ftr_name] = _cast_to_dtype_of_smaller_size(example[ftr_name])
return example
_FTR_TYPE_TO_DENSE_DEFAULT_VAL = {
InputFtrType.TARGET_COLUMN_NAME: '',
}
def input_fn_tfrecord(input_pattern,
batch_size,
mode,
feature_type_2_name: dict,
block_length=100,
prefetch_size=tf.data.experimental.AUTOTUNE,
num_parallel_calls=tf.data.experimental.AUTOTUNE,
input_pipeline_context=None):
output_buffer_size = 1000
input_files = get_input_files(input_pattern)
feature_type_2_name = feature_type_2_name.copy()
if len(input_files) > 1:
dataset = tf.data.Dataset.from_tensor_slices(tf.constant(input_files))
if mode == tf.estimator.ModeKeys.TRAIN and input_pipeline_context and input_pipeline_context.num_input_pipelines > 1:
dataset = dataset.shard(input_pipeline_context.num_input_pipelines,
input_pipeline_context.input_pipeline_id)
dataset = dataset.shuffle(buffer_size=len(input_files))
dataset = dataset.interleave(tf.data.TFRecordDataset, block_length=block_length,
num_parallel_calls=num_parallel_calls)
else:
dataset = tf.data.TFRecordDataset(input_files[0])
dataset = tfrecord_transform_fn(dataset,
batch_size,
mode,
feature_type_2_name,
output_buffer_size,
prefetch_size)
return dataset
def _split_features_and_labels(example, feature_type_2_name: dict):
target_ftr_name = feature_type_2_name[InputFtrType.TARGET_COLUMN_NAME]
labels = {
target_ftr_name: example.pop(target_ftr_name)
}
return example, labels
def tfrecord_transform_fn(dataset,
batch_size,
mode,
feature_type_2_name,
output_buffer_size,
prefetch_size=tf.data.experimental.AUTOTUNE,
num_parallel_calls=tf.data.experimental.AUTOTUNE):
if mode == tf.estimator.ModeKeys.TRAIN:
dataset = dataset.shuffle(output_buffer_size)
dataset = dataset.repeat()
def _process_data(record, features_schema):
example = tf.io.parse_single_example(serialized=record, features=features_schema)
example = _cast_features_to_smaller_dtype(example, feature_type_2_name)
features, labels = _split_features_and_labels(example, feature_type_2_name)
return features, labels
features_schema = _get_tfrecord_feature_parsing_schema(feature_type_2_name)
dataset = dataset.map(partial(_process_data, features_schema=features_schema),
num_parallel_calls=num_parallel_calls)
dataset = (dataset
.batch(batch_size, drop_remainder=True)
.prefetch(prefetch_size))
return dataset
| true | true |
f728e961381abd978e448b0e9421310fa127c4d7 | 519 | py | Python | app/users/signals.py | Nilsen11/django-training-CBV | b470fd1ea11df5d360389cbd24cac18c9383b8bd | [
"MIT"
] | null | null | null | app/users/signals.py | Nilsen11/django-training-CBV | b470fd1ea11df5d360389cbd24cac18c9383b8bd | [
"MIT"
] | null | null | null | app/users/signals.py | Nilsen11/django-training-CBV | b470fd1ea11df5d360389cbd24cac18c9383b8bd | [
"MIT"
] | null | null | null | from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import Profile
# @receiver(post_save, sender=User)
def create_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
# @receiver(post_save, sender=User)
def save_profile(sender, instance, **kwargs):
instance.profile.save()
post_save.connect(create_profile, sender=User)
post_save.connect(save_profile, sender=User)
| 25.95 | 56 | 0.776493 | from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import Profile
def create_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
def save_profile(sender, instance, **kwargs):
instance.profile.save()
post_save.connect(create_profile, sender=User)
post_save.connect(save_profile, sender=User)
| true | true |
f728ea2eb644fbcc81d1cbb2f7e623c7f87f0380 | 834 | py | Python | src/bokeh_app/graph_view.py | avbatchelor/insight-articles-project | 852b338b786cb5b9c281fcec2e378aed8d3dc617 | [
"MIT"
] | null | null | null | src/bokeh_app/graph_view.py | avbatchelor/insight-articles-project | 852b338b786cb5b9c281fcec2e378aed8d3dc617 | [
"MIT"
] | null | null | null | src/bokeh_app/graph_view.py | avbatchelor/insight-articles-project | 852b338b786cb5b9c281fcec2e378aed8d3dc617 | [
"MIT"
] | null | null | null | import networkx as nx
import pickle
from bokeh.io import show, output_file
from bokeh.plotting import figure
from bokeh.models.graphs import from_networkx
processed_data_folder = 'C:\\Users\\Alex\\Documents\\GitHub\\insight-articles-project\\data\\processed\\'
filename = processed_data_folder + 'graph_and_labels'
with open (filename, 'rb') as fp:
graph_mat, topic_labels = pickle.load(fp)
G = nx.from_numpy_matrix(graph_mat)
pos=nx.spring_layout(G)
nx.relabel_nodes(G,topic_labels)
nx.draw(G,pos)
nx.draw_networkx_labels(G,pos,topic_labels,font_size=16)
plot = figure(title="Blog Curator Demo", x_range=(-2.1,2.1), y_range=(-2.1,2.1),
tools="", toolbar_location=None)
graph = from_networkx(G, nx.spring_layout, scale=2, center=(0,0))
plot.renderers.append(graph)
output_file("networkx_graph.html")
show(plot) | 29.785714 | 105 | 0.758993 | import networkx as nx
import pickle
from bokeh.io import show, output_file
from bokeh.plotting import figure
from bokeh.models.graphs import from_networkx
processed_data_folder = 'C:\\Users\\Alex\\Documents\\GitHub\\insight-articles-project\\data\\processed\\'
filename = processed_data_folder + 'graph_and_labels'
with open (filename, 'rb') as fp:
graph_mat, topic_labels = pickle.load(fp)
G = nx.from_numpy_matrix(graph_mat)
pos=nx.spring_layout(G)
nx.relabel_nodes(G,topic_labels)
nx.draw(G,pos)
nx.draw_networkx_labels(G,pos,topic_labels,font_size=16)
plot = figure(title="Blog Curator Demo", x_range=(-2.1,2.1), y_range=(-2.1,2.1),
tools="", toolbar_location=None)
graph = from_networkx(G, nx.spring_layout, scale=2, center=(0,0))
plot.renderers.append(graph)
output_file("networkx_graph.html")
show(plot) | true | true |
f728ed59d3b16b2ce2e1e501c09060a43e8d75f1 | 2,814 | py | Python | cpp/src/tools/scons/scons-local-1.2.0.d20090223/SCons/Tool/ifl.py | nawien-sharma/keyczar | c55563bbd70f4b6fefc7444e296aab9894475f9a | [
"Apache-2.0"
] | 30 | 2015-01-29T14:06:05.000Z | 2022-01-10T07:47:29.000Z | cpp/src/tools/scons/scons-local-1.2.0.d20090223/SCons/Tool/ifl.py | nawien-sharma/keyczar | c55563bbd70f4b6fefc7444e296aab9894475f9a | [
"Apache-2.0"
] | 1 | 2017-02-20T20:57:48.000Z | 2018-12-19T23:44:38.000Z | cpp/src/tools/scons/scons-local-1.2.0.d20090223/SCons/Tool/ifl.py | nawien-sharma/keyczar | c55563bbd70f4b6fefc7444e296aab9894475f9a | [
"Apache-2.0"
] | 15 | 2015-04-23T02:38:36.000Z | 2021-03-01T20:09:39.000Z | """SCons.Tool.ifl
Tool-specific initialization for the Intel Fortran compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/ifl.py 4043 2009/02/23 09:06:45 scons"
import SCons.Defaults
from SCons.Scanner.Fortran import FortranScan
from FortranCommon import add_all_to_env
def generate(env):
    """Add Builders and construction variables for ifl to an Environment.

    Registers the Fortran dependency scanner for Intel Fortran's ``.i`` and
    ``.i90`` suffixes, pulls in the common Fortran setup, and points the
    FORTRAN command variables at ``ifl`` (MSVC-style ``/c`` and ``/Fo``
    flags, as used on Windows).
    """
    fscan = FortranScan("FORTRANPATH")
    SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
    SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
    # dict.has_key() is Python-2-only (removed in Python 3); SCons
    # Environments support the `in` operator, which works on both.
    if 'FORTRANFILESUFFIXES' not in env:
        env['FORTRANFILESUFFIXES'] = ['.i']
    else:
        env['FORTRANFILESUFFIXES'].append('.i')
    if 'F90FILESUFFIXES' not in env:
        env['F90FILESUFFIXES'] = ['.i90']
    else:
        env['F90FILESUFFIXES'].append('.i90')
    add_all_to_env(env)
    env['FORTRAN']        = 'ifl'
    env['SHFORTRAN']      = '$FORTRAN'
    env['FORTRANCOM']     = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
    env['FORTRANPPCOM']   = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
    env['SHFORTRANCOM']   = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
    env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
def exists(env):
    """Return a true value if the 'ifl' executable can be detected on PATH."""
    return env.Detect('ifl')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 38.547945 | 121 | 0.730988 |
__revision__ = "src/engine/SCons/Tool/ifl.py 4043 2009/02/23 09:06:45 scons"
import SCons.Defaults
from SCons.Scanner.Fortran import FortranScan
from FortranCommon import add_all_to_env
def generate(env):
fscan = FortranScan("FORTRANPATH")
SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
if not env.has_key('FORTRANFILESUFFIXES'):
env['FORTRANFILESUFFIXES'] = ['.i']
else:
env['FORTRANFILESUFFIXES'].append('.i')
if not env.has_key('F90FILESUFFIXES'):
env['F90FILESUFFIXES'] = ['.i90']
else:
env['F90FILESUFFIXES'].append('.i90')
add_all_to_env(env)
env['FORTRAN'] = 'ifl'
env['SHFORTRAN'] = '$FORTRAN'
env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
def exists(env):
return env.Detect('ifl')
| true | true |
f728ed8b115953ba9eb18572f31a9091b8aceb3d | 10,194 | py | Python | web/form/widgets.py | marcelb98/pycroft | 34cc59d9ab7fdc0c20b09b4851111048a9f64d90 | [
"Apache-2.0"
] | null | null | null | web/form/widgets.py | marcelb98/pycroft | 34cc59d9ab7fdc0c20b09b4851111048a9f64d90 | [
"Apache-2.0"
] | null | null | null | web/form/widgets.py | marcelb98/pycroft | 34cc59d9ab7fdc0c20b09b4851111048a9f64d90 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from itertools import chain
from flask import url_for
from markupsafe import escape, Markup
import wtforms.ext.sqlalchemy.fields
import wtforms.fields
from wtforms.widgets.core import html_params, HTMLString
from web.templates import page_resources
from functools import reduce
class WidgetDecorator(object):
    """Base class for widget decorators.

    Holds the wrapped widget in ``self.widget``; subclasses call it from
    their ``__call__`` and post-process or wrap its rendered output.
    """
    def __init__(self, widget):
        """
        :param widget: Original widget to be decorated; must not be ``None``.
        :raises ValueError: if ``widget`` is ``None``.
        """
        if widget is None:
            raise ValueError('Parameter widget may not be None.')
        self.widget = widget
class BootstrapFormGroupDecorator(WidgetDecorator):
    """Wrap the decorated widget in a Bootstrap ``form-group`` ``<div>``.

    When the field has validation errors the ``has-error`` modifier class
    is added so Bootstrap styles the whole group as invalid.
    """

    def __call__(self, field, **kwargs):
        css = [u'form-group', u'has-error'] if field.errors else [u'form-group']
        # Join with a plain string (rather than Markup concatenation) so the
        # widget's already-rendered HTML is not escaped a second time.
        return HTMLString(u''.join([
            Markup(u'<div class="{0}">').format(u' '.join(css)),
            self.widget(field, **kwargs),
            u'</div>']))
class BootstrapFormControlDecorator(WidgetDecorator):
    """Prepend Bootstrap's ``form-control`` CSS class to the wrapped widget."""

    def __call__(self, field, **kwargs):
        # Keep any caller-supplied classes, with form-control coming first.
        base = u'form-control'
        kwargs['class_'] = base + u' ' + kwargs['class_'] if 'class_' in kwargs else base
        return self.widget(field, **kwargs)
class BootstrapStandardDecorator(WidgetDecorator):
    """Render a labelled field in one of three Bootstrap layouts.

    ``horizontal`` is a two-column layout (label left, control right),
    ``inline`` hides the label visually and uses it as a placeholder,
    ``basic`` simply stacks label, control and help texts. The mode is
    chosen via the ``render_mode`` keyword argument (default ``basic``).
    """

    def render_horizontal(self, field, **kwargs):
        pieces = [u'<div class="col-sm-5">',
                  field.label(class_=u'control-label'),
                  u'</div>',
                  u'<div class="col-sm-7">',
                  self.widget(field, **kwargs),
                  u'</div>']
        note = Markup(u'<div class="col-sm-12">'
                      u'<span class="help-block">{0}</span>'
                      u'</div>')
        if field.description:
            pieces.append(note.format(field.description))
        for error in field.errors:
            pieces.append(note.format(error))
        return HTMLString(u''.join(pieces))

    def render_inline(self, field, **kwargs):
        # The label stays in the DOM for screen readers (sr-only) while the
        # visible hint is carried by the placeholder attribute.
        hidden_label = field.label(class_=u'sr-only')
        control = self.widget(field, placeholder=field.label.text, **kwargs)
        return HTMLString(u''.join([hidden_label, control]))

    def render_basic(self, field, **kwargs):
        pieces = [field.label(), self.widget(field, **kwargs)]
        note = Markup(u'<span class="help-block">{0}</span>')
        if field.description:
            pieces.append(note.format(field.description))
        for error in field.errors:
            pieces.append(note.format(error))
        return HTMLString(u''.join(pieces))

    def __call__(self, field, **kwargs):
        render_mode = kwargs.pop("render_mode", "basic")
        renderers = {
            "basic": self.render_basic,
            "horizontal": self.render_horizontal,
            "inline": self.render_inline,
        }
        try:
            renderer = renderers[render_mode]
        except KeyError:
            raise ValueError("Unknown render mode: {0}".format(render_mode))
        return renderer(field, **kwargs)
class BootstrapRadioCheckboxDecorator(WidgetDecorator):
    """Render a radio/checkbox control inside its label, Bootstrap-style.

    Subclasses set :attr:`wrapper_class` (``radio`` or ``checkbox``) to pick
    the Bootstrap container class. Three layouts are supported via the
    ``render_mode`` keyword (default ``horizontal``).
    """
    # CSS class of the wrapping <div>; set by subclasses.
    wrapper_class = None
    def _render(self, field, **kwargs):
        # Bootstrap places the <input> inside its <label>, both wrapped in a
        # <div class="radio"/"checkbox">. The label text is escaped; the
        # rendered widget is inserted as-is.
        return HTMLString(u''.join([
            u'<div class="',
            self.wrapper_class,
            u'">',
            field.label(
                u"{0} {1}".format(
                    self.widget(field, **kwargs),
                    escape(field.label.text)
                )),
            u'</div>',
        ]))
    def render_basic(self, field, **kwargs):
        return self._render(field, **kwargs)
    def render_horizontal(self, field, **kwargs):
        # Offset by the label column width so the control lines up with the
        # other horizontally rendered fields.
        return HTMLString(u''.join([
            u'<div class="col-sm-offset-5 col-sm-7">',
            self._render(field, **kwargs),
            u'</div>',
        ]))
    def render_inline(self, field, **kwargs):
        # Inline variant: no wrapping <div>, Bootstrap's "*-inline" class on
        # the label instead.
        return field.label(u"{0} {1}".format(
            self.widget(field, **kwargs),
            escape(field.label.text)
        ), class_=self.wrapper_class + "-inline")
    def __call__(self, field, **kwargs):
        render_mode = kwargs.pop("render_mode", "horizontal")
        if render_mode == "basic":
            return self.render_basic(field, **kwargs)
        elif render_mode == "horizontal":
            return self.render_horizontal(field, **kwargs)
        elif render_mode == "inline":
            return self.render_inline(field, **kwargs)
        else:
            raise ValueError("Unknown render mode: {0}".format(render_mode))
class BootstrapRadioDecorator(BootstrapRadioCheckboxDecorator):
    # Radio buttons: Bootstrap "radio" (or "radio-inline") container class.
    wrapper_class = u"radio"
class BootstrapCheckboxDecorator(BootstrapRadioCheckboxDecorator):
    # Checkboxes: Bootstrap "checkbox" (or "checkbox-inline") container class.
    wrapper_class = u"checkbox"
class BootstrapFieldListWidget(object):
    """Render a WTForms FieldList: list-level errors first, then each entry."""

    def __call__(self, field, **kwargs):
        error_template = Markup(u'<p class="help-block">{0}</p>')
        rendered = [error_template.format(err) for err in field.errors]
        rendered.extend(subfield(**kwargs) for subfield in field)
        return HTMLString(u''.join(rendered))
class BootstrapFormFieldWidget(object):
    """Render a nested FormField's subfields inside a ``form-field`` ``<div>``."""

    def __call__(self, field, **kwargs):
        inner = u''.join(subfield(**kwargs) for subfield in field)
        return HTMLString(u"<div class=\"form-field\">" + inner + u"</div>")
class BootstrapStaticFieldWidget(object):
    """Render a static (read-only) Bootstrap control as a ``<p>`` element."""
    def __call__(self, field, **kwargs):
        # Overrides any caller-supplied class with Bootstrap's static style.
        kwargs["class_"] = u"form-control-static"
        # Assume that the field provides access to its value.
        # NOTE(review): _value() is WTForms' private formatting hook -- the
        # value is inserted unescaped, so it must already be safe HTML/text.
        value = field._value()
        return HTMLString(u''.join([
            u'<p {}>'.format(html_params(**kwargs)),
            value,
            u'</p>',
        ]))
def decorators(widget):
    """Yield the decorator classes wrapped around *widget*, outermost first."""
    current = widget
    while isinstance(current, WidgetDecorator):
        yield type(current)
        current = current.widget
def decorate(widget, *decorators):
    """
    Decorate a widget with a list of decorators.

    The decorators are applied left to right, so the last one becomes the
    outermost wrapper.

    :param widget: a widget
    :param tuple[WidgetDecorator] decorators: some decorators
    :rtype: WidgetDecorator
    :returns: decorated widget
    """
    wrapped = widget
    for decorator in decorators:
        wrapped = decorator(wrapped)
    return wrapped
def decorate_field(field, *decorators):
    """
    Return a field's widget decorated with the given decorators.

    :param wtforms.fields.core.Field field: a WTForms field
    :param tuple[WidgetDecorator] decorators: some decorators
    :rtype: WidgetDecorator
    :returns: decorated widget
    """
    return decorate(field.widget, *decorators)
from markupsafe import Markup
class BootstrapDatepickerWidget(object):
    """Renders datetime fields using bootstrap-datepicker.

    Side effect: registers the datepicker JS (and its German locale) with
    the page's resource collector on every render.
    """
    def __call__(self, field, **kwargs):
        kwargs["data-provide"] = u"datepicker"
        # Expose the field's datepicker options as data-date-* attributes,
        # e.g. {'min_view_mode': x} -> data-date-min-view-mode.
        for (option, value) in field.datepicker_options.items():
            attribute = 'data-date-{0}'.format(option.replace('_', '-'))
            kwargs[attribute] = value
        page_resources.link_script(url_for(
            "static", filename="libs/bootstrap-datepicker/js/bootstrap-datepicker.js"
        ))
        page_resources.link_script(url_for(
            "static", filename="libs/bootstrap-datepicker/js/locales/bootstrap-datepicker.de.js"
        ))
        options = dict(kwargs, name=field.name)
        if field.data:
            options["value"] = field.data
        return HTMLString(u"<input {0}>".format(html_params(**options)))
class CheckBoxWidget(wtforms.widgets.Select):
    """A simple multi selection widget rendered as Checkbox list.

    It uses the bootstrap markup: each choice becomes an ``<input
    type="checkbox">`` inside a ``<label class="checkbox">``.
    """
    def __call__(self, field, **kwargs):
        kwargs.setdefault('type', 'checkbox')
        field_id = kwargs.pop('id', field.id)
        html = []
        for value, label, checked in field.iter_choices():
            # Per-choice id so every input in the group is unique.
            choice_id = u'{}-{}'.format(field_id, value)
            options = dict(kwargs, name=field.name, value=value, id=choice_id)
            # Bug fix: the label previously repeated id=field_id for every
            # choice (duplicate HTML ids). Reference the input explicitly
            # via for= instead (html_params maps for_ -> for).
            html.append(u'<label class="checkbox" {}>'.format(html_params(for_=choice_id)))
            if checked:
                options['checked'] = 'checked'
            html.append(u'<input {}>'.format(html_params(**options)))
            html.append(label)
            html.append(u'</label>')
        # Wrap in HTMLString for consistency with the other widgets here.
        return HTMLString(u''.join(html))
class LazyLoadSelectWidget(wtforms.widgets.Select):
    """This is the widget for the LazyLoadSelectField

    Please look at web.form.fields.LazyLoadSelectField for more information.
    The option list is fetched client-side from ``field.data_endpoint``;
    ``data-fieldids`` names the other fields whose values parameterize that
    request.
    """
    def __call__(self, field, **kwargs):
        conditions = getattr(field, "conditions", None)
        if conditions is not None:
            kwargs["data-fieldids"] = ",".join(conditions)
        kwargs['data-role'] = u'lazyloadselect'
        kwargs['data-url'] = url_for(field.data_endpoint)
        # NOTE(review): str(field.data) yields the literal "None" when the
        # field has no data -- presumably handled by the client-side JS;
        # confirm before changing.
        kwargs['value'] = str(field.data)
        return super(LazyLoadSelectWidget, self).__call__(field, **kwargs)
class Disabler(WidgetDecorator):
    """Render the wrapped widget with the HTML ``disabled`` attribute set."""
    def __call__(self, field, **kwargs):
        # Force-disable regardless of what the caller passed in kwargs.
        kwargs['disabled'] = True
        return self.widget(field, **kwargs)
class MoneyFieldDecorator(WidgetDecorator):
    """Render a money amount inside an input-group with a Euro-sign addon."""
    def __call__(self, field, **kwargs):
        # Bug fix: the original did kwargs['class_'] += ..., which raised
        # KeyError when no class was supplied. Handle a missing class like
        # BootstrapFormControlDecorator does.
        existing = kwargs.get('class_')
        kwargs['class_'] = existing + ' money-amount' if existing else 'money-amount'
        return (u"<div class=\"input-group\">" + self.widget(field, **kwargs) +
                u"<span class=\"input-group-addon\">€</span></div>")
| 34.673469 | 96 | 0.614087 |
from itertools import chain
from flask import url_for
from markupsafe import escape, Markup
import wtforms.ext.sqlalchemy.fields
import wtforms.fields
from wtforms.widgets.core import html_params, HTMLString
from web.templates import page_resources
from functools import reduce
class WidgetDecorator(object):
def __init__(self, widget):
if widget is None:
raise ValueError('Parameter widget may not be None.')
self.widget = widget
class BootstrapFormGroupDecorator(WidgetDecorator):
def __call__(self, field, **kwargs):
classes = [u'form-group']
if field.errors:
classes.append(u'has-error')
return HTMLString(u''.join([
Markup(u'<div class="{0}">').format(u' '.join(classes)),
self.widget(field, **kwargs),
u'</div>']))
class BootstrapFormControlDecorator(WidgetDecorator):
def __call__(self, field, **kwargs):
if 'class_' in kwargs:
kwargs['class_'] = u'form-control ' + kwargs['class_']
else:
kwargs['class_'] = u'form-control'
return self.widget(field, **kwargs)
class BootstrapStandardDecorator(WidgetDecorator):
def render_horizontal(self, field, **kwargs):
html = [u'<div class="col-sm-5">',
field.label(class_=u'control-label'),
u'</div>',
u'<div class="col-sm-7">',
self.widget(field, **kwargs),
u'</div>']
help_block = Markup(u'<div class="col-sm-12">'
u'<span class="help-block">{0}</span>'
u'</div>')
if field.description:
html.append(help_block.format(field.description))
html.extend(help_block.format(e) for e in field.errors)
return HTMLString(u''.join(html))
def render_inline(self, field, **kwargs):
return HTMLString(u''.join([
field.label(class_=u'sr-only'),
self.widget(field, placeholder=field.label.text, **kwargs),
]))
def render_basic(self, field, **kwargs):
html = [field.label(),
self.widget(field, **kwargs)]
help_block = Markup(u'<span class="help-block">{0}</span>')
if field.description:
html.append(help_block.format(field.description))
html.extend(help_block.format(e) for e in field.errors)
return HTMLString(u''.join(html))
def __call__(self, field, **kwargs):
render_mode = kwargs.pop("render_mode", "basic")
if render_mode == "basic":
return self.render_basic(field, **kwargs)
elif render_mode == "horizontal":
return self.render_horizontal(field, **kwargs)
elif render_mode == "inline":
return self.render_inline(field, **kwargs)
else:
raise ValueError("Unknown render mode: {0}".format(render_mode))
class BootstrapRadioCheckboxDecorator(WidgetDecorator):
wrapper_class = None
def _render(self, field, **kwargs):
return HTMLString(u''.join([
u'<div class="',
self.wrapper_class,
u'">',
field.label(
u"{0} {1}".format(
self.widget(field, **kwargs),
escape(field.label.text)
)),
u'</div>',
]))
def render_basic(self, field, **kwargs):
return self._render(field, **kwargs)
def render_horizontal(self, field, **kwargs):
return HTMLString(u''.join([
u'<div class="col-sm-offset-5 col-sm-7">',
self._render(field, **kwargs),
u'</div>',
]))
def render_inline(self, field, **kwargs):
return field.label(u"{0} {1}".format(
self.widget(field, **kwargs),
escape(field.label.text)
), class_=self.wrapper_class + "-inline")
def __call__(self, field, **kwargs):
render_mode = kwargs.pop("render_mode", "horizontal")
if render_mode == "basic":
return self.render_basic(field, **kwargs)
elif render_mode == "horizontal":
return self.render_horizontal(field, **kwargs)
elif render_mode == "inline":
return self.render_inline(field, **kwargs)
else:
raise ValueError("Unknown render mode: {0}".format(render_mode))
class BootstrapRadioDecorator(BootstrapRadioCheckboxDecorator):
wrapper_class = u"radio"
class BootstrapCheckboxDecorator(BootstrapRadioCheckboxDecorator):
wrapper_class = u"checkbox"
class BootstrapFieldListWidget(object):
def __call__(self, field, **kwargs):
return HTMLString(u''.join(chain(
(Markup(u'<p class="help-block">{0}</p>').format(e) for e in field.errors),
(f(**kwargs) for f in field)
)))
class BootstrapFormFieldWidget(object):
def __call__(self, field, **kwargs):
return HTMLString(u"<div class=\"form-field\">" +
u''.join(f(**kwargs) for f in field) +
u"</div>")
class BootstrapStaticFieldWidget(object):
def __call__(self, field, **kwargs):
kwargs["class_"] = u"form-control-static"
value = field._value()
return HTMLString(u''.join([
u'<p {}>'.format(html_params(**kwargs)),
value,
u'</p>',
]))
def decorators(widget):
while isinstance(widget, WidgetDecorator):
yield type(widget)
widget = widget.widget
def decorate(widget, *decorators):
return reduce(lambda w, d: d(w), decorators, widget)
def decorate_field(field, *decorators):
return decorate(field.widget, *decorators)
from markupsafe import Markup
class BootstrapDatepickerWidget(object):
def __call__(self, field, **kwargs):
kwargs["data-provide"] = u"datepicker"
for (option, value) in field.datepicker_options.items():
attribute = 'data-date-{0}'.format(option.replace('_', '-'))
kwargs[attribute] = value
page_resources.link_script(url_for(
"static", filename="libs/bootstrap-datepicker/js/bootstrap-datepicker.js"
))
page_resources.link_script(url_for(
"static", filename="libs/bootstrap-datepicker/js/locales/bootstrap-datepicker.de.js"
))
options = dict(kwargs, name=field.name)
if field.data:
options["value"] = field.data
return HTMLString(u"<input {0}>".format(html_params(**options)))
class CheckBoxWidget(wtforms.widgets.Select):
def __call__(self, field, **kwargs):
kwargs.setdefault('type', 'checkbox')
field_id = kwargs.pop('id', field.id)
html = []
for value, label, checked in field.iter_choices():
choice_id = u'{}-{}'.format(field_id, value)
options = dict(kwargs, name=field.name, value=value, id=choice_id)
html.append(u'<label class="checkbox" {}>'.format(html_params(id=field_id)))
if checked:
options['checked'] = 'checked'
html.append(u'<input {}>'.format(html_params(**options)))
html.append(label)
html.append(u'</label>')
return u''.join(html)
class LazyLoadSelectWidget(wtforms.widgets.Select):
def __call__(self, field, **kwargs):
conditions = getattr(field, "conditions", None)
if conditions is not None:
kwargs["data-fieldids"] = ",".join(conditions)
kwargs['data-role'] = u'lazyloadselect'
kwargs['data-url'] = url_for(field.data_endpoint)
kwargs['value'] = str(field.data)
return super(LazyLoadSelectWidget, self).__call__(field, **kwargs)
class Disabler(WidgetDecorator):
def __call__(self, field, **kwargs):
kwargs['disabled'] = True
return self.widget(field, **kwargs)
class MoneyFieldDecorator(WidgetDecorator):
def __call__(self, field, **kwargs):
kwargs['class_'] += ' money-amount'
return (u"<div class=\"input-group\">" + self.widget(field, **kwargs) +
u"<span class=\"input-group-addon\">€</span></div>")
| true | true |
f728ee3b1671b49bf022dd945f48cb355a0d7e61 | 691 | py | Python | AutoRefresh/AutoRefresh.py | GWDx/Script | 35c9e8aefa7378608d99de5bf711c22383c1bb09 | [
"MIT"
] | 1 | 2021-10-03T07:43:41.000Z | 2021-10-03T07:43:41.000Z | AutoRefresh/AutoRefresh.py | GWDx/Script | 35c9e8aefa7378608d99de5bf711c22383c1bb09 | [
"MIT"
] | null | null | null | AutoRefresh/AutoRefresh.py | GWDx/Script | 35c9e8aefa7378608d99de5bf711c22383c1bb09 | [
"MIT"
] | null | null | null | from selenium import webdriver
from time import sleep
import re
browser = webdriver.Firefox()
browser.get('https://jw.ustc.edu.cn')
input()
windows = browser.window_handles
browser.switch_to.window(windows[1])
while True:
element = browser.find_element_by_id('table')
elementText = element.text
number = re.sub(r'.*? (\d+\/\d+).*', r'\1', elementText.split('\n')[2]) # 行数
selectedNumber = int(number.split('/')[0])
selectedRange = int(number.split('/')[1])
if (selectedNumber < selectedRange):
raise (RuntimeError('exist'))
try:
browser.refresh()
except:
browser.refresh()
sleep(.5)
browser.quit()
| 24.678571 | 82 | 0.625181 | from selenium import webdriver
from time import sleep
import re
browser = webdriver.Firefox()
browser.get('https://jw.ustc.edu.cn')
input()
windows = browser.window_handles
browser.switch_to.window(windows[1])
while True:
element = browser.find_element_by_id('table')
elementText = element.text
number = re.sub(r'.*? (\d+\/\d+).*', r'\1', elementText.split('\n')[2])
selectedNumber = int(number.split('/')[0])
selectedRange = int(number.split('/')[1])
if (selectedNumber < selectedRange):
raise (RuntimeError('exist'))
try:
browser.refresh()
except:
browser.refresh()
sleep(.5)
browser.quit()
| true | true |
f728eef6dc9a58573ba69af9db9c00b0d0e6663c | 4,166 | py | Python | roman/ur/realtime/constants.py | drMJ/roman | 9650e73ec6fbb2d8044aa1bbf89fd671843ea54e | [
"MIT"
] | 14 | 2020-04-03T03:48:35.000Z | 2021-11-08T11:17:41.000Z | roman/ur/realtime/constants.py | drMJ/roman | 9650e73ec6fbb2d8044aa1bbf89fd671843ea54e | [
"MIT"
] | 5 | 2020-04-17T21:59:35.000Z | 2022-01-21T23:21:45.000Z | roman/ur/realtime/constants.py | drMJ/roman | 9650e73ec6fbb2d8044aa1bbf89fd671843ea54e | [
"MIT"
] | 10 | 2020-04-16T15:44:25.000Z | 2021-11-10T08:22:52.000Z | ################################################################################################################################
## constants.py
## Contains various constants and defaults used both on the urscript side and the python side
## The defaults can be oveeriden when calling utils.load_script, by including them in the defs hashtable.
################################################################################################################################
# Wire-format layout shared by the Python client and the URScript controller.
# Two-element lists denote half-open [start, end) index ranges into the flat
# command/state arrays.
UR_ZERO = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
UR_STATE_ENTRIES_COUNT = 72 # This is how many numbers we expect to receive with every response
UR_PROTOCOL_VERSION = 0.1

# Possible types of commands
UR_CMD_KIND_ESTOP = 0
UR_CMD_KIND_MOVE_JOINT_SPEEDS = 1 # Accelerate to and maintain the specified speed
UR_CMD_KIND_MOVE_TOOL_POSE = 2 # Move towards an absolute goal position expressed as a tool pose.
UR_CMD_KIND_MOVE_JOINT_POSITIONS = 3 # Move towards an absolute goal position expressed in joint angles.
UR_CMD_KIND_MOVE_TOOL_LINEAR = 4 # Move in a straight line towards an absolute goal position expressed as a tool pose.
UR_CMD_KIND_READ = 8
UR_CMD_KIND_CONFIG = 9
UR_CMD_KIND_IK_QUERY = 10
UR_CMD_KIND_INVALID = 11

# Command field indices
UR_CMD_ID = 0
UR_CMD_KIND = 1
UR_CMD_CONFIG_MASS = 2
UR_CMD_CONFIG_TOOL_COG = [3, 6]
UR_CMD_CONFIG_TOOL_TIP = [6, 12]
UR_CMD_MOVE_TARGET = [2, 8]
UR_CMD_MOVE_MAX_SPEED = 8
UR_CMD_MOVE_MAX_ACCELERATION = 9
UR_CMD_MOVE_FORCE_LOW_BOUND = [10, 16]
UR_CMD_MOVE_FORCE_HIGH_BOUND = [16, 22]
UR_CMD_MOVE_CONTACT_HANDLING = 22
UR_CMD_MOVE_CONTROLLER = 23
UR_CMD_ENTRIES_COUNT = 24 # This is how many floats we expect with each command (not including the count prefix). Needs to stay under 30 (UR restriction).

# State field indices
UR_STATE_TIME = 0
UR_STATE_CMD_ID = 1
UR_STATE_STATUS = 2
UR_STATE_JOINT_POSITIONS = [3, 9]
UR_STATE_JOINT_SPEEDS = [9, 15]
UR_STATE_TOOL_POSE = [15, 21]
UR_STATE_TOOL_SPEED = [21, 27]
UR_STATE_TARGET_JOINT_POSITIONS = [27, 33]
UR_STATE_TARGET_JOINT_SPEEDS = [33, 39]
UR_STATE_TARGET_TOOL_POSE = [39, 45]
UR_STATE_TARGET_TOOL_SPEED = [45, 51]
UR_STATE_JOINT_TORQUES = [51, 57]
UR_STATE_TOOL_FORCE = [57, 63]
UR_STATE_TOOL_ACCELERATION = [63, 66]
UR_STATE_SENSOR_FORCE = [66, 72]

# status bits
UR_STATUS_FLAG_MOVING = 1
UR_STATUS_FLAG_CONTACT = 2
UR_STATUS_FLAG_DEADMAN = 4
UR_STATUS_FLAG_RESERVED = 8
UR_STATUS_FLAG_DONE = 16
UR_STATUS_FLAG_GOAL_REACHED = 32

################################################################################################################################
## Default values
################################################################################################################################
# robot settings
UR_DEFAULT_MASS = 3.25
UR_DEFAULT_TOOL_COG = [0, 0, 0.12]
UR_DEFAULT_TCP = [0, 0, 0.12, 0, 0, 0]
# control
UR_TIME_SLICE = 1./125 # by default, use the CB2 version.
UR_SPEED_TOLERANCE = 0.05 # rad/s
UR_SPEED_NORM_ZERO = 0.05 # rad/s
UR_JOINTS_POSITION_TOLERANCE = 0.001 # rad
UR_TOOL_POSITION_TOLERANCE = 0.001 # m
UR_TOOL_ROTATION_TOLERANCE = 0.001 # rad
UR_DEADMAN_SWITCH_LIMIT = 0.1 # seconds
UR_EPSILON = 0.00001
# Force bounds are [Fx, Fy, Fz, Tx, Ty, Tz] (N and Nm); the IGNORE variants
# effectively disable contact detection.
UR_DEFAULT_FORCE_LOW_BOUND = [-20.0, -20.0, -20.0, -2, -2, -2]
UR_DEFAULT_FORCE_HIGH_BOUND = [20.0, 20.0, 20.0, 2, 2, 2]
UR_FORCE_IGNORE_HIGH = [1000, 1000, 1000, 100, 100, 100]
UR_FORCE_IGNORE_LOW = [-1000, -1000, -1000, -100, -100, -100]
UR_DEFAULT_ACCELERATION = 0.1 # rad/s2
UR_FAST_STOP_ACCELERATION = 3.0 # rad/s2
UR_DEFAULT_MAX_SPEED = 0.1 # rad/s
# interface / protocol
UR_RT_PORT = 30003 # real-time UR interface (RT)
UR_ROBOT_IP = "192.168.1.2"
UR_DEFAULT_CLIENT_IP = "192.168.1.9"
UR_DEFAULT_CLIENT_PORT = 50003
UR_ROBOT_VERSION_CB2 = 0
UR_ROBOT_VERSION_ESERIES = 2
UR_ROBOT_VERSION = UR_ROBOT_VERSION_CB2
# these need to be defined outside, e.g. through the defs parameter of utils.load_script()
#UR_CLIENT_IP
#UR_CLIENT_PORT
# URScript-compatible slicing function: returns
# vec[bounds[0]+start : bounds[1]+start] written without Python slice syntax,
# because this file is also shipped to the controller as URScript (which has
# no slicing). Only '#' comments are added here -- a docstring would be a
# bare string statement and could break the URScript translation.
def s_(vec, bounds, start):
    s = bounds[0] + start
    cnt = bounds[1] - bounds[0]  # span width; only 3 and 6 occur in this protocol
    if cnt == 3:
        return [vec[s], vec[s + 1], vec[s + 2]]
    elif cnt == 6:
        return [vec[s], vec[s + 1], vec[s + 2], vec[s + 3], vec[s + 4], vec[s + 5]]
    # any other width falls through (returns None in Python)
#ur:end
#ur:end
| 37.872727 | 154 | 0.660346 | true | true | |
f728ef3ac6c9afa2c6e1b048510aab2473316fca | 1,781 | py | Python | data/main.py | banahaker/mingdao | a5dc7fce50c23a0e287813d914234f6b84027cdf | [
"MIT"
] | 2 | 2021-12-19T11:44:44.000Z | 2022-01-27T09:29:58.000Z | data/main.py | banahaker/mingdao | a5dc7fce50c23a0e287813d914234f6b84027cdf | [
"MIT"
] | null | null | null | data/main.py | banahaker/mingdao | a5dc7fce50c23a0e287813d914234f6b84027cdf | [
"MIT"
] | null | null | null | import requests
import bs4
import json
import os
# define git commit function
def gitCommit():
    """Stage, commit and push everything in the parent directory.

    Side effect: changes the process working directory to the repository
    root (one level up). The interleaved ``git status`` calls only print
    progress to the console.
    """
    os.chdir('..')
    os.system('git status')
    os.system("git add .")
    os.system("git status")
    os.system('git commit -m "update data information"')
    os.system("git push")
# get page response
url = 'http://www.mingdao.edu.tw/homeX/Web/'
# Browser-like User-Agent so the school site serves the regular page.
header = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.57"
}
response = requests.get(url, headers=header)
response.encoding = 'utf-8'
data = response.text
# response page analyse
root = bs4.BeautifulSoup(data, 'html.parser')
infos = root.find_all('a', class_="style44")  # news links on the front page
informationList = []
for i in range(16):
    key = infos[i]
    if i<7:
        # Early entries open via a JS onclick popup; strip the wrapper text
        # around the numeric news id.
        news_id = key.attrs['onclick']
        news_id = news_id.replace("','公告內容','scrollbars=yes,resizable=yes,width=500,height=400')", "")
        news_id = news_id.replace("MM_openBrWindow('../../md/200310/honor/show.php?News_ID=", "")
        singleInfo = {
            "content": key.string,
            "news_id": news_id
        }
        informationList.append(singleInfo)
    else:
        # Later entries are plain links; the id is the href query parameter.
        news_id = key.attrs['href']
        news_id = news_id.replace("../../md/200310/honor/show.php?News_ID=", "")
        singleInfo = {
            "content": key.string,
            "news_id": news_id
        }
        informationList.append(singleInfo)
# NOTE(review): the onclick branch covers i < 7 but honorRoll slices [0:8],
# so the 8th "honor" item is parsed by the href branch -- confirm whether the
# boundary should be i < 8.
honorRoll = informationList[0:8]
announcements = informationList[8:16]
# deal with json file
allInfo = [honorRoll, announcements]
jsonAllInfo = json.dumps(allInfo, ensure_ascii=False, sort_keys=True, indent=2)
# write json file
with open('information.json', 'w', encoding='utf-8') as file:
    file.write(jsonAllInfo)
gitCommit()  # push the refreshed information.json to the repository
gitCommit() | 29.196721 | 152 | 0.647951 | import requests
import bs4
import json
import os
def gitCommit():
os.chdir('..')
os.system('git status')
os.system("git add .")
os.system("git status")
os.system('git commit -m "update data information"')
os.system("git push")
url = 'http://www.mingdao.edu.tw/homeX/Web/'
header = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.57"
}
response = requests.get(url, headers=header)
response.encoding = 'utf-8'
data = response.text
root = bs4.BeautifulSoup(data, 'html.parser')
infos = root.find_all('a', class_="style44")
informationList = []
for i in range(16):
key = infos[i]
if i<7:
news_id = key.attrs['onclick']
news_id = news_id.replace("','公告內容','scrollbars=yes,resizable=yes,width=500,height=400')", "")
news_id = news_id.replace("MM_openBrWindow('../../md/200310/honor/show.php?News_ID=", "")
singleInfo = {
"content": key.string,
"news_id": news_id
}
informationList.append(singleInfo)
else:
news_id = key.attrs['href']
news_id = news_id.replace("../../md/200310/honor/show.php?News_ID=", "")
singleInfo = {
"content": key.string,
"news_id": news_id
}
informationList.append(singleInfo)
honorRoll = informationList[0:8]
announcements = informationList[8:16]
allInfo = [honorRoll, announcements]
jsonAllInfo = json.dumps(allInfo, ensure_ascii=False, sort_keys=True, indent=2)
with open('information.json', 'w', encoding='utf-8') as file:
file.write(jsonAllInfo)
gitCommit() | true | true |
f728efb52d226dd7c63082f446ae3c4e21518b76 | 6,913 | py | Python | latency_pipe.py | ybCliff/VideoCaptioning | 93fc3b095c970e51e1e24909163a827df98d6ef3 | [
"MIT"
] | 3 | 2020-05-16T23:59:57.000Z | 2021-06-14T01:59:41.000Z | latency_pipe.py | ybCliff/VideoCaptioning | 93fc3b095c970e51e1e24909163a827df98d6ef3 | [
"MIT"
] | null | null | null | latency_pipe.py | ybCliff/VideoCaptioning | 93fc3b095c970e51e1e24909163a827df98d6ef3 | [
"MIT"
] | 3 | 2020-05-17T00:01:01.000Z | 2020-07-28T18:04:05.000Z |
# Latency-measurement pipeline: shells out to ar_test.py (autoregressive
# baseline) and test_nar.py (non-autoregressive model) with many flag
# combinations; the invoked scripts record timings via -write_time.
# The triple-quoted blocks below are retired experiment batches kept as
# dead string literals for reference -- do not edit their contents.
import sys
sys.path.append("../")
import json
import os
import argparse
import torch
import shutil
import numpy as np

num_loop = 1  # repetitions of each measurement command
#for i in range(num_loop):
#    op = 'CUDA_VISIBLE_DEVICES=2 python ar_test.py -i 0 -em test -analyze -ns -write_time -beam_size 1'
#    os.system(op)
'''
for bs in [1, 5, 6]:
    op = 'CUDA_VISIBLE_DEVICES=0 python ar_test.py -i 1 -em test -analyze -write_time -beam_size %d'%bs
    os.system(op)
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 1 -beam_alpha 1.35 -em test -nd -paradigm mp -print_latency -write_time -lbs 6 -s 100 ' + ' -i %d'%iteration
        os.system(op)
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index -1 -beam_alpha 1.35 -em test -nd -paradigm mp -print_latency -write_time -lbs 6 ' + ' -i %d'%iteration
        os.system(op)
#myset = [[5, 1], [5, 2], [5, 3], [5, 4], [5, 5], [4, 4], [3, 3], [2, 2], [1, 1]]
myset = [[3, 4], [3, 5]]
for item in myset:
    i, lbs = item
    op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 1 -beam_alpha 1.35 -em test -nd -paradigm mp -s 100 -print_latency -write_time' + ' -i %d'%i + ' -lbs %d'%lbs
    os.system(op)
for q in range(4, 0, -1):
    for iteration in range(2):
        for i in range(num_loop):
            op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 1 -beam_alpha 1.35 -em test -nd -paradigm ef -s 100 -print_latency -write_time -lbs 6 ' + ' -i %d'%iteration + ' -q %d'%q
            os.system(op)
'''
# Active batch for checkpoint index 0:
# 1) AR baseline at several beam sizes.
for bs in [1, 5, 6]:
    op = 'CUDA_VISIBLE_DEVICES=0 python ar_test.py -i 0 -em test -analyze -write_time -beam_size %d'%bs
    os.system(op)
# 2) NAR mask-predict (mp) latency over 1..7 refinement iterations.
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm mp -print_latency -write_time -lbs 6 -s 100 ' + ' -i %d'%iteration
        os.system(op)
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index -2 -em test -nd -paradigm mp -print_latency -write_time -lbs 6 ' + ' -i %d'%iteration
        os.system(op)
# 3) Sweep over (iterations, length-beam-size) pairs.
#myset = [[5, 1], [5, 2], [5, 3], [5, 4], [5, 5], [4, 4], [3, 3], [2, 2], [1, 1]]
myset = [[1,1], [1, 2],[1, 3], [1, 4],[1, 5]]
for item in myset:
    i, lbs = item
    op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm mp -s 100 -print_latency -write_time' + ' -i %d'%i + ' -lbs %d'%lbs
    os.system(op)
# 4) Easy-first (ef) paradigm over q = 4..1.
for q in range(4, 0, -1):
    for iteration in range(2):
        for i in range(num_loop):
            op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm ef -s 100 -print_latency -write_time -lbs 6 ' + ' -i %d'%iteration + ' -q %d'%q
            os.system(op)
'''
for i in range(num_loop):
    op = 'CUDA_VISIBLE_DEVICES=0 python ar_test.py -i 0 -em test -analyze -ns -write_time -beam_size 1'
    os.system(op)
for i in range(num_loop):
    op = 'CUDA_VISIBLE_DEVICES=0 python ar_test.py -i 0 -em test -analyze -ns -write_time'
    os.system(op)
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py -ns --index 0 -em test -nd -paradigm mp -s 100 -print_latency -write_time' + ' -i %d'%iteration
        os.system(op)
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py -ns --index -2 -em test -nd -paradigm mp -s 100 -print_latency -write_time' + ' -i %d'%iteration
        os.system(op)
'''
'''
myset = [[5, 1], [5, 2], [5, 3], [5, 4], [5, 5], [5, 6], [4, 4], [3, 3], [2, 2], [1, 1]]
for item in myset:
i, lbs = item
op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm mp -s 100 -print_latency -write_time' + ' -i %d'%i + ' -lbs %d'%lbs
os.system(op)
for q in range(4, 0, -1):
for iteration in range(2):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm ef -s 100 -print_latency -write_time' + ' -i %d'%iteration + ' -q %d'%q
os.system(op)
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python ar_test.py -i 1 -em test -analyze -ns -write_time -beam_size 1'
os.system(op)
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python ar_test.py -i 1 -em test -analyze -ns -write_time'
os.system(op)
for iteration in range(1, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 0 -em test -nd -paradigm mp -s 100 -print_latency -write_time -ns' + ' -i %d'%iteration
os.system(op)
for iteration in range(5, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 0 -em test -nd -paradigm mp -print_latency -write_time -ns' + ' -i %d'%iteration
os.system(op)
for iteration in range(5, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 1 -beam_alpha 1.15 -em test -nd -paradigm mp -print_latency -write_time -ns' + ' -i %d'%iteration
os.system(op)
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 1 -beam_alpha 1.15 -em test -nd -paradigm ef -print_latency -write_time -ns'
os.system(op)
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 1 -beam_alpha 1.15 -em test -nd -paradigm ef -s 100 -print_latency -write_time -ns'
os.system(op)
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 1 -beam_alpha 1.15 -em test -nd -paradigm ef -s 100 -print_latency -write_time -ns -q 2'
os.system(op)
for iteration in range(1, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 0 -em test -nd -paradigm mp -s 100 -print_latency -write_time -i 5 ' + ' -lbs %d'%iteration
if i > 0:
op += " -ns "
os.system(op)
for iteration in range(3, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 1 -em test -nd -paradigm mp -beam_alpha 1.15 -s 100 -print_latency -write_time -i 5 ' + ' -lbs %d'%iteration
if i > 0:
op += " -ns "
os.system(op)
for iteration in range(1, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index -1 -em test -nd -paradigm mp -beam_alpha 1.15 -print_latency -write_time -i 5 ' + ' -lbs %d'%iteration
if i > 0:
op += " -ns "
os.system(op)
for iteration in range(1, 7):
for i in range(num_loop):
op = 'CUDA_VISIBLE_DEVICES=3 python test_nar.py --index 0 -s 100 -em test -nd -paradigm mp -print_latency -write_time -lbs 5 ' + ' -i %d'%iteration
#if i > 0:
# op += " -ns "
os.system(op)
''' | 38.19337 | 189 | 0.621872 |
# Benchmark driver script: shells out to ar_test.py / test_nar.py with a grid
# of command-line configurations.  Each invoked run records its own results
# (-write_time); the flag semantics (-em, -nd, -paradigm, -lbs, -q, ...) are
# defined by those scripts' argument parsers -- TODO confirm against them.
import sys
sys.path.append("../")  # presumably makes the parent package importable for the called scripts -- verify
import json
import os
import argparse
import torch
import shutil
import numpy as np
# Number of repetitions per configuration (set to 1, so each config runs once).
num_loop = 1
# Autoregressive baseline (ar_test.py) at beam sizes 1, 5, and 6.
for bs in [1, 5, 6]:
    op = 'CUDA_VISIBLE_DEVICES=0 python ar_test.py -i 0 -em test -analyze -write_time -beam_size %d'%bs
    os.system(op)
# Non-autoregressive runs (test_nar.py), sweeping -i from 1 to 7 with index 0.
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm mp -print_latency -write_time -lbs 6 -s 100 ' + ' -i %d'%iteration
        os.system(op)
# Same sweep with --index -2 (and without -s 100).
for iteration in range(1, 8):
    for i in range(num_loop):
        op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index -2 -em test -nd -paradigm mp -print_latency -write_time -lbs 6 ' + ' -i %d'%iteration
        os.system(op)
# Fixed -i 1 while sweeping -lbs from 1 to 5; each entry is [i, lbs].
myset = [[1,1], [1, 2],[1, 3], [1, 4],[1, 5]]
for item in myset:
    i, lbs = item
    op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm mp -s 100 -print_latency -write_time' + ' -i %d'%i + ' -lbs %d'%lbs
    os.system(op)
# "ef" paradigm: sweep -q from 4 down to 1, each at -i 0 and -i 1.
for q in range(4, 0, -1):
    for iteration in range(2):
        for i in range(num_loop):
            op = 'CUDA_VISIBLE_DEVICES=0 python test_nar.py --index 0 -em test -nd -paradigm ef -s 100 -print_latency -write_time -lbs 6 ' + ' -i %d'%iteration + ' -q %d'%q
            os.system(op)
| true | true |
f728f0d04d3651b4023f52477fccaa85cf8b99cf | 8,361 | py | Python | qiskit/circuit/controlflow/for_loop.py | boschmitt/qiskit-terra | 1a1235a7206bdf293d153d285f932e80151e9cb4 | [
"Apache-2.0"
] | null | null | null | qiskit/circuit/controlflow/for_loop.py | boschmitt/qiskit-terra | 1a1235a7206bdf293d153d285f932e80151e9cb4 | [
"Apache-2.0"
] | 7 | 2021-10-14T12:38:24.000Z | 2022-03-21T11:20:02.000Z | qiskit/circuit/controlflow/for_loop.py | boschmitt/qiskit-terra | 1a1235a7206bdf293d153d285f932e80151e9cb4 | [
"Apache-2.0"
] | null | null | null | # This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"Circuit operation representing a ``for`` loop."
import warnings
from typing import Iterable, Optional, Union
from qiskit.circuit.parameter import Parameter
from qiskit.circuit.exceptions import CircuitError
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .control_flow import ControlFlowOp
class ForLoopOp(ControlFlowOp):
"""A circuit operation which repeatedly executes a subcircuit
(``body``) parameterized by a parameter ``loop_parameter`` through
the set of integer values provided in ``indexset``.
Parameters:
indexset: A collection of integers to loop over.
loop_parameter: The placeholder parameterizing ``body`` to which
the values from ``indexset`` will be assigned.
body: The loop body to be repeatedly executed.
label: An optional label for identifying the instruction.
**Circuit symbol:**
.. parsed-literal::
┌───────────┐
q_0: ┤0 ├
│ │
q_1: ┤1 ├
│ for_loop │
q_2: ┤2 ├
│ │
c_0: ╡0 ╞
└───────────┘
"""
def __init__(
self,
indexset: Iterable[int],
loop_parameter: Union[Parameter, None],
body: QuantumCircuit,
label: Optional[str] = None,
):
num_qubits = body.num_qubits
num_clbits = body.num_clbits
super().__init__(
"for_loop", num_qubits, num_clbits, [indexset, loop_parameter, body], label=label
)
@property
def params(self):
return self._params
@params.setter
def params(self, parameters):
indexset, loop_parameter, body = parameters
if not isinstance(loop_parameter, (Parameter, type(None))):
raise CircuitError(
"ForLoopOp expects a loop_parameter parameter to "
"be either of type Parameter or None, but received "
f"{type(loop_parameter)}."
)
if not isinstance(body, QuantumCircuit):
raise CircuitError(
"ForLoopOp expects a body parameter to be of type "
f"QuantumCircuit, but received {type(body)}."
)
if body.num_qubits != self.num_qubits or body.num_clbits != self.num_clbits:
raise CircuitError(
"Attempted to assign a body parameter with a num_qubits or "
"num_clbits different than that of the ForLoopOp. "
f"ForLoopOp num_qubits/clbits: {self.num_qubits}/{self.num_clbits} "
f"Supplied body num_qubits/clbits: {body.num_qubits}/{body.num_clbits}."
)
if (
loop_parameter is not None
and loop_parameter not in body.parameters
and loop_parameter.name in (p.name for p in body.parameters)
):
warnings.warn(
"The Parameter provided as a loop_parameter was not found "
"on the loop body and so no binding of the indexset to loop "
"parameter will occur. A different Parameter of the same name "
f"({loop_parameter.name}) was found. If you intended to loop "
"over that Parameter, please use that Parameter instance as "
"the loop_parameter.",
stacklevel=2,
)
# Consume indexset into a tuple unless it was provided as a range.
# Preserve ranges so that they can be exported as OpenQASM3 ranges.
indexset = indexset if isinstance(indexset, range) else tuple(indexset)
self._params = [indexset, loop_parameter, body]
@property
def blocks(self):
return (self._params[2],)
class ForLoopContext:
"""A context manager for building up ``for`` loops onto circuits in a natural order, without
having to construct the loop body first.
Within the block, a lot of the bookkeeping is done for you; you do not need to keep track of
which qubits and clbits you are using, for example, and a loop parameter will be allocated for
you, if you do not supply one yourself. All normal methods of accessing the qubits on the
underlying :obj:`~QuantumCircuit` will work correctly, and resolve into correct accesses within
the interior block.
You generally should never need to instantiate this object directly. Instead, use
:obj:`.QuantumCircuit.for_loop` in its context-manager form, i.e. by not supplying a ``body`` or
sets of qubits and clbits.
Example usage::
import math
from qiskit import QuantumCircuit
qc = QuantumCircuit(2, 1)
with qc.for_loop(None, range(5)) as i:
qc.rx(i * math.pi/4, 0)
qc.cx(0, 1)
qc.measure(0, 0)
qc.break_loop().c_if(0)
This context should almost invariably be created by a :meth:`.QuantumCircuit.for_loop` call, and
the resulting instance is a "friend" of the calling circuit. The context will manipulate the
circuit's defined scopes when it is entered (by pushing a new scope onto the stack) and exited
(by popping its scope, building it, and appending the resulting :obj:`.ForLoopOp`).
"""
# Class-level variable keep track of the number of auto-generated loop variables, so we don't
# get naming clashes.
_generated_loop_parameters = 0
__slots__ = (
"_circuit",
"_generate_loop_parameter",
"_loop_parameter",
"_indexset",
"_label",
"_used",
)
def __init__(
self,
circuit: QuantumCircuit,
indexset: Iterable[int],
loop_parameter: Optional[Parameter] = None,
*,
label: Optional[str] = None,
):
self._circuit = circuit
self._generate_loop_parameter = loop_parameter is None
self._loop_parameter = loop_parameter
# We can pass through `range` instances because OpenQASM 3 has native support for this type
# of iterator set.
self._indexset = indexset if isinstance(indexset, range) else tuple(indexset)
self._label = label
self._used = False
def __enter__(self):
if self._used:
raise CircuitError("A for-loop context manager cannot be re-entered.")
self._used = True
self._circuit._push_scope()
if self._generate_loop_parameter:
self._loop_parameter = Parameter(f"_loop_i_{self._generated_loop_parameters}")
type(self)._generated_loop_parameters += 1
return self._loop_parameter
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
# If we're leaving the context manager because an exception was raised, there's nothing
# to do except restore the circuit state.
self._circuit._pop_scope()
return False
scope = self._circuit._pop_scope()
# Loops do not need to pass any further resources in, because this scope itself defines the
# extent of ``break`` and ``continue`` statements.
body = scope.build(scope.qubits, scope.clbits)
# We always bind the loop parameter if the user gave it to us, even if it isn't actually
# used, because they requested we do that by giving us a parameter. However, if they asked
# us to auto-generate a parameter, then we only add it if they actually used it, to avoid
# using unnecessary resources.
if self._generate_loop_parameter and self._loop_parameter not in body.parameters:
loop_parameter = None
else:
loop_parameter = self._loop_parameter
self._circuit.append(
ForLoopOp(self._indexset, loop_parameter, body, label=self._label),
tuple(body.qubits),
tuple(body.clbits),
)
return False
| 38.529954 | 100 | 0.634852 |
import warnings
from typing import Iterable, Optional, Union
from qiskit.circuit.parameter import Parameter
from qiskit.circuit.exceptions import CircuitError
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .control_flow import ControlFlowOp
class ForLoopOp(ControlFlowOp):
    """A circuit operation which repeatedly executes a subcircuit (``body``),
    parameterized by ``loop_parameter``, once for each value in ``indexset``.

    Parameters:
        indexset: A collection of integers to loop over.
        loop_parameter: The placeholder parameterizing ``body`` to which the
            values from ``indexset`` will be assigned, or ``None``.
        body: The loop body to be repeatedly executed.
        label: An optional label for identifying the instruction.
    """
    def __init__(
        self,
        indexset: Iterable[int],
        loop_parameter: Union[Parameter, None],
        body: QuantumCircuit,
        label: Optional[str] = None,
    ):
        # The op's qubit/clbit footprint is taken directly from the body circuit.
        num_qubits = body.num_qubits
        num_clbits = body.num_clbits
        super().__init__(
            "for_loop", num_qubits, num_clbits, [indexset, loop_parameter, body], label=label
        )
    @property
    def params(self):
        """The ``[indexset, loop_parameter, body]`` triple backing this op."""
        return self._params
    @params.setter
    def params(self, parameters):
        # Validate the triple before accepting it: type-check the loop
        # parameter and body, and require the body's bit counts to match.
        indexset, loop_parameter, body = parameters
        if not isinstance(loop_parameter, (Parameter, type(None))):
            raise CircuitError(
                "ForLoopOp expects a loop_parameter parameter to "
                "be either of type Parameter or None, but received "
                f"{type(loop_parameter)}."
            )
        if not isinstance(body, QuantumCircuit):
            raise CircuitError(
                "ForLoopOp expects a body parameter to be of type "
                f"QuantumCircuit, but received {type(body)}."
            )
        if body.num_qubits != self.num_qubits or body.num_clbits != self.num_clbits:
            raise CircuitError(
                "Attempted to assign a body parameter with a num_qubits or "
                "num_clbits different than that of the ForLoopOp. "
                f"ForLoopOp num_qubits/clbits: {self.num_qubits}/{self.num_clbits} "
                f"Supplied body num_qubits/clbits: {body.num_qubits}/{body.num_clbits}."
            )
        # Warn (not error) when a *different* Parameter with the same name is
        # in the body: the indexset will not be bound to it.
        if (
            loop_parameter is not None
            and loop_parameter not in body.parameters
            and loop_parameter.name in (p.name for p in body.parameters)
        ):
            warnings.warn(
                "The Parameter provided as a loop_parameter was not found "
                "on the loop body and so no binding of the indexset to loop "
                "parameter will occur. A different Parameter of the same name "
                f"({loop_parameter.name}) was found. If you intended to loop "
                "over that Parameter, please use that Parameter instance as "
                "the loop_parameter.",
                stacklevel=2,
            )
        # Consume indexset into a tuple unless it was provided as a range;
        # ranges are preserved so they can be exported as OpenQASM3 ranges.
        indexset = indexset if isinstance(indexset, range) else tuple(indexset)
        self._params = [indexset, loop_parameter, body]
    @property
    def blocks(self):
        """The single block (the loop body circuit) of this control-flow op."""
        return (self._params[2],)
class ForLoopContext:
    """Context manager for building a ``for`` loop onto a circuit in natural
    order: statements issued inside the ``with`` block become the loop body,
    and a loop :class:`Parameter` is auto-generated if none is supplied.
    Normally created via ``QuantumCircuit.for_loop`` rather than directly.
    """
    # Class-level counter of auto-generated loop variables, so we don't
    # get naming clashes.
    _generated_loop_parameters = 0
    __slots__ = (
        "_circuit",
        "_generate_loop_parameter",
        "_loop_parameter",
        "_indexset",
        "_label",
        "_used",
    )
    def __init__(
        self,
        circuit: QuantumCircuit,
        indexset: Iterable[int],
        loop_parameter: Optional[Parameter] = None,
        *,
        label: Optional[str] = None,
    ):
        self._circuit = circuit
        # Remember whether we must invent a loop parameter on __enter__.
        self._generate_loop_parameter = loop_parameter is None
        self._loop_parameter = loop_parameter
        # We can pass through `range` instances because OpenQASM 3 has native support for this type
        # of iterator set.
        self._indexset = indexset if isinstance(indexset, range) else tuple(indexset)
        self._label = label
        self._used = False
    def __enter__(self):
        # Single-use guard: a second `with` on the same instance is an error.
        if self._used:
            raise CircuitError("A for-loop context manager cannot be re-entered.")
        self._used = True
        self._circuit._push_scope()
        if self._generate_loop_parameter:
            self._loop_parameter = Parameter(f"_loop_i_{self._generated_loop_parameters}")
            type(self)._generated_loop_parameters += 1
        # The returned value is what `as i` binds to in the caller.
        return self._loop_parameter
    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            # If we're leaving the context manager because an exception was raised, there's nothing
            # to do except restore the circuit state.
            self._circuit._pop_scope()
            return False
        scope = self._circuit._pop_scope()
        # Loops do not need to pass any further resources in, because this scope itself defines the
        # extent of ``break`` and ``continue`` statements.
        body = scope.build(scope.qubits, scope.clbits)
        # We always bind the loop parameter if the user gave it to us, even if it isn't actually
        # used; an auto-generated parameter is only kept when the body uses it.
        if self._generate_loop_parameter and self._loop_parameter not in body.parameters:
            loop_parameter = None
        else:
            loop_parameter = self._loop_parameter
        self._circuit.append(
            ForLoopOp(self._indexset, loop_parameter, body, label=self._label),
            tuple(body.qubits),
            tuple(body.clbits),
        )
        return False
| true | true |
f728f0e8a193188e2b3867c59814c0860e7e1ed5 | 5,671 | py | Python | baselines/ddpg/main.py | wensun/baselines | 81b7b988918de2c1c2f5fa9f38b7716608efc125 | [
"MIT"
] | null | null | null | baselines/ddpg/main.py | wensun/baselines | 81b7b988918de2c1c2f5fa9f38b7716608efc125 | [
"MIT"
] | null | null | null | baselines/ddpg/main.py | wensun/baselines | 81b7b988918de2c1c2f5fa9f38b7716608efc125 | [
"MIT"
] | null | null | null | import argparse
import time
import os
import logging
from baselines import logger, bench
from baselines.common.misc_util import (
set_global_seeds,
boolean_flag,
)
#import baselines.ddpg.training as training
import training as training
from baselines.ddpg.models import Actor, Critic
from baselines.ddpg.memory import Memory
from baselines.ddpg.noise import *
import gym
import tensorflow as tf
from mpi4py import MPI
def run(env_id, seed, noise_type, layer_norm, evaluation, **kwargs):
# Configure things.
rank = MPI.COMM_WORLD.Get_rank()
if rank != 0:
logger.set_level(logger.DISABLED)
# Create envs.
env = gym.make(env_id)
env = bench.Monitor(env, logger.get_dir() and os.path.join(logger.get_dir(), str(rank)))
if evaluation and rank==0:
eval_env = gym.make(env_id)
eval_env = bench.Monitor(eval_env, os.path.join(logger.get_dir(), 'gym_eval'))
#env = bench.Monitor(env, None)
else:
eval_env = None
# Parse noise_type
action_noise = None
param_noise = None
nb_actions = env.action_space.shape[-1]
for current_noise_type in noise_type.split(','):
current_noise_type = current_noise_type.strip()
if current_noise_type == 'none':
pass
elif 'adaptive-param' in current_noise_type:
_, stddev = current_noise_type.split('_')
param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev))
elif 'normal' in current_noise_type:
_, stddev = current_noise_type.split('_')
action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions))
elif 'ou' in current_noise_type:
_, stddev = current_noise_type.split('_')
action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions))
else:
raise RuntimeError('unknown noise type "{}"'.format(current_noise_type))
# Configure components.
memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape)
critic = Critic(layer_norm=layer_norm)
actor = Actor(nb_actions, layer_norm=layer_norm)
# Seed everything to make things reproducible.
seed = seed + 1000000 * rank
logger.info('rank {}: seed={}, logdir={}'.format(rank, seed, logger.get_dir()))
tf.reset_default_graph()
set_global_seeds(seed)
env.seed(seed)
if eval_env is not None:
eval_env.seed(seed)
# Disable logging for rank != 0 to avoid noise.
if rank == 0:
start_time = time.time()
training.train(env=env, eval_env=eval_env, param_noise=param_noise,
action_noise=action_noise, actor=actor, critic=critic, memory=memory, **kwargs)
env.close()
if eval_env is not None:
eval_env.close()
if rank == 0:
logger.info('total runtime: {}s'.format(time.time() - start_time))
def parse_args():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--env-id', type=str, default='HalfCheetah-v2')
boolean_flag(parser, 'render-eval', default=False)
boolean_flag(parser, 'layer-norm', default=True)
boolean_flag(parser, 'render', default=False)
boolean_flag(parser, 'normalize-returns', default=False)
boolean_flag(parser, 'normalize-observations', default=True)
parser.add_argument('--seed', help='RNG seed', type=int, default=0)
parser.add_argument('--critic-l2-reg', type=float, default=1e-2)
parser.add_argument('--batch-size', type=int, default=64) # per MPI worker
parser.add_argument('--actor-lr', type=float, default=1e-4)
parser.add_argument('--critic-lr', type=float, default=1e-3)
boolean_flag(parser, 'popart', default=False)
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--reward-scale', type=float, default=1.)
parser.add_argument('--clip-norm', type=float, default=None)
parser.add_argument('--nb-epochs', type=int, default=500) # with default settings, perform 1M steps total, was 500
parser.add_argument('--nb-epoch-cycles', type=int, default=20)
parser.add_argument('--nb-train-steps', type=int, default=50) # per epoch cycle and MPI worker
parser.add_argument('--nb-eval-steps', type=int, default=1000) # per epoch cycle and MPI worker
parser.add_argument('--nb-rollout-steps', type=int, default=100) # per epoch cycle and MPI worker
parser.add_argument('--noise-type', type=str, default='adaptive-param_0.2') # choices are adaptive-param_xx, ou_xx, normal_xx, none
parser.add_argument('--num-timesteps', type=int, default=None)
parser.add_argument('--alg', type=str, default='DDPG') # DDPG or DDPGRM
#boolean_flag(parser, 'evaluation', default=False)
boolean_flag(parser, 'evaluation', default=True) #turn evaluation on
args = parser.parse_args()
# we don't directly specify timesteps for this script, so make sure that if we do specify them
# they agree with the other parameters. default: 1M total steps
eval_steps_per_epoch = args.nb_epoch_cycles*args.nb_eval_steps #defualt: 1000*20 = 10K (~ 20 episodes)
print(args)
if args.num_timesteps is not None:
assert(args.num_timesteps == args.nb_epochs * args.nb_epoch_cycles * args.nb_rollout_steps)
dict_args = vars(args)
del dict_args['num_timesteps']
return dict_args
if __name__ == '__main__':
args = parse_args()
if MPI.COMM_WORLD.Get_rank() == 0:
logger.configure()
# Run actual script.
run(**args)
| 42.962121 | 136 | 0.696703 | import argparse
import time
import os
import logging
from baselines import logger, bench
from baselines.common.misc_util import (
set_global_seeds,
boolean_flag,
)
import training as training
from baselines.ddpg.models import Actor, Critic
from baselines.ddpg.memory import Memory
from baselines.ddpg.noise import *
import gym
import tensorflow as tf
from mpi4py import MPI
def run(env_id, seed, noise_type, layer_norm, evaluation, **kwargs):
    """Build the DDPG components for a gym environment and launch training.

    Args:
        env_id: gym environment id (e.g. ``'HalfCheetah-v2'``).
        seed: base RNG seed; offset per MPI rank below.
        noise_type: comma-separated spec, each entry one of ``none``,
            ``adaptive-param_<stddev>``, ``normal_<stddev>``, ``ou_<stddev>``.
        layer_norm: whether actor/critic use layer normalization.
        evaluation: if True, rank 0 also creates a separate eval environment.
        **kwargs: forwarded to ``training.train``.
    """
    # Only rank 0 logs; other MPI workers are silenced.
    rank = MPI.COMM_WORLD.Get_rank()
    if rank != 0:
        logger.set_level(logger.DISABLED)
    # Training env, wrapped in a Monitor writing per-rank logs.
    env = gym.make(env_id)
    env = bench.Monitor(env, logger.get_dir() and os.path.join(logger.get_dir(), str(rank)))
    if evaluation and rank==0:
        eval_env = gym.make(env_id)
        eval_env = bench.Monitor(eval_env, os.path.join(logger.get_dir(), 'gym_eval'))
    else:
        eval_env = None
    # Parse the noise spec into parameter-space and/or action-space noise.
    action_noise = None
    param_noise = None
    nb_actions = env.action_space.shape[-1]
    for current_noise_type in noise_type.split(','):
        current_noise_type = current_noise_type.strip()
        if current_noise_type == 'none':
            pass
        elif 'adaptive-param' in current_noise_type:
            _, stddev = current_noise_type.split('_')
            param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev))
        elif 'normal' in current_noise_type:
            _, stddev = current_noise_type.split('_')
            action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions))
        elif 'ou' in current_noise_type:
            _, stddev = current_noise_type.split('_')
            action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions))
        else:
            raise RuntimeError('unknown noise type "{}"'.format(current_noise_type))
    # Replay buffer and actor/critic networks.
    memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape)
    critic = Critic(layer_norm=layer_norm)
    actor = Actor(nb_actions, layer_norm=layer_norm)
    # Per-rank seed offset so MPI workers don't share RNG streams.
    seed = seed + 1000000 * rank
    logger.info('rank {}: seed={}, logdir={}'.format(rank, seed, logger.get_dir()))
    tf.reset_default_graph()
    set_global_seeds(seed)
    env.seed(seed)
    if eval_env is not None:
        eval_env.seed(seed)
    if rank == 0:
        start_time = time.time()
    training.train(env=env, eval_env=eval_env, param_noise=param_noise,
        action_noise=action_noise, actor=actor, critic=critic, memory=memory, **kwargs)
    env.close()
    if eval_env is not None:
        eval_env.close()
    if rank == 0:
        logger.info('total runtime: {}s'.format(time.time() - start_time))
def parse_args():
    """Parse command-line arguments and return them as a dict for ``run``.

    ``--num-timesteps`` is only used as a consistency check against
    nb_epochs * nb_epoch_cycles * nb_rollout_steps and is removed before
    returning.
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--env-id', type=str, default='HalfCheetah-v2')
    boolean_flag(parser, 'render-eval', default=False)
    boolean_flag(parser, 'layer-norm', default=True)
    boolean_flag(parser, 'render', default=False)
    boolean_flag(parser, 'normalize-returns', default=False)
    boolean_flag(parser, 'normalize-observations', default=True)
    parser.add_argument('--seed', help='RNG seed', type=int, default=0)
    parser.add_argument('--critic-l2-reg', type=float, default=1e-2)
    parser.add_argument('--batch-size', type=int, default=64)  # per MPI worker
    parser.add_argument('--actor-lr', type=float, default=1e-4)
    parser.add_argument('--critic-lr', type=float, default=1e-3)
    boolean_flag(parser, 'popart', default=False)
    parser.add_argument('--gamma', type=float, default=0.99)
    parser.add_argument('--reward-scale', type=float, default=1.)
    parser.add_argument('--clip-norm', type=float, default=None)
    parser.add_argument('--nb-epochs', type=int, default=500)
    parser.add_argument('--nb-epoch-cycles', type=int, default=20)
    parser.add_argument('--nb-train-steps', type=int, default=50)  # per epoch cycle and MPI worker
    parser.add_argument('--nb-eval-steps', type=int, default=1000)  # per epoch cycle and MPI worker
    parser.add_argument('--nb-rollout-steps', type=int, default=100)  # per epoch cycle and MPI worker
    parser.add_argument('--noise-type', type=str, default='adaptive-param_0.2')  # adaptive-param_xx, ou_xx, normal_xx, none
    parser.add_argument('--num-timesteps', type=int, default=None)
    parser.add_argument('--alg', type=str, default='DDPG')
    boolean_flag(parser, 'evaluation', default=True)  # evaluation on by default
    args = parser.parse_args()
    # If timesteps are given explicitly, make sure they agree with the other
    # parameters (checked below).
    eval_steps_per_epoch = args.nb_epoch_cycles*args.nb_eval_steps  # default: 1000*20 = 10K (~ 20 episodes); currently unused
    print(args)
    if args.num_timesteps is not None:
        assert(args.num_timesteps == args.nb_epochs * args.nb_epoch_cycles * args.nb_rollout_steps)
    dict_args = vars(args)
    del dict_args['num_timesteps']
    return dict_args
if __name__ == '__main__':
    args = parse_args()
    # Only the root MPI rank configures the logger (see run() for silencing).
    if MPI.COMM_WORLD.Get_rank() == 0:
        logger.configure()
    # Run actual script.
    run(**args)
| true | true |
f728f0fa1b2ee82aebef93fdc59f9d8021dc8a1a | 3,779 | py | Python | feincms/content/filer/models.py | ixc/feincms | d3746174190acbe7caf32078ddc5ec7c2e48b8ef | [
"BSD-3-Clause"
] | 1 | 2016-05-07T11:50:28.000Z | 2016-05-07T11:50:28.000Z | feincms/content/filer/models.py | ixc/feincms | d3746174190acbe7caf32078ddc5ec7c2e48b8ef | [
"BSD-3-Clause"
] | null | null | null | feincms/content/filer/models.py | ixc/feincms | d3746174190acbe7caf32078ddc5ec7c2e48b8ef | [
"BSD-3-Clause"
] | 1 | 2019-03-15T19:36:34.000Z | 2019-03-15T19:36:34.000Z | from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms.admin.item_editor import FeinCMSInline
from feincms._internal import ct_render_to_string
try:
from filer.fields.file import FilerFileField
from filer.fields.image import FilerImageField
except ImportError:
__all__ = ()
else:
__all__ = (
'MediaFileContentInline', 'ContentWithFilerFile',
'FilerFileContent', 'FilerImageContent',
)
class MediaFileContentInline(FeinCMSInline):
radio_fields = {'type': admin.VERTICAL}
class ContentWithFilerFile(models.Model):
"""
File content
"""
feincms_item_editor_inline = MediaFileContentInline
class Meta:
abstract = True
def render(self, **kwargs):
return ct_render_to_string(
[
'content/filer/%s_%s.html' % (self.file_type, self.type),
'content/filer/%s.html' % self.type,
'content/filer/%s.html' % self.file_type,
'content/filer/default.html',
],
{'content': self},
request=kwargs.get('request'),
context=kwargs.get('context'),
)
class FilerFileContent(ContentWithFilerFile):
mediafile = FilerFileField(verbose_name=_('file'), related_name='+')
file_type = 'file'
type = 'download'
class Meta:
abstract = True
verbose_name = _('file')
verbose_name_plural = _('files')
class FilerImageContent(ContentWithFilerFile):
"""
Create a media file content as follows::
from feincms.contents import FilerImageContent
Page.create_content_type(FilerImageContent, TYPE_CHOICES=(
('inline', _('Default')),
('lightbox', _('Lightbox')),
('whatever', _('Whatever')),
))
For a media file of type 'image' and type 'lightbox', the following
templates are tried in order:
* content/mediafile/image_lightbox.html
* content/mediafile/lightbox.html
* content/mediafile/image.html
* content/mediafile/default.html
The context contains ``content`` and ``request`` (if available).
The content.mediafile attribute are as follows (selection):
label, description, default_caption, default_alt_text,
author, must_always_publish_author_credit,
must_always_publish_copyright, date_taken, file, id, is_public, url
"""
mediafile = FilerImageField(verbose_name=_('image'), related_name='+')
caption = models.CharField(
_('caption'),
max_length=1000,
blank=True,
)
url = models.CharField(
_('URL'),
max_length=1000,
blank=True,
)
file_type = 'image'
class Meta:
abstract = True
verbose_name = _('image')
verbose_name_plural = _('images')
@classmethod
def initialize_type(cls, TYPE_CHOICES=None):
if TYPE_CHOICES is None:
raise ImproperlyConfigured(
'You have to set TYPE_CHOICES when'
' creating a %s' % cls.__name__)
cls.add_to_class(
'type',
models.CharField(
_('type'),
max_length=20,
choices=TYPE_CHOICES,
default=TYPE_CHOICES[0][0],
),
)
| 31.231405 | 78 | 0.576078 | from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms.admin.item_editor import FeinCMSInline
from feincms._internal import ct_render_to_string
try:
from filer.fields.file import FilerFileField
from filer.fields.image import FilerImageField
except ImportError:
__all__ = ()
else:
__all__ = (
'MediaFileContentInline', 'ContentWithFilerFile',
'FilerFileContent', 'FilerImageContent',
)
class MediaFileContentInline(FeinCMSInline):
radio_fields = {'type': admin.VERTICAL}
class ContentWithFilerFile(models.Model):
"""
File content
"""
feincms_item_editor_inline = MediaFileContentInline
class Meta:
abstract = True
def render(self, **kwargs):
return ct_render_to_string(
[
'content/filer/%s_%s.html' % (self.file_type, self.type),
'content/filer/%s.html' % self.type,
'content/filer/%s.html' % self.file_type,
'content/filer/default.html',
],
{'content': self},
request=kwargs.get('request'),
context=kwargs.get('context'),
)
class FilerFileContent(ContentWithFilerFile):
mediafile = FilerFileField(verbose_name=_('file'), related_name='+')
file_type = 'file'
type = 'download'
class Meta:
abstract = True
verbose_name = _('file')
verbose_name_plural = _('files')
class FilerImageContent(ContentWithFilerFile):
    """
    Create a media file content as follows::

        from feincms.contents import FilerImageContent
        Page.create_content_type(FilerImageContent, TYPE_CHOICES=(
            ('inline', _('Default')),
            ('lightbox', _('Lightbox')),
            ('whatever', _('Whatever')),
        ))

    For a media file of type 'image' and type 'lightbox', the following
    templates are tried in order:

    * content/mediafile/image_lightbox.html
    * content/mediafile/lightbox.html
    * content/mediafile/image.html
    * content/mediafile/default.html

    The context contains ``content`` and ``request`` (if available).

    The content.mediafile attributes are as follows (selection):
    label, description, default_caption, default_alt_text,
    author, must_always_publish_author_credit,
    must_always_publish_copyright, date_taken, file, id, is_public, url
    """
    mediafile = FilerImageField(verbose_name=_('image'), related_name='+')
    caption = models.CharField(
        _('caption'),
        max_length=1000,
        blank=True,
    )
    url = models.CharField(
        _('URL'),
        max_length=1000,
        blank=True,
    )
    file_type = 'image'

    class Meta:
        abstract = True
        verbose_name = _('image')
        verbose_name_plural = _('images')

    @classmethod
    def initialize_type(cls, TYPE_CHOICES=None):
        # create_content_type must pass TYPE_CHOICES so the dynamically added
        # ``type`` field has choices; fail loudly otherwise.
        if TYPE_CHOICES is None:
            raise ImproperlyConfigured(
                'You have to set TYPE_CHOICES when'
                ' creating a %s' % cls.__name__)
        cls.add_to_class(
            'type',
            models.CharField(
                _('type'),
                max_length=20,
                choices=TYPE_CHOICES,
                # default is the first declared choice
                default=TYPE_CHOICES[0][0],
            ),
        )
| true | true |
f728f1fe99a294562108e7f14024099bb7779f8d | 10,345 | py | Python | src/TypingExercise.py | mruiz42/zhong-wen-trainer-master | b565c40185d7184515be335d848a653cc7f86b51 | [
"Unlicense"
] | 2 | 2021-02-16T11:09:17.000Z | 2021-12-17T13:50:40.000Z | src/TypingExercise.py | mruiz42/zhong-wen-trainer-master | b565c40185d7184515be335d848a653cc7f86b51 | [
"Unlicense"
] | 4 | 2019-05-18T02:16:02.000Z | 2021-12-17T14:58:43.000Z | src/TypingExercise.py | mruiz42/zhong-wen-trainer-master | b565c40185d7184515be335d848a653cc7f86b51 | [
"Unlicense"
] | null | null | null | from src.StarDelegate import *
from src.driverUi.callReview import *
from datetime import *
from src.utilities.SqlTools import *
from PyQt5.QtGui import QStandardItemModel, QStandardItem
class TypingExercise():
    """Runs a typing-based vocabulary drill over a word deck.

    Shows one card at a time in the main window, compares the typed answer
    against every acceptable definition, keeps per-card hit/miss statistics
    and opens a review dialog at deck-defined checkpoints (``intermission``).
    """

    def __init__(self, main_window, word_deck, parent=None):
        super(TypingExercise, self).__init__()
        self.win = main_window
        self.wordDeck = word_deck
        self.model = QStandardItemModel()  # model backing the review-session table
        self.missedWordSet = set()         # cards missed at least once this session
        self.reviewSet = set()             # cards answered correctly this session
        self.startTime = datetime.datetime.now()
        print("Started session at: ", self.startTime)

    def resetUi(self):
        """Wires up the typing widgets for a fresh session."""
        self.setStatLabels()
        self.win.ui.pushButton_enter.setEnabled(True)
        self.win.ui.pushButton_enter.clicked.connect(self.submitAnswer)
        self.win.ui.pushButton_notSure_Skip.clicked.connect(self.nextWord)
        self.win.ui.pushButton_notSure_Skip.hide()
        self.win.ui.lineEdit_answer.textEdited['QString'].connect(lambda: self.win.ui.pushButton_enter.setText("Enter"))

    def setStatLabels(self):
        """Refreshes the correct/missed counters for the current card."""
        cn = self.wordDeck.cardNum
        timesCorrect = self.wordDeck.studyList[cn].timesCorrect
        timesMissed = str(self.wordDeck.studyList[cn].timesAttempted - timesCorrect)
        self.win.ui.label_typingCorrect.setText("Times Correct: " + str(timesCorrect))
        self.win.ui.label_typingMissed.setText("Times Missed: " + str(timesMissed))

    def buildAnswerList(self, input_str):
        """Builds a list of normalized acceptable answers from a definition string.

        Definitions may hold several alternatives separated by ';'; each
        alternative is sanitized (punctuation and spaces stripped,
        lower-cased) before being kept.
        """
        ansList = []
        if ';' in input_str:
            tempAnsList = input_str.split(';')
        else:
            tempAnsList = [input_str]
        for ans in tempAnsList:
            # NOTE(review): this keeps the text *from* the first '(' onward;
            # ans[:pos] (dropping the parenthetical) may have been intended —
            # behavior preserved pending confirmation.
            pos = ans.find('(')
            if pos != -1:
                ans = ans[pos:]
            a = self.sanitizeInput(ans)
            if len(a) > 0 and a != " ":
                ansList.append(a)
        return ansList

    def compareAnswerLists(self, user_input_list: list, answer_list: list):
        """Returns True when any user alternative matches an accepted answer."""
        for u in user_input_list:
            if u in answer_list:
                return True
        return False

    def submitAnswer(self):
        """Checks the typed answer and routes to the correct/incorrect flow."""
        currWordData = self.wordDeck.studyList[self.wordDeck.cardNum]
        userInput = self.win.ui.lineEdit_answer.text()
        answer = self.wordDeck.studyList[self.wordDeck.cardNum].definition
        userInputList = self.buildAnswerList(userInput)
        answerList = self.buildAnswerList(answer)
        print("user input:", userInputList, " answer:", answerList)
        if self.compareAnswerLists(userInputList, answerList):
            self.proceedUi()
            print("Current word test_data: ", currWordData)
            self.reviewSet.add(currWordData)
        else:
            self.pauseUi(answerList)
            print("Current word test_data: ", currWordData)
            self.missedWordSet.add(currWordData)

    def pauseUi(self, answer_list: list):
        """Handles a miss: records the attempt and makes the user retype the
        correct answer before the session can continue."""
        cn = self.wordDeck.cardNum
        self.wordDeck.studyList[cn].timesAttempted += 1
        self.setStatLabels()
        self.wordDeck.missedWordList.append(self.wordDeck.studyList[cn])  # track incorrect answers
        # unpauseUi re-enables 'Enter' once the retyped answer matches
        self.win.ui.lineEdit_answer.textChanged.connect(lambda: self.unpauseUi(answer_list))
        percentLabelStr = self.wordDeck.calcPercentageCorrect()
        answerLabelStr = self.wordDeck.studyList[cn].definition
        self.win.ui.label_typingFractionCorrect.setText("%" + str(percentLabelStr))
        self.win.ui.pushButton_notSure_Skip.show()
        self.win.ui.lineEdit_answer.clear()
        self.win.ui.label_typingWord.setText("Oops! Correct answer is:\n" + answerLabelStr)
        self.win.ui.pushButton_notSure_Skip.setText("I was right")
        self.win.ui.pushButton_notSure_Skip.clicked.disconnect()
        self.win.ui.pushButton_notSure_Skip.clicked.connect(self.wasRight)
        self.win.ui.lineEdit_answer.setPlaceholderText("Enter the correct answer")

    def wasRight(self):
        """Undoes the attempt count when the user claims a false miss."""
        cn = self.wordDeck.cardNum
        print(self.wordDeck.studyList[cn])
        self.wordDeck.studyList[cn].timesAttempted -= 1
        print(self.wordDeck.studyList[cn])
        self.nextWord()

    def unpauseUi(self, answer_list: str):
        """Re-enables 'Enter' once the user has retyped a correct answer."""
        userInputList = self.sanitizeInput(self.win.ui.lineEdit_answer.text())
        if userInputList in answer_list:
            print("ui unpaused")  # fixed typo in debug output
            self.win.ui.pushButton_enter.setText("Enter")
            self.win.ui.pushButton_enter.setEnabled(True)
            self.win.ui.pushButton_enter.clicked.disconnect()
            self.win.ui.pushButton_enter.clicked.connect(self.nextWord)
        else:
            self.win.ui.pushButton_enter.setEnabled(False)

    def proceedUi(self):
        """Handles a correct answer: updates stats and arms the continue flow."""
        cn = self.wordDeck.cardNum
        answer = self.wordDeck.studyList[cn].definition
        self.wordDeck.studyList[cn].timesCorrect += 1
        self.wordDeck.studyList[cn].timesAttempted += 1
        self.setStatLabels()
        percent = self.wordDeck.calcPercentageCorrect()
        self.win.ui.lineEdit_answer.clear()
        self.win.ui.label_typingFractionCorrect.setText("%" + str(percent))
        self.win.ui.label_typingWord.setText("Correct!\n" + answer)
        self.win.ui.pushButton_enter.setText("Continue")
        self.win.ui.lineEdit_answer.setPlaceholderText("Press Enter to continue")
        self.win.ui.lineEdit_answer.setDisabled(True)
        self.win.ui.pushButton_enter.clicked.disconnect()
        self.win.ui.pushButton_enter.clicked.connect(self.nextWord)

    def intermission(self):
        """Checkpoint in the typing module: shows a review dialog with the
        session's progress so far."""
        review = ReviewDialog(self.win)
        combinedSet = self.missedWordSet | self.reviewSet
        self.model.clear()
        for i in combinedSet:
            # columns: #, starred, word, definition, pronunciation, # correct, # missed
            cardNum = QStandardItem(str(i.cardNum))
            isStarred = QStandardItem(str(i.isStarred))
            vocabulary = QStandardItem(i.vocabulary)
            definition = QStandardItem(i.definition)
            pronunciation = QStandardItem(i.pronunciation)
            timesCorrect = QStandardItem(str(i.timesCorrect))
            timesMissed = QStandardItem(str(i.timesAttempted - i.timesCorrect))
            row = [cardNum, isStarred, vocabulary, definition, pronunciation, timesCorrect, timesMissed]
            print(i.timesAttempted, i.timesCorrect)
            self.model.appendRow(row)
        self.n = StarDelegate(review.rd.tableView)
        headers = ["#", "⭐", "Vocabulary", "Definition", "Pronunciation", "# Correct", "# Missed"]
        self.model.sort(5)
        review.rd.tableView.setItemDelegateForColumn(1, self.n)
        self.model.setHorizontalHeaderLabels(headers)
        review.rd.tableView.setColumnWidth(0, 40)
        review.rd.tableView.setModel(self.model)
        # NOTE(review): PyQt5's QTableView.sortByColumn normally takes
        # (column, Qt.SortOrder); confirm this single-argument call works on
        # the targeted Qt version.
        review.rd.tableView.sortByColumn(5)
        review.show()

    def changeStar(self, index, isStarred: QStandardItem):
        """Toggles the star flag item between '0' and '1'."""
        # QStandardItem.text() returns str and setText() expects str; the
        # original compared against ints, so the toggle could never fire.
        if isStarred.text() == '0':
            isStarred.setText('1')
        elif isStarred.text() == '1':
            isStarred.setText('0')

    def updateSession(self):
        """Persists the current session metadata to the vocabulary database."""
        # Fixed: the leading dead `pass`, the unused combinedSet local, and
        # the reference to self.DATABASE_PATH (never set on the instance —
        # would raise AttributeError) are gone; the local constant is used.
        DATABASE_PATH = '../test_data/vocab2.db'
        database = SqlTools(DATABASE_PATH)
        rows = (self.startTime, self.win.nameOfCurrentDeck)
        database.insertSession(rows)
        database.close()

    def nextWord(self):
        """Advances to the next card, handling end-of-deck and checkpoints."""
        self.win.ui.progressBar_typing.setValue(self.wordDeck.cardNum + 1)
        self.win.ui.label_typingFractionCorrect.clear()
        print(self.wordDeck.cardNum + 1, " of ", len(self.wordDeck.studyList))
        if self.wordDeck.cardNum == len(self.wordDeck.studyList):
            print("END GAME")
            return
        elif self.wordDeck.cardNum in self.wordDeck.summaryIndexList:
            print("OVERVIEW")
            print("Here are the words most commonly missed:")
            self.intermission()
            for i in self.missedWordSet:
                timesMissed = i.timesAttempted - i.timesCorrect
                print(i, timesMissed)
        self.wordDeck.cardNum += 1
        self.setStatLabels()
        self.win.ui.lineEdit_answer.setEnabled(True)
        self.win.ui.pushButton_notSure_Skip.hide()
        self.win.ui.lineEdit_answer.setFocus()
        self.win.ui.lineEdit_answer.clear()
        self.win.ui.lineEdit_answer.setPlaceholderText("Enter your answer")
        self.win.ui.pushButton_enter.setText("Don't Know")
        self.win.ui.label_typingWord.setText(self.wordDeck.studyList[self.wordDeck.cardNum].vocabulary)
        self.win.ui.pushButton_enter.clicked.disconnect()
        self.win.ui.pushButton_enter.clicked.connect(self.submitAnswer)
        try:
            # textChanged is only connected while a miss is being corrected
            self.win.ui.lineEdit_answer.textChanged.disconnect()
        except RuntimeError:
            print("didnt have connection?")
        self.win.ui.pushButton_enter.setEnabled(True)

    def sanitizeInput(self, input_str: str):
        """Strips punctuation and spaces and lower-cases for comparison."""
        punct = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ '
        return input_str.translate({ord(ch): '' for ch in punct}).lower()
| 45.174672 | 120 | 0.652779 | from src.StarDelegate import *
from src.driverUi.callReview import *
from datetime import *
from src.utilities.SqlTools import *
from PyQt5.QtGui import QStandardItemModel, QStandardItem
class TypingExercise():
def __init__(self, main_window, word_deck, parent=None):
super(TypingExercise, self).__init__()
self.win = main_window
self.wordDeck = word_deck
self.model = QStandardItemModel()
self.missedWordSet = set()
self.reviewSet = set()
self.startTime = datetime.datetime.now()
print("Started session at: ", self.startTime)
def resetUi(self):
self.setStatLabels()
self.win.ui.pushButton_enter.setEnabled(True)
self.win.ui.pushButton_enter.clicked.connect(self.submitAnswer)
self.win.ui.pushButton_notSure_Skip.clicked.connect(self.nextWord)
self.win.ui.pushButton_notSure_Skip.hide()
self.win.ui.lineEdit_answer.textEdited['QString'].connect(lambda: self.win.ui.pushButton_enter.setText("Enter"))
def setStatLabels(self):
cn = self.wordDeck.cardNum
timesCorrect = self.wordDeck.studyList[cn].timesCorrect
timesMissed = str(self.wordDeck.studyList[cn].timesAttempted - timesCorrect)
self.win.ui.label_typingCorrect.setText("Times Correct: " + str(timesCorrect))
self.win.ui.label_typingMissed.setText("Times Missed: " + str(timesMissed))
def buildAnswerList(self, input_str):
tempAnsList = []
ansList = []
if ';' in input_str:
tempAnsList = input_str.split(';')
else:
tempAnsList.append(input_str)
for ans in tempAnsList:
(ans)
if len(a) > 0 and a != " ":
ansList.append(a)
return ansList
def compareAnswerLists(self, user_input_list:list, answer_list:list):
for u in user_input_list:
if u in answer_list:
return True
return False
def submitAnswer(self):
currWordData = self.wordDeck.studyList[self.wordDeck.cardNum]
userInput = self.win.ui.lineEdit_answer.text()
answer = self.wordDeck.studyList[self.wordDeck.cardNum].definition
userInputList = self.buildAnswerList(userInput)
answerList = self.buildAnswerList(answer)
print("user input:",userInputList, " answer:", answerList)
if self.compareAnswerLists(userInputList, answerList):
self.proceedUi()
print("Current word test_data: ", currWordData)
self.reviewSet.add(currWordData)
else:
self.pauseUi(answerList)
print("Current word test_data: ", currWordData)
self.missedWordSet.add(currWordData)
def pauseUi(self, answer_list:list):
cn = self.wordDeck.cardNum
self.wordDeck.studyList[cn].timesAttempted += 1
self.setStatLabels()
self.wordDeck.missedWordList.append(self.wordDeck.studyList[cn])
self.win.ui.lineEdit_answer.textChanged.connect(lambda: self.unpauseUi(answer_list))
percentLabelStr = self.wordDeck.calcPercentageCorrect()
answerLabelStr = self.wordDeck.studyList[cn].definition
self.win.ui.label_typingFractionCorrect.setText("%" + str(percentLabelStr))
self.win.ui.pushButton_notSure_Skip.show()
self.win.ui.lineEdit_answer.clear()
self.win.ui.label_typingWord.setText("Oops! Correct answer is:\n" + answerLabelStr)
self.win.ui.pushButton_notSure_Skip.setText("I was right")
self.win.ui.pushButton_notSure_Skip.clicked.disconnect()
self.win.ui.pushButton_notSure_Skip.clicked.connect(self.wasRight)
self.win.ui.lineEdit_answer.setPlaceholderText("Enter the correct answer")
def wasRight(self):
cn = self.wordDeck.cardNum
print(self.wordDeck.studyList[cn])
self.wordDeck.studyList[cn].timesAttempted -= 1
print(self.wordDeck.studyList[cn])
self.nextWord()
def unpauseUi(self, answer_list:str):
userInputList = self.sanitizeInput(self.win.ui.lineEdit_answer.text())
if userInputList in answer_list:
print("ui unpauesd")
self.win.ui.pushButton_enter.setText("Enter")
self.win.ui.pushButton_enter.setEnabled(True)
self.win.ui.pushButton_enter.clicked.disconnect()
self.win.ui.pushButton_enter.clicked.connect(self.nextWord)
else:
self.win.ui.pushButton_enter.setEnabled(False)
def proceedUi(self):
cn = self.wordDeck.cardNum
answer = self.wordDeck.studyList[cn].definition
self.wordDeck.studyList[cn].timesCorrect += 1
self.wordDeck.studyList[cn].timesAttempted += 1
self.setStatLabels()
percent = self.wordDeck.calcPercentageCorrect()
self.win.ui.lineEdit_answer.clear()
self.win.ui.label_typingFractionCorrect.setText("%" + str(percent))
self.win.ui.label_typingWord.setText("Correct!\n" + answer)
self.win.ui.pushButton_enter.setText("Continue")
self.win.ui.lineEdit_answer.setPlaceholderText("Press Enter to continue")
self.win.ui.lineEdit_answer.setDisabled(True)
self.win.ui.pushButton_enter.clicked.disconnect()
self.win.ui.pushButton_enter.clicked.connect(self.nextWord)
def intermission(self):
review = ReviewDialog(self.win)
combinedSet = self.missedWordSet | self.reviewSet
self.model.clear()
for i in combinedSet:
cardNum = QStandardItem(str(i.cardNum))
timesMissed = i.timesAttempted - i.timesCorrect
isStarred = QStandardItem(str(i.isStarred))
vocabulary = QStandardItem(i.vocabulary)
definition = QStandardItem(i.definition)
pronunciation = QStandardItem(i.pronunciation)
timesCorrect = QStandardItem(str(i.timesCorrect))
timesMissed = QStandardItem(str(timesMissed))
row = [cardNum, isStarred, vocabulary, definition, pronunciation, timesCorrect, timesMissed]
print(i.timesAttempted, i.timesCorrect)
self.model.appendRow(row)
self.n = StarDelegate(review.rd .tableView)
headers = ["#", "⭐", "Vocabulary", "Definition", "Pronunciation", "# Correct", "# Missed"]
self.model.sort(5)
review.rd.tableView.setItemDelegateForColumn(1, self.n)
self.model.setHorizontalHeaderLabels(headers)
review.rd.tableView.setColumnWidth(0, 40)
review.rd.tableView.setModel(self.model)
review.rd.tableView.sortByColumn(5)
review.show()
def changeStar(self, index, isStarred:QStandardItem):
if isStarred.text() == 0:
isStarred.setText(1)
elif isStarred.text() == 1:
isStarred.setText(0)
def updateSession(self):
pass
DATABASE_PATH = '../test_data/vocab2.db'
database = SqlTools(self.DATABASE_PATH)
combinedSet = self.missedWordSet | self.reviewSet
rows = (self.startTime, self.win.nameOfCurrentDeck)
database.insertSession(rows)
database.close()
def nextWord(self):
self.win.ui.progressBar_typing.setValue(self.wordDeck.cardNum + 1)
self.win.ui.label_typingFractionCorrect.clear()
print(self.wordDeck.cardNum+1, " of ", len(self.wordDeck.studyList))
if self.wordDeck.cardNum == len(self.wordDeck.studyList):
print("END GAME")
return
elif self.wordDeck.cardNum in self.wordDeck.summaryIndexList:
print("OVERVIEW")
print("Here are the words most commonly missed:")
self.intermission()
for i in self.missedWordSet:
timesMissed = i.timesAttempted - i.timesCorrect
print(i, timesMissed)
self.wordDeck.cardNum += 1
self.setStatLabels()
self.win.ui.lineEdit_answer.setEnabled(True)
self.win.ui.pushButton_notSure_Skip.hide()
self.win.ui.lineEdit_answer.setFocus()
self.win.ui.lineEdit_answer.clear()
self.win.ui.lineEdit_answer.setPlaceholderText("Enter your answer")
self.win.ui.pushButton_enter.setText("Don't Know")
self.win.ui.label_typingWord.setText(self.wordDeck.studyList[self.wordDeck.cardNum].vocabulary)
self.win.ui.pushButton_enter.clicked.disconnect()
self.win.ui.pushButton_enter.clicked.connect(self.submitAnswer)
try:
self.win.ui.lineEdit_answer.textChanged.disconnect()
except RuntimeError:
print("didnt have connection?")
self.win.ui.pushButton_enter.setEnabled(True)
def sanitizeInput(self, input_str:str):
punct = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ '
return input_str.translate({ord(ch): '' for ch in punct}).lower()
| true | true |
f728f20365e4e5bd9201c8a3f0be78904dc5cadc | 12,097 | py | Python | kifurushi/packet.py | lewoudar/kifurushi | 996e631933f55a96dd7a23360b693731414bd944 | [
"Apache-2.0"
] | null | null | null | kifurushi/packet.py | lewoudar/kifurushi | 996e631933f55a96dd7a23360b693731414bd944 | [
"Apache-2.0"
] | 4 | 2021-08-18T16:07:47.000Z | 2021-12-08T17:21:50.000Z | kifurushi/packet.py | lewoudar/kifurushi | 996e631933f55a96dd7a23360b693731414bd944 | [
"Apache-2.0"
] | null | null | null | """This module defines the base Packet class and helper functions."""
import enum
import inspect
from copy import copy
from typing import Iterable, Dict, Union, Any, Callable, List, Type
from kifurushi.utils.network import hexdump
from .abc import Field, CommonField
from .fields import BitsField, FieldPart, ConditionalField
class Packet:
    """Base class representing a network packet as an ordered list of fields."""

    # Subclasses (or create_packet_class) must provide the field templates.
    __fields__: Iterable[Field] = None

    def __init__(self, **kwargs):
        # Clone the class-level templates so instances never share field state.
        self._fields = [field.clone() for field in self.__fields__]
        self._field_mapping = self._create_field_mapping(self._fields)
        self._check_arguments(kwargs)
        value_mapping = {**self._get_default_values(), **kwargs}
        for name, value in value_mapping.items():
            setattr(self, name, value)

    @staticmethod
    def _create_field_mapping(fields: List[Field]) -> Dict[str, Field]:
        """Maps every attribute name (including bit-field parts) to its field."""
        error_message = 'you already have a field with name {name}'
        field_mapping = {}
        for field in fields:
            if isinstance(field, BitsField):
                # a BitsField exposes one attribute per FieldPart
                for field_part in field.parts:
                    if field_part.name in field_mapping:
                        raise AttributeError(error_message.format(name=field_part.name))
                    field_mapping[field_part.name] = field
            else:
                if field.name in field_mapping:
                    raise AttributeError(error_message.format(name=field.name))
                field_mapping[field.name] = field
        return field_mapping

    def _check_arguments(self, arguments: Dict[str, Any]) -> None:
        """Rejects keyword arguments that do not name a packet field."""
        for argument in arguments:
            if argument not in self._field_mapping:
                raise AttributeError(f'there is no attribute with name {argument}')

    def _get_default_values(self) -> Dict[str, Union[str, int]]:
        """Returns the current value of every field attribute."""
        result = {}
        for name, field in self._field_mapping.items():
            if isinstance(field, BitsField):
                result[name] = field[name].value
            else:
                result[name] = field.value
        return result

    @staticmethod
    def _set_packet_attributes(packet: 'Packet') -> None:
        """Synchronizes all instance attributes with their field values."""
        for name, field in packet._field_mapping.items():
            value = field[name].value if isinstance(field, BitsField) else field.value
            setattr(packet, name, value)

    @staticmethod
    def _set_packet_attribute(field: Field, packet: 'Packet') -> None:
        """Synchronizes the instance attribute(s) backed by a single field."""
        if isinstance(field, BitsField):
            for field_part in field.parts:
                setattr(packet, field_part.name, field_part.value)
        else:
            setattr(packet, field.name, field.value)

    @classmethod
    def from_bytes(cls, data: bytes) -> 'Packet':
        """
        Creates a packet from bytes and returns it.

        **Parameters:**

        **data:** The raw bytes used to construct a packet object.
        """
        packet = cls()
        for field in packet._fields:
            data = field.compute_value(data, packet)
            # we need to set directly the field after it is parsed, so that next fields depending on
            # previous fields can check whether or not they need to parse data
            cls._set_packet_attribute(field, packet)
        return packet

    @classmethod
    def random_packet(cls) -> 'Packet':
        """Returns a packet with fields having random values."""
        packet = cls()
        for field in packet._fields:
            field.value = field.random_value()
        cls._set_packet_attributes(packet)
        return packet

    @property
    def fields(self) -> List[Field]:
        """Returns a copy of the list of fields composing the packet object."""
        return [field.clone() for field in self._fields]

    @staticmethod
    def _set_enum_field(
            field: Union[CommonField, FieldPart],
            value: Union[int, str, enum.Enum],
            set_attr: Callable[[str, Any], None]
    ) -> None:
        """Assigns *value* to an enum-aware field, resolving string names."""
        if isinstance(value, str):
            found = False
            if field.enumeration is not None:
                for key, name in field.enumeration.items():
                    if name == value:
                        field.value = key
                        found = True
                        break
            if not found:
                raise ValueError(f'{field.name} has no value represented by {value}')
        elif isinstance(value, enum.Enum):
            field.value = value.value
        else:
            field.value = value
        set_attr(field.name, field.value)

    def __setattr__(self, name: str, value: Union[int, str, enum.Enum]):
        super_set_attr = super().__setattr__
        # if name does not represent a field, we directly call the parent __setattr__ method without
        # further processing
        if name in ['_fields', '_field_mapping'] or name not in self._field_mapping:
            super_set_attr(name, value)
            return
        field = self._field_mapping[name]
        if isinstance(field, BitsField):
            self._set_enum_field(field[name], value, super_set_attr)
        elif isinstance(field, CommonField) and hasattr(field, '_enumeration'):
            self._set_enum_field(field, value, super_set_attr)
        else:
            field.value = value
            super_set_attr(name, value)

    @property
    def raw(self) -> bytes:
        """Returns bytes corresponding to what will be sent on the network."""
        return b''.join(field.raw(self) for field in self._fields)

    def __bytes__(self):
        return self.raw

    @property
    def hexdump(self) -> str:
        """Returns tcpdump / wireshark like hexadecimal view of the packet."""
        return hexdump(self.raw)

    @property
    def all_fields_are_computed(self) -> bool:
        """Returns True if all packet fields have been computed using from_bytes class method, False otherwise."""
        for field in self._fields:
            if not field.value_was_computed:
                # a conditional field whose condition is false is never computed
                if isinstance(field, ConditionalField) and not field.condition(self):
                    continue
                else:
                    return False
        return True

    def __eq__(self, other: Any):
        """Packets compare equal when their raw byte representations match."""
        if not isinstance(other, self.__class__):
            # Fix: return NotImplemented instead of raising NotImplementedError,
            # per the data-model protocol, so Python can fall back to the
            # reflected comparison / identity check for foreign types.
            return NotImplemented
        return self.raw == other.raw

    def __ne__(self, other: Any):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def clone(self) -> 'Packet':
        """Returns a copy of the packet."""
        cloned_packet = copy(self)
        cloned_fields = self.fields
        cloned_packet._fields = cloned_fields
        cloned_packet._field_mapping = self._create_field_mapping(cloned_fields)
        return cloned_packet

    def evolve(self, **kwargs) -> 'Packet':
        """
        Returns a new packet with attributes updated by arguments passed as input.

        **Parameters:**

        **kwargs:** keyword-only arguments representing packet attributes with the value to set on them.
        """
        self._check_arguments(kwargs)
        cloned_packet = self.clone()
        for field, value in kwargs.items():
            setattr(cloned_packet, field, value)
        return cloned_packet

    def __repr__(self):
        representation = f'<{self.__class__.__name__}: '
        template = '{name}={value}, '
        for field in self._fields:
            # we don't represent fields where condition is not true
            if isinstance(field, ConditionalField) and not field.condition(self):
                continue
            if isinstance(field, BitsField):
                for field_part in field.parts:
                    value = hex(field_part.value) if field_part.hex else field_part.value
                    representation += template.format(name=field_part.name, value=value)
            else:
                value = hex(field.value) if (hasattr(field, 'hex') and field.hex) else field.value
                representation += template.format(name=field.name, value=value)
        representation = representation[:-2]
        return f'{representation}>'

    def show(self) -> None:
        """Prints a clarified state of packet with type, current and default values of every field."""
        representation = ''
        template = '{name} : {type} = {value} ({default})\n'
        names = sorted(self._field_mapping.keys(), key=len)
        max_length = len(names[-1])
        for field in self._fields:
            # we don't represent fields where condition is not true
            if isinstance(field, ConditionalField) and not field.condition(self):
                continue
            if isinstance(field, ConditionalField):
                class_name = getattr(field, '_field').__class__.__name__
            else:
                class_name = field.__class__.__name__
            if isinstance(field, BitsField):
                for field_part in field.parts:
                    value = hex(field_part.value) if field_part.hex else field_part.value
                    default = hex(field_part.default) if field_part.hex else field_part.default
                    representation += template.format(
                        name=field_part.name.ljust(max_length),
                        type=f'{field_part.__class__.__name__} of {class_name}',
                        value=value,
                        default=default
                    )
            else:
                # consistency fix: guard with hasattr like __repr__ does, since
                # not every field type exposes a 'hex' attribute
                use_hex = hasattr(field, 'hex') and field.hex
                value = hex(field.value) if use_hex else field.value
                default = hex(field.default) if use_hex else field.default
                representation += template.format(
                    name=field.name.ljust(max_length), type=class_name, value=value, default=default
                )
        print(representation, end='')
def create_packet_class(name: str, fields: Iterable[Field]) -> Type[Packet]:
    """
    Creates and returns a packet class.

    **Parameters:**

    **name:** The name of the class to create.

    **fields:** An iterable of fields that compose the packet.
    """
    # Annotation fix: ``type(Packet)`` evaluates to the builtin ``type``;
    # ``Type[Packet]`` (typing is already imported) is the intended hint.
    if not isinstance(name, str):
        raise TypeError(f'class name must be a string but you provided {name}')
    if not fields:
        raise ValueError('the list of fields must not be empty')
    for field in fields:
        if not isinstance(field, Field):
            raise TypeError(f'each item in the list must be a Field object but you provided {field}')
    # build the class dynamically with Packet as the sole base
    return type(name, (Packet,), {'__fields__': fields})
def extract_layers(data: bytes, *args: Type[Packet]) -> List[Packet]:
    """
    Extract various packet from raw binary data.

    For example, imagine you want to send an [ICMP](https://en.wikipedia.org/wiki/Internet_Control_Message_Protocol)
    ping request. You will probably need to create two Packet classes, ICMP and IP and send the sum of them over
    the network i.e something like `socket.sendto(ICMP(...).raw + IP(...).raw, address)`.
    Now you want to get the ICMP reply, how to get it? You can't use `ICMP.from_bytes` because you will have IP and
    ICMP returned all at once! The solution is to use *extract_layers* with code like the following:
    `icmp, ip = extract_layers(socket.recvfrom(1024)[0], ICMP, IP)`

    ** Parameters: **

    * **data:** The raw bytes to parse.
    * **args:** A list of Packet layers used to reconstruct the expected layers. You must provide at least one
    class, if not, you will get an error.
    """
    if not isinstance(data, bytes):
        raise TypeError(f'data must be bytes but you provided {data}')
    if not args:
        raise ValueError('you must provide at least one Packet subclass to use for layer extraction')
    for packet_class in args:
        if not inspect.isclass(packet_class) or not issubclass(packet_class, Packet):
            raise TypeError(
                f'all arguments following the given data must be subclasses'
                f' of Packet class but you provided {packet_class}'
            )
    packets = []
    cursor = 0
    for packet_class in args:
        packet = packet_class.from_bytes(data[cursor:])
        # Bug fix: the offset must accumulate across layers. The original
        # assignment (cursor = len(packet.raw)) parsed the third and later
        # layers from the wrong offset.
        cursor += len(packet.raw)
        packets.append(packet)
    return packets
| 38.160883 | 116 | 0.614698 | import enum
import inspect
from copy import copy
from typing import Iterable, Dict, Union, Any, Callable, List, Type
from kifurushi.utils.network import hexdump
from .abc import Field, CommonField
from .fields import BitsField, FieldPart, ConditionalField
class Packet:
__fields__: Iterable[Field] = None
def __init__(self, **kwargs):
self._fields = [field.clone() for field in self.__fields__]
self._field_mapping = self._create_field_mapping(self._fields)
self._check_arguments(kwargs)
value_mapping = {**self._get_default_values(), **kwargs}
for name, value in value_mapping.items():
setattr(self, name, value)
@staticmethod
def _create_field_mapping(fields: List[Field]) -> Dict[str, Field]:
error_message = 'you already have a field with name {name}'
field_mapping = {}
for field in fields:
if isinstance(field, BitsField):
for field_part in field.parts:
if field_part.name in field_mapping:
raise AttributeError(error_message.format(name=field_part.name))
field_mapping[field_part.name] = field
else:
if field.name in field_mapping:
raise AttributeError(error_message.format(name=field.name))
field_mapping[field.name] = field
return field_mapping
def _check_arguments(self, arguments: Dict[str, Any]) -> None:
for argument in arguments:
if argument not in self._field_mapping:
raise AttributeError(f'there is no attribute with name {argument}')
def _get_default_values(self) -> Dict[str, Union[str, int]]:
result = {}
for name, field in self._field_mapping.items():
if isinstance(field, BitsField):
result[name] = field[name].value
else:
result[name] = field.value
return result
@staticmethod
def _set_packet_attributes(packet: 'Packet') -> None:
for name, field in packet._field_mapping.items():
value = field[name].value if isinstance(field, BitsField) else field.value
setattr(packet, name, value)
@staticmethod
def _set_packet_attribute(field: Field, packet: 'Packet') -> None:
if isinstance(field, BitsField):
for field_part in field.parts:
setattr(packet, field_part.name, field_part.value)
else:
setattr(packet, field.name, field.value)
@classmethod
def from_bytes(cls, data: bytes) -> 'Packet':
packet = cls()
for field in packet._fields:
data = field.compute_value(data, packet)
cls._set_packet_attribute(field, packet)
return packet
@classmethod
def random_packet(cls) -> 'Packet':
packet = cls()
for field in packet._fields:
field.value = field.random_value()
cls._set_packet_attributes(packet)
return packet
@property
def fields(self) -> List[Field]:
return [field.clone() for field in self._fields]
@staticmethod
def _set_enum_field(
field: Union[CommonField, FieldPart],
value: Union[int, str, enum.Enum],
set_attr: Callable[[str, Any], None]
) -> None:
if isinstance(value, str):
found = False
if field.enumeration is not None:
for key, name in field.enumeration.items():
if name == value:
field.value = key
found = True
break
if not found:
raise ValueError(f'{field.name} has no value represented by {value}')
elif isinstance(value, enum.Enum):
field.value = value.value
else:
field.value = value
set_attr(field.name, field.value)
def __setattr__(self, name: str, value: Union[int, str, enum.Enum]):
super_set_attr = super().__setattr__
if name in ['_fields', '_field_mapping'] or name not in self._field_mapping:
super_set_attr(name, value)
return
field = self._field_mapping[name]
if isinstance(field, BitsField):
self._set_enum_field(field[name], value, super_set_attr)
elif isinstance(field, CommonField) and hasattr(field, '_enumeration'):
self._set_enum_field(field, value, super_set_attr)
else:
field.value = value
super_set_attr(name, value)
@property
def raw(self) -> bytes:
return b''.join(field.raw(self) for field in self._fields)
def __bytes__(self):
return self.raw
@property
def hexdump(self) -> str:
return hexdump(self.raw)
@property
def all_fields_are_computed(self) -> bool:
for field in self._fields:
if not field.value_was_computed:
if isinstance(field, ConditionalField) and not field.condition(self):
continue
else:
return False
return True
def __eq__(self, other: Any):
if not isinstance(other, self.__class__):
raise NotImplementedError
return self.raw == other.raw
def __ne__(self, other: Any):
return not self.__eq__(other)
def clone(self) -> 'Packet':
cloned_packet = copy(self)
cloned_fields = self.fields
cloned_packet._fields = cloned_fields
cloned_packet._field_mapping = self._create_field_mapping(cloned_fields)
return cloned_packet
def evolve(self, **kwargs) -> 'Packet':
    """Return a clone of this packet with the given field values replaced.

    Keyword names are validated first; each one is then assigned on the
    clone, which routes through ``__setattr__`` and hence the normal
    field machinery.  The original packet is left untouched.
    """
    self._check_arguments(kwargs)
    replica = self.clone()
    for attribute_name, new_value in kwargs.items():
        setattr(replica, attribute_name, new_value)
    return replica
def __repr__(self):
    """Compact one-line summary: ``<ClassName: field=value, ...>``.

    Conditional fields whose condition is false are omitted; bit fields
    are expanded into one ``name=value`` pair per part; values flagged
    ``hex`` are rendered in hexadecimal.
    """
    rendered = []
    for field in self._fields:
        if isinstance(field, ConditionalField) and not field.condition(self):
            continue
        if isinstance(field, BitsField):
            for part in field.parts:
                shown = hex(part.value) if part.hex else part.value
                rendered.append(f'{part.name}={shown}')
        else:
            shown = hex(field.value) if (hasattr(field, 'hex') and field.hex) else field.value
            rendered.append(f'{field.name}={shown}')
    if not rendered:
        # Matches the original's trailing-separator strip when no field
        # is visible: '<ClassName: '[:-2] + '>' == '<ClassName>'.
        return f'<{self.__class__.__name__}>'
    return f'<{self.__class__.__name__}: {", ".join(rendered)}>'
def show(self) -> None:
    """Print a human-readable table of every visible field.

    Each line shows the field name (left-justified to the longest field
    name), the field's class, its current value and its default value.
    Fields guarded by a false ConditionalField condition are omitted,
    and BitsField entries are expanded one line per part.
    """
    representation = ''
    template = '{name} : {type} = {value} ({default})\n'
    # The longest field name drives the column width used by ljust below.
    names = sorted(self._field_mapping.keys(), key=len)
    max_length = len(names[-1])
    for field in self._fields:
        # we don't represent fields where condition is not true
        if isinstance(field, ConditionalField) and not field.condition(self):
            continue
        if isinstance(field, ConditionalField):
            # Report the wrapped field's class, not ConditionalField itself.
            class_name = getattr(field, '_field').__class__.__name__
        else:
            class_name = field.__class__.__name__
        if isinstance(field, BitsField):
            for field_part in field.parts:
                value = hex(field_part.value) if field_part.hex else field_part.value
                default = hex(field_part.default) if field_part.hex else field_part.default
                representation += template.format(
                    name=field_part.name.ljust(max_length),
                    type=f'{field_part.__class__.__name__} of {class_name}',
                    value=value,
                    default=default
                )
        else:
            # NOTE(review): unlike __repr__, this accesses field.hex without
            # a hasattr guard — assumes every non-bits field has it; confirm.
            value = hex(field.value) if field.hex else field.value
            default = hex(field.default) if field.hex else field.default
            representation += template.format(
                name=field.name.ljust(max_length), type=class_name, value=value, default=default
            )
    print(representation, end='')
def create_packet_class(name: str, fields: Iterable[Field]) -> Type[Packet]:
    """Dynamically create and return a Packet subclass.

    :param name: name of the new class.
    :param fields: iterable of Field objects describing the packet layout.
    :raises TypeError: if *name* is not a string or any item is not a Field.
    :raises ValueError: if *fields* is empty.
    """
    if not isinstance(name, str):
        raise TypeError(f'class name must be a string but you provided {name}')
    # Materialize once: the original iterated the argument for validation
    # and then stored it, which left a generator argument exhausted and
    # produced a class with no usable __fields__.
    field_list = list(fields)
    if not field_list:
        raise ValueError('the list of fields must not be empty')
    for field in field_list:
        if not isinstance(field, Field):
            raise TypeError(f'each item in the list must be a Field object but you provided {field}')
    return type(name, (Packet,), {'__fields__': field_list})
def extract_layers(data: bytes, *args: Type[Packet]) -> List[Packet]:
    """Parse *data* as a stack of consecutive packet layers.

    :param data: raw bytes to decode.
    :param args: one Packet subclass per layer, in wire order.
    :returns: the parsed packets, one per requested class.
    :raises TypeError: if *data* is not bytes or an arg is not a Packet subclass.
    :raises ValueError: if no packet class is given.
    """
    if not isinstance(data, bytes):
        raise TypeError(f'data must be bytes but you provided {data}')
    if not args:
        raise ValueError('you must provide at least one Packet subclass to use for layer extraction')
    for packet_class in args:
        if not inspect.isclass(packet_class) or not issubclass(packet_class, Packet):
            raise TypeError(
                f'all arguments following the given data must be subclasses'
                f' of Packet class but you provided {packet_class}'
            )
    packets = []
    cursor = 0
    for packet_class in args:
        packet = packet_class.from_bytes(data[cursor:])
        # Advance past *all* layers parsed so far.  The original reset the
        # offset to just the last layer's length (cursor = len(packet.raw)),
        # which corrupted extraction from the third layer onwards.
        cursor += len(packet.raw)
        packets.append(packet)
    return packets
| true | true |
f728f2be183bdd9218b7e27cbee79e9189e0e109 | 379 | py | Python | HLTrigger/Configuration/python/HLT_75e33/modules/hltEgammaClusterShapeL1Seeded_cfi.py | PKUfudawei/cmssw | 8fbb5ce74398269c8a32956d7c7943766770c093 | [
"Apache-2.0"
] | 1 | 2021-11-30T16:24:46.000Z | 2021-11-30T16:24:46.000Z | HLTrigger/Configuration/python/HLT_75e33/modules/hltEgammaClusterShapeL1Seeded_cfi.py | PKUfudawei/cmssw | 8fbb5ce74398269c8a32956d7c7943766770c093 | [
"Apache-2.0"
] | 4 | 2021-11-29T13:57:56.000Z | 2022-03-29T06:28:36.000Z | HLTrigger/Configuration/python/HLT_75e33/modules/hltEgammaClusterShapeL1Seeded_cfi.py | PKUfudawei/cmssw | 8fbb5ce74398269c8a32956d7c7943766770c093 | [
"Apache-2.0"
] | 1 | 2021-11-30T16:16:05.000Z | 2021-11-30T16:16:05.000Z | import FWCore.ParameterSet.Config as cms
# HLT EDProducer wiring for e/gamma cluster-shape variables: consumes the
# regional ECAL rechit collections (barrel EB and endcap EE) together with
# the L1-seeded RecoEcalCandidate collection.  isIeta presumably selects the
# i-eta flavor of the shape variable — confirm against the producer's C++.
hltEgammaClusterShapeL1Seeded = cms.EDProducer("EgammaHLTClusterShapeProducer",
    ecalRechitEB = cms.InputTag("hltRechitInRegionsECAL","EcalRecHitsEB"),
    ecalRechitEE = cms.InputTag("hltRechitInRegionsECAL","EcalRecHitsEE"),
    isIeta = cms.bool(True),
    recoEcalCandidateProducer = cms.InputTag("hltEgammaCandidatesL1Seeded")
)
| 42.111111 | 79 | 0.796834 | import FWCore.ParameterSet.Config as cms
hltEgammaClusterShapeL1Seeded = cms.EDProducer("EgammaHLTClusterShapeProducer",
ecalRechitEB = cms.InputTag("hltRechitInRegionsECAL","EcalRecHitsEB"),
ecalRechitEE = cms.InputTag("hltRechitInRegionsECAL","EcalRecHitsEE"),
isIeta = cms.bool(True),
recoEcalCandidateProducer = cms.InputTag("hltEgammaCandidatesL1Seeded")
)
| true | true |
f728f3fe761e36ce0b53a9bcf5b8cfaff9cb2a53 | 3,907 | py | Python | src/users/models/componentsschemasmicrosoft_graph_mailfolderallof1.py | peombwa/Sample-Graph-Python-Client | 3396f531fbe6bb40a740767c4e31aee95a3b932e | [
"MIT"
] | null | null | null | src/users/models/componentsschemasmicrosoft_graph_mailfolderallof1.py | peombwa/Sample-Graph-Python-Client | 3396f531fbe6bb40a740767c4e31aee95a3b932e | [
"MIT"
] | null | null | null | src/users/models/componentsschemasmicrosoft_graph_mailfolderallof1.py | peombwa/Sample-Graph-Python-Client | 3396f531fbe6bb40a740767c4e31aee95a3b932e | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ComponentsschemasmicrosoftGraphMailfolderallof1(Model):
    """mailFolder.

    AutoRest-generated msrest model; do not hand-edit field order, as the
    serializer follows _attribute_map insertion order.

    :param display_name:
    :type display_name: str
    :param parent_folder_id:
    :type parent_folder_id: str
    :param child_folder_count:
    :type child_folder_count: int
    :param unread_item_count:
    :type unread_item_count: int
    :param total_item_count:
    :type total_item_count: int
    :param well_known_name:
    :type well_known_name: str
    :param single_value_extended_properties:
    :type single_value_extended_properties:
     list[~users.models.MicrosoftgraphsingleValueLegacyExtendedProperty]
    :param multi_value_extended_properties:
    :type multi_value_extended_properties:
     list[~users.models.MicrosoftgraphmultiValueLegacyExtendedProperty]
    :param messages:
    :type messages: list[~users.models.Microsoftgraphmessage]
    :param message_rules:
    :type message_rules: list[~users.models.MicrosoftgraphmessageRule]
    :param child_folders:
    :type child_folders: list[~users.models.MicrosoftgraphmailFolder]
    :param user_configurations:
    :type user_configurations:
     list[~users.models.MicrosoftgraphuserConfiguration]
    """

    # Signed 32-bit integer bounds for the count properties.
    _validation = {
        'child_folder_count': {'maximum': 2147483647, 'minimum': -2147483648},
        'unread_item_count': {'maximum': 2147483647, 'minimum': -2147483648},
        'total_item_count': {'maximum': 2147483647, 'minimum': -2147483648},
    }

    # Python attribute name -> JSON wire name and msrest serialization type.
    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'parent_folder_id': {'key': 'parentFolderId', 'type': 'str'},
        'child_folder_count': {'key': 'childFolderCount', 'type': 'int'},
        'unread_item_count': {'key': 'unreadItemCount', 'type': 'int'},
        'total_item_count': {'key': 'totalItemCount', 'type': 'int'},
        'well_known_name': {'key': 'wellKnownName', 'type': 'str'},
        'single_value_extended_properties': {'key': 'singleValueExtendedProperties', 'type': '[MicrosoftgraphsingleValueLegacyExtendedProperty]'},
        'multi_value_extended_properties': {'key': 'multiValueExtendedProperties', 'type': '[MicrosoftgraphmultiValueLegacyExtendedProperty]'},
        'messages': {'key': 'messages', 'type': '[Microsoftgraphmessage]'},
        'message_rules': {'key': 'messageRules', 'type': '[MicrosoftgraphmessageRule]'},
        'child_folders': {'key': 'childFolders', 'type': '[MicrosoftgraphmailFolder]'},
        'user_configurations': {'key': 'userConfigurations', 'type': '[MicrosoftgraphuserConfiguration]'},
    }

    def __init__(self, display_name=None, parent_folder_id=None, child_folder_count=None, unread_item_count=None, total_item_count=None, well_known_name=None, single_value_extended_properties=None, multi_value_extended_properties=None, messages=None, message_rules=None, child_folders=None, user_configurations=None):
        # All properties are optional; unset ones serialize as absent.
        super(ComponentsschemasmicrosoftGraphMailfolderallof1, self).__init__()
        self.display_name = display_name
        self.parent_folder_id = parent_folder_id
        self.child_folder_count = child_folder_count
        self.unread_item_count = unread_item_count
        self.total_item_count = total_item_count
        self.well_known_name = well_known_name
        self.single_value_extended_properties = single_value_extended_properties
        self.multi_value_extended_properties = multi_value_extended_properties
        self.messages = messages
        self.message_rules = message_rules
        self.child_folders = child_folders
        self.user_configurations = user_configurations
| 50.089744 | 317 | 0.699514 |
from msrest.serialization import Model
class ComponentsschemasmicrosoftGraphMailfolderallof1(Model):
_validation = {
'child_folder_count': {'maximum': 2147483647, 'minimum': -2147483648},
'unread_item_count': {'maximum': 2147483647, 'minimum': -2147483648},
'total_item_count': {'maximum': 2147483647, 'minimum': -2147483648},
}
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'parent_folder_id': {'key': 'parentFolderId', 'type': 'str'},
'child_folder_count': {'key': 'childFolderCount', 'type': 'int'},
'unread_item_count': {'key': 'unreadItemCount', 'type': 'int'},
'total_item_count': {'key': 'totalItemCount', 'type': 'int'},
'well_known_name': {'key': 'wellKnownName', 'type': 'str'},
'single_value_extended_properties': {'key': 'singleValueExtendedProperties', 'type': '[MicrosoftgraphsingleValueLegacyExtendedProperty]'},
'multi_value_extended_properties': {'key': 'multiValueExtendedProperties', 'type': '[MicrosoftgraphmultiValueLegacyExtendedProperty]'},
'messages': {'key': 'messages', 'type': '[Microsoftgraphmessage]'},
'message_rules': {'key': 'messageRules', 'type': '[MicrosoftgraphmessageRule]'},
'child_folders': {'key': 'childFolders', 'type': '[MicrosoftgraphmailFolder]'},
'user_configurations': {'key': 'userConfigurations', 'type': '[MicrosoftgraphuserConfiguration]'},
}
def __init__(self, display_name=None, parent_folder_id=None, child_folder_count=None, unread_item_count=None, total_item_count=None, well_known_name=None, single_value_extended_properties=None, multi_value_extended_properties=None, messages=None, message_rules=None, child_folders=None, user_configurations=None):
super(ComponentsschemasmicrosoftGraphMailfolderallof1, self).__init__()
self.display_name = display_name
self.parent_folder_id = parent_folder_id
self.child_folder_count = child_folder_count
self.unread_item_count = unread_item_count
self.total_item_count = total_item_count
self.well_known_name = well_known_name
self.single_value_extended_properties = single_value_extended_properties
self.multi_value_extended_properties = multi_value_extended_properties
self.messages = messages
self.message_rules = message_rules
self.child_folders = child_folders
self.user_configurations = user_configurations
| true | true |
f728f51808701cc3a00f24a9a0c6b2a2c87aa602 | 28,011 | py | Python | utils/build_swift/tests/expected_options.py | JiarenWang/swift | ed104a81342d65328ec2ab20bde2399d22858f07 | [
"Apache-2.0"
] | 1 | 2018-10-12T17:49:57.000Z | 2018-10-12T17:49:57.000Z | utils/build_swift/tests/expected_options.py | stevendinggang/swift | 95aeaf7b64c9df798c3b0064129f03e5b97e0e80 | [
"Apache-2.0"
] | 1 | 2019-01-16T23:21:36.000Z | 2019-01-16T23:21:36.000Z | utils/build_swift/tests/expected_options.py | stevendinggang/swift | 95aeaf7b64c9df798c3b0064129f03e5b97e0e80 | [
"Apache-2.0"
] | 2 | 2017-07-20T10:09:21.000Z | 2018-10-15T13:09:30.000Z | # This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2020 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import absolute_import, unicode_literals
import multiprocessing
from build_swift import argparse
from build_swift import defaults
from swift_build_support.swift_build_support import targets
# Public surface of this module: the option-descriptor classes plus the
# expected option/default tables consumed by the build-script tests.
__all__ = [
    'HelpOption',
    'SetOption',
    'SetTrueOption',
    'SetFalseOption',
    'DisableOption',
    'EnableOption',
    'ChoicesOption',
    'IntOption',
    'StrOption',
    'PathOption',
    'AppendOption',
    'UnsupportedOption',
    'IgnoreOption',
    'EXPECTED_OPTIONS',
    'EXPECTED_DEFAULTS',
]
# -----------------------------------------------------------------------------
EXPECTED_DEFAULTS = {
'android': False,
'android_api_level': '21',
'android_deploy_device_path': '/data/local/tmp',
'android_icu_i18n': None,
'android_icu_i18n_include': None,
'android_icu_uc': None,
'android_icu_uc_include': None,
'android_icu_data': None,
'android_ndk': None,
'android_ndk_gcc_version': '4.9',
'android_arch': 'armv7',
'assertions': True,
'benchmark': False,
'benchmark_num_o_iterations': 3,
'benchmark_num_onone_iterations': 3,
'build_android': False,
'build_args': [],
'build_benchmarks': True,
'build_clang_tools_extra': True,
'build_cygwin': True,
'build_external_benchmarks': False,
'build_foundation': False,
'build_cmark': True,
'build_swift': True,
'build_llvm': True,
'build_freebsd': True,
'build_ios': True,
'build_ios_device': False,
'build_ios_simulator': False,
'build_jobs': multiprocessing.cpu_count(),
'build_libdispatch': False,
'build_libicu': False,
'build_linux': True,
'build_llbuild': False,
'build_lldb': False,
'build_libcxx': False,
'build_ninja': False,
'build_osx': True,
'build_playgroundsupport': False,
'build_runtime_with_host_compiler': False,
'build_stdlib_deployment_targets': ['all'],
'build_subdir': None,
'build_swift_dynamic_sdk_overlay': True,
'build_swift_dynamic_stdlib': True,
'build_swift_inspect': False,
'build_swift_static_sdk_overlay': False,
'build_swift_static_stdlib': False,
'build_swift_stdlib_unittest_extra': False,
'build_swiftpm': False,
'build_swift_driver': False,
'build_early_swift_driver': True,
'build_swiftsyntax': False,
'build_libparser_only': False,
'build_skstresstester': False,
'build_swiftformat': False,
'build_swiftevolve': False,
'build_indexstoredb': False,
'test_indexstoredb_sanitize_all': False,
'test_sourcekitlsp_sanitize_all': False,
'build_sourcekitlsp': False,
'install_swiftpm': False,
'install_swiftsyntax': False,
'install_swift_driver': False,
'swiftsyntax_verify_generated_files': False,
'install_playgroundsupport': False,
'install_sourcekitlsp': False,
'install_skstresstester': False,
'install_swiftevolve': False,
'build_toolchainbenchmarks': False,
'build_tvos': True,
'build_tvos_device': False,
'build_tvos_simulator': False,
'build_variant': 'Debug',
'build_watchos': True,
'build_watchos_device': False,
'build_watchos_simulator': False,
'build_xctest': False,
'cmake_c_launcher': None,
'cmake_cxx_launcher': None,
'clang_compiler_version': None,
'clang_profile_instr_use': None,
'clang_user_visible_version': defaults.CLANG_USER_VISIBLE_VERSION,
'clean': False,
'cmake': None,
'cmake_generator': 'Ninja',
'cmark_assertions': True,
'cmark_build_variant': 'Debug',
'compiler_vendor': defaults.COMPILER_VENDOR,
'coverage_db': None,
'cross_compile_hosts': [],
'darwin_deployment_version_ios':
defaults.DARWIN_DEPLOYMENT_VERSION_IOS,
'darwin_deployment_version_osx':
defaults.DARWIN_DEPLOYMENT_VERSION_OSX,
'darwin_deployment_version_tvos':
defaults.DARWIN_DEPLOYMENT_VERSION_TVOS,
'darwin_deployment_version_watchos':
defaults.DARWIN_DEPLOYMENT_VERSION_WATCHOS,
'darwin_symroot_path_filters': [],
'darwin_xcrun_toolchain': None,
'distcc': False,
'sccache': False,
'dry_run': False,
'dsymutil_jobs': defaults.DSYMUTIL_JOBS,
'enable_asan': False,
'enable_experimental_differentiable_programming': True,
'enable_experimental_concurrency': True,
'enable_lsan': False,
'enable_sanitize_coverage': False,
'disable_guaranteed_normal_arguments': False,
'enable_stdlibcore_exclusivity_checking': False,
'enable_tsan': False,
'enable_tsan_runtime': False,
'enable_ubsan': False,
'export_compile_commands': False,
'extra_cmake_options': [],
'extra_swift_args': [],
'force_optimized_typechecker': False,
'foundation_build_variant': 'Debug',
'host_cc': None,
'host_cxx': None,
'host_libtool': None,
'host_lipo': None,
'host_target': targets.StdlibDeploymentTarget.host_target().name,
'host_test': False,
'only_executable_test': False,
'only_non_executable_test': False,
'infer_dependencies': False,
'install_prefix': targets.install_prefix(),
'install_symroot': None,
'install_destdir': None,
'install_all': False,
'ios': False,
'ios_all': False,
'legacy_impl': False,
'libdispatch_build_variant': 'Debug',
'libicu_build_variant': 'Debug',
'lit_args': '-sv',
'llbuild_assertions': True,
'lldb_assertions': True,
'lldb_build_variant': 'Debug',
'lldb_build_with_xcode': '0',
'llvm_assertions': True,
'llvm_build_variant': 'Debug',
'llvm_ninja_targets': [],
'llvm_ninja_targets_for_cross_compile_hosts': [],
'llvm_max_parallel_lto_link_jobs':
defaults.LLVM_MAX_PARALLEL_LTO_LINK_JOBS,
'llvm_targets_to_build': 'X86;ARM;AArch64;PowerPC;SystemZ;Mips',
'tsan_libdispatch_test': False,
'long_test': False,
'lto_type': None,
'maccatalyst': False,
'maccatalyst_ios_tests': False,
'native_clang_tools_path': None,
'native_llvm_tools_path': None,
'native_swift_tools_path': None,
'dump_config': False,
'relocate_xdg_cache_home_under_build_subdir': False,
'show_sdks': False,
'skip_build': False,
'skip_local_build': False,
'stdlib_deployment_targets': None,
'stress_test': False,
'swift_analyze_code_coverage': defaults.SWIFT_ANALYZE_CODE_COVERAGE,
'swift_assertions': True,
'swift_build_variant': 'Debug',
'swift_compiler_version': None,
'swift_disable_dead_stripping': False,
'swift_darwin_module_archs': None,
'swift_darwin_supported_archs': None,
'swift_stdlib_assertions': True,
'swift_stdlib_build_variant': 'Debug',
'swift_tools_max_parallel_lto_link_jobs':
defaults.SWIFT_MAX_PARALLEL_LTO_LINK_JOBS,
'swift_user_visible_version': defaults.SWIFT_USER_VISIBLE_VERSION,
'symbols_package': None,
'clean_llbuild': True,
'clean_swiftpm': True,
'clean_swift_driver': True,
'clean_early_swift_driver': False,
'test': None,
'test_early_swift_driver': None,
'test_android': False,
'test_android_host': False,
'test_cygwin': False,
'test_freebsd': False,
'test_ios': False,
'test_ios_32bit_simulator': False,
'test_watchos_32bit_simulator': True,
'test_ios_host': False,
'test_ios_simulator': False,
'test_linux': False,
'test_optimize_for_size': None,
'test_optimize_none_with_implicit_dynamic': None,
'test_optimized': None,
'test_osx': False,
'test_paths': [],
'test_swift_inspect': True,
'test_tvos': False,
'test_tvos_host': False,
'test_tvos_simulator': False,
'test_watchos': False,
'test_watchos_host': False,
'test_watchos_simulator': False,
'test_playgroundsupport': True,
'test_swiftpm': False,
'test_swift_driver': False,
'test_swiftsyntax': False,
'test_indexstoredb': False,
'test_sourcekitlsp': False,
'test_skstresstester': False,
'test_swiftformat': False,
'test_swiftevolve': False,
'test_toolchainbenchmarks': False,
'tvos': False,
'tvos_all': False,
'validation_test': None,
'verbose_build': False,
'watchos': False,
'watchos_all': False,
'llvm_install_components': defaults.llvm_install_components(),
}
# -----------------------------------------------------------------------------
def _sanitize_option_string(option_string):
    """Translate a CLI option string into its argparse destination name.

    '--foo-bar' becomes 'foo_bar'; a short flag like '-x' becomes 'x'.
    Anything else raises ValueError.
    """
    if option_string.startswith('--'):
        stripped = option_string[2:]
        return stripped.replace('-', '_')
    is_short_flag = len(option_string) == 2 and option_string[0] == '-'
    if is_short_flag:
        return option_string[1]
    raise ValueError('invalid option_string format: ' + option_string)
class _BaseOption(object):
    """Common state shared by every expected-option descriptor.

    ``dest`` defaults to the sanitized form of the option string and
    ``default`` to the matching EXPECTED_DEFAULTS entry (None if absent).
    """

    def __init__(self, option_string, dest=None, default=None):
        self.option_string = option_string
        self.dest = _sanitize_option_string(option_string) if dest is None else dest
        if default is None:
            default = EXPECTED_DEFAULTS.get(self.dest, None)
        self.default = default

    def sanitized_string(self):
        """Return the destination-style form of this option's string."""
        return _sanitize_option_string(self.option_string)
class HelpOption(_BaseOption):
    """Option that prints the help message and exits.

    Treated specially by the tests because parsing -h/--help calls
    sys.exit(0) (see the note in EXPECTED_OPTIONS).
    """
    pass
class SetOption(_BaseOption):
    """Option that consumes no argument and stores a fixed value.

    Parsing it writes ``value`` (None when not given) into the
    destination.
    """

    def __init__(self, *args, **kwargs):
        fixed = kwargs.pop('value', None)
        self.value = fixed
        super(SetOption, self).__init__(*args, **kwargs)
class SetTrueOption(_BaseOption):
    """Option that accepts no arguments, setting the destination value to True
    if parsed and defaulting to False otherwise.

    Counterpart of SetFalseOption.
    """
    pass
class SetFalseOption(_BaseOption):
    """Option that accepts no arguments, setting the destination value to False
    if parsed and defaulting to True otherwise.

    Counterpart of SetTrueOption.
    """
    pass
class EnableOption(_BaseOption):
    """Option that sets the destination to True when parsed and False by default.
    Can be toggled True or False with an optional bool argument.

    Unlike SetTrueOption, the toggle argument is passed through as-is.
    """
    pass
class DisableOption(_BaseOption):
    """Option that sets the destination to False when parsed and True by default.
    Can be toggled True or False with an optional bool argument, which is then
    negated. Thus if an option is passed the value 'True' it will set the
    destination to False and vice versa.

    Used for the '--skip-*' family of flags in EXPECTED_OPTIONS.
    """
    pass
class ChoicesOption(_BaseOption):
    """Option whose argument must come from a predefined list of choices."""

    def __init__(self, *args, **kwargs):
        allowed = kwargs.pop('choices', None)
        self.choices = allowed
        super(ChoicesOption, self).__init__(*args, **kwargs)
class IntOption(_BaseOption):
    """Option that accepts an int argument (marker class, no extra state)."""
    pass
class StrOption(_BaseOption):
    """Option that accepts a str argument (marker class, no extra state)."""
    pass
class PathOption(_BaseOption):
    """Option that accepts a path argument (marker class, no extra state)."""
    pass
class AppendOption(_BaseOption):
    """Option that can be called more than once to append argument to internal
    list.
    """
    pass
class UnsupportedOption(_BaseOption):
    """Option that is not supported (expected to be rejected by the parser)."""
    pass
class IgnoreOption(_BaseOption):
    """Option that should be ignored when generating tests. Instead a test
    should be written manually as the behavior cannot or should not be auto-
    generated.
    """
    pass
class BuildScriptImplOption(_BaseOption):
    """Option that gets forwarded to build-script-impl by migration.py and is
    only listed for disambiguation by argparse.
    """
    pass
# -----------------------------------------------------------------------------
EXPECTED_OPTIONS = [
# Ignore the help options since they always call sys.exit(0)
HelpOption('-h', dest='help', default=argparse.SUPPRESS),
HelpOption('--help', dest='help', default=argparse.SUPPRESS),
SetOption('--debug', dest='build_variant', value='Debug'),
SetOption('--debug-cmark', dest='cmark_build_variant', value='Debug'),
SetOption('--debug-foundation',
dest='foundation_build_variant', value='Debug'),
SetOption('--debug-libdispatch',
dest='libdispatch_build_variant', value='Debug'),
SetOption('--debug-libicu', dest='libicu_build_variant', value='Debug'),
SetOption('--debug-lldb', dest='lldb_build_variant', value='Debug'),
SetOption('--lldb-build-with-xcode', dest='lldb_build_with_xcode',
value='1'),
SetOption('--lldb-build-with-cmake', dest='lldb_build_with_xcode',
value='0'),
SetOption('--debug-llvm', dest='llvm_build_variant', value='Debug'),
SetOption('--debug-swift', dest='swift_build_variant', value='Debug'),
SetOption('--debug-swift-stdlib',
dest='swift_stdlib_build_variant', value='Debug'),
SetOption('--eclipse',
dest='cmake_generator', value='Eclipse CDT4 - Ninja'),
SetOption('--make', dest='cmake_generator', value='Unix Makefiles'),
SetOption('--release', dest='build_variant', value='Release'),
SetOption('--release-debuginfo',
dest='build_variant', value='RelWithDebInfo'),
SetOption('--min-size-release',
dest='build_variant', value='MinSizeRel'),
SetOption('--xcode', dest='cmake_generator', value='Xcode'),
SetOption('-R', dest='build_variant', value='Release'),
SetOption('-d', dest='build_variant', value='Debug'),
SetOption('-e', dest='cmake_generator', value='Eclipse CDT4 - Ninja'),
SetOption('-m', dest='cmake_generator', value='Unix Makefiles'),
SetOption('-r', dest='build_variant', value='RelWithDebInfo'),
SetOption('-x', dest='cmake_generator', value='Xcode'),
# FIXME: Convert these options to set_true actions
SetOption('--assertions', value=True),
SetOption('--cmark-assertions', value=True),
SetOption('--lldb-assertions', value=True),
SetOption('--llvm-assertions', value=True),
SetOption('--llbuild-assertions', value=True),
SetOption('--swift-assertions', value=True),
SetOption('--swift-stdlib-assertions', value=True),
SetOption('-T', dest='validation_test', value=True),
SetOption('-o', dest='test_optimized', value=True),
SetOption('-s', dest='test_optimize_for_size', value=True),
SetOption('-y',
dest='test_optimize_none_with_implicit_dynamic', value=True),
SetOption('-t', dest='test', value=True),
SetOption('-a', dest='assertions', value=True),
# FIXME: Convert these options to set_false actions
SetOption('--no-assertions', dest='assertions', value=False),
SetOption('-A', dest='assertions', value=False),
SetOption('--no-lldb-assertions', dest='lldb_assertions', value=False),
SetOption('--no-llvm-assertions', dest='llvm_assertions', value=False),
SetOption('--no-llbuild-assertions',
dest='llbuild_assertions', value=False),
SetOption('--no-swift-assertions', dest='swift_assertions', value=False),
SetOption('--no-swift-stdlib-assertions',
dest='swift_stdlib_assertions', value=False),
SetOption('--skip-ios', dest='ios', value=False),
SetOption('--skip-tvos', dest='tvos', value=False),
SetOption('--skip-watchos', dest='watchos', value=False),
SetOption('--skip-test-early-swift-driver',
dest='test_early_swift_driver', value=False),
SetTrueOption('--benchmark'),
SetTrueOption('--clean'),
SetTrueOption('--dry-run'),
SetTrueOption('--dump-config'),
SetTrueOption('--disable-guaranteed-normal-arguments'),
SetTrueOption('--enable-stdlibcore-exclusivity-checking'),
SetTrueOption('--force-optimized-typechecker'),
SetTrueOption('--ios'),
SetTrueOption('--llbuild', dest='build_llbuild'),
SetTrueOption('--lldb', dest='build_lldb'),
SetTrueOption('--libcxx', dest='build_libcxx'),
SetTrueOption('--maccatalyst', dest='maccatalyst'),
SetTrueOption('--maccatalyst-ios-tests', dest='maccatalyst_ios_tests'),
SetTrueOption('--playgroundsupport', dest='build_playgroundsupport'),
SetTrueOption('--install-playgroundsupport',
dest='install_playgroundsupport'),
SetTrueOption('--skip-build'),
SetTrueOption('--swiftpm', dest='build_swiftpm'),
SetTrueOption('--swift-driver', dest='build_swift_driver'),
SetTrueOption('--swiftsyntax', dest='build_swiftsyntax'),
SetTrueOption('--build-libparser-only', dest='build_libparser_only'),
SetTrueOption('--skstresstester', dest='build_skstresstester'),
SetTrueOption('--swiftformat', dest='build_swiftformat'),
SetTrueOption('--swiftevolve', dest='build_swiftevolve'),
SetTrueOption('-B', dest='benchmark'),
SetTrueOption('-S', dest='skip_build'),
SetTrueOption('-b', dest='build_llbuild'),
SetTrueOption('-c', dest='clean'),
SetTrueOption('-i', dest='ios'),
SetTrueOption('-l', dest='build_lldb'),
SetTrueOption('-n', dest='dry_run'),
SetTrueOption('-p', dest='build_swiftpm'),
SetTrueOption('--legacy-impl', dest='legacy_impl'),
SetTrueOption('--infer', dest='infer_dependencies'),
EnableOption('--android'),
EnableOption('--build-external-benchmarks'),
EnableOption('--build-ninja'),
EnableOption('--build-runtime-with-host-compiler'),
EnableOption('--build-swift-dynamic-sdk-overlay'),
EnableOption('--build-swift-dynamic-stdlib'),
EnableOption('--build-swift-static-sdk-overlay'),
EnableOption('--build-swift-static-stdlib'),
EnableOption('--build-swift-stdlib-unittest-extra'),
EnableOption('--distcc'),
EnableOption('--sccache'),
EnableOption('--enable-asan'),
EnableOption('--enable-experimental-differentiable-programming'),
EnableOption('--enable-experimental-concurrency'),
EnableOption('--enable-lsan'),
EnableOption('--enable-sanitize-coverage'),
EnableOption('--enable-tsan'),
EnableOption('--enable-tsan-runtime'),
EnableOption('--enable-ubsan'),
EnableOption('--export-compile-commands'),
EnableOption('--foundation', dest='build_foundation'),
EnableOption('--host-test'),
EnableOption('--only-executable-test'),
EnableOption('--only-non-executable-test'),
EnableOption('--libdispatch', dest='build_libdispatch'),
EnableOption('--libicu', dest='build_libicu'),
EnableOption('--indexstore-db', dest='build_indexstoredb'),
EnableOption('--test-indexstore-db-sanitize-all',
dest='test_indexstoredb_sanitize_all'),
EnableOption('--sourcekit-lsp', dest='build_sourcekitlsp'),
EnableOption('--test-sourcekit-lsp-sanitize-all',
dest='test_sourcekitlsp_sanitize_all'),
EnableOption('--install-swiftsyntax', dest='install_swiftsyntax'),
EnableOption('--swiftsyntax-verify-generated-files',
dest='swiftsyntax_verify_generated_files'),
EnableOption('--install-swiftpm', dest='install_swiftpm'),
EnableOption('--install-swift-driver', dest='install_swift_driver'),
EnableOption('--install-sourcekit-lsp', dest='install_sourcekitlsp'),
EnableOption('--install-skstresstester', dest='install_skstresstester'),
EnableOption('--install-swiftevolve', dest='install_swiftevolve'),
EnableOption('--toolchain-benchmarks', dest='build_toolchainbenchmarks'),
EnableOption('--swift-inspect', dest='build_swift_inspect'),
EnableOption('--tsan-libdispatch-test'),
EnableOption('--long-test'),
EnableOption('--show-sdks'),
EnableOption('--skip-local-build'),
EnableOption('--stress-test'),
EnableOption('--test'),
EnableOption('--test-optimize-for-size'),
EnableOption('--test-optimize-none-with-implicit-dynamic'),
EnableOption('--test-optimized'),
EnableOption('--tvos'),
EnableOption('--validation-test'),
EnableOption('--verbose-build'),
EnableOption('--watchos'),
EnableOption('--xctest', dest='build_xctest'),
EnableOption('--swift-disable-dead-stripping'),
EnableOption('--clean-early-swift-driver', dest='clean_early_swift_driver'),
DisableOption('--skip-build-cmark', dest='build_cmark'),
DisableOption('--skip-build-llvm', dest='build_llvm'),
DisableOption('--skip-build-swift', dest='build_swift'),
DisableOption('--skip-build-android', dest='build_android'),
DisableOption('--skip-build-benchmarks', dest='build_benchmarks'),
DisableOption('--skip-build-cygwin', dest='build_cygwin'),
DisableOption('--skip-build-freebsd', dest='build_freebsd'),
DisableOption('--skip-build-ios', dest='build_ios'),
DisableOption('--skip-build-ios-device', dest='build_ios_device'),
DisableOption('--skip-build-ios-simulator',
dest='build_ios_simulator'),
DisableOption('--skip-build-linux', dest='build_linux'),
DisableOption('--skip-build-osx', dest='build_osx'),
DisableOption('--skip-build-tvos', dest='build_tvos'),
DisableOption('--skip-build-tvos-device', dest='build_tvos_device'),
DisableOption('--skip-build-tvos-simulator',
dest='build_tvos_simulator'),
DisableOption('--skip-build-watchos', dest='build_watchos'),
DisableOption('--skip-build-watchos-device',
dest='build_watchos_device'),
DisableOption('--skip-build-watchos-simulator',
dest='build_watchos_simulator'),
DisableOption('--skip-clean-llbuild', dest='clean_llbuild'),
DisableOption('--skip-early-swift-driver', dest='build_early_swift_driver'),
DisableOption('--skip-clean-swiftpm', dest='clean_swiftpm'),
DisableOption('--skip-clean-swift-driver', dest='clean_swift_driver'),
DisableOption('--skip-test-android', dest='test_android'),
DisableOption('--skip-test-android-host', dest='test_android_host'),
DisableOption('--skip-test-cygwin', dest='test_cygwin'),
DisableOption('--skip-test-freebsd', dest='test_freebsd'),
DisableOption('--skip-test-ios', dest='test_ios'),
DisableOption('--skip-test-ios-32bit-simulator',
dest='test_ios_32bit_simulator'),
DisableOption('--skip-test-watchos-32bit-simulator',
dest='test_watchos_32bit_simulator'),
DisableOption('--skip-test-ios-host', dest='test_ios_host'),
DisableOption('--skip-test-ios-simulator', dest='test_ios_simulator'),
DisableOption('--skip-test-linux', dest='test_linux'),
DisableOption('--skip-test-osx', dest='test_osx'),
DisableOption('--skip-test-tvos', dest='test_tvos'),
DisableOption('--skip-test-tvos-host', dest='test_tvos_host'),
DisableOption('--skip-test-tvos-simulator',
dest='test_tvos_simulator'),
DisableOption('--skip-test-watchos', dest='test_watchos'),
DisableOption('--skip-test-watchos-host', dest='test_watchos_host'),
DisableOption('--skip-test-watchos-simulator',
dest='test_watchos_simulator'),
DisableOption('--skip-test-playgroundsupport',
dest='test_playgroundsupport'),
DisableOption('--skip-test-swiftpm', dest='test_swiftpm'),
DisableOption('--skip-test-swift-driver', dest='test_swift_driver'),
DisableOption('--skip-test-swiftsyntax', dest='test_swiftsyntax'),
DisableOption('--skip-test-indexstore-db', dest='test_indexstoredb'),
DisableOption('--skip-test-sourcekit-lsp', dest='test_sourcekitlsp'),
DisableOption('--skip-test-skstresstester', dest='test_skstresstester'),
DisableOption('--skip-test-swiftformat', dest='test_swiftformat'),
DisableOption('--skip-test-swiftevolve', dest='test_swiftevolve'),
DisableOption('--skip-test-toolchain-benchmarks',
dest='test_toolchainbenchmarks'),
DisableOption('--skip-test-swift-inspect',
dest='test_swift_inspect'),
DisableOption('--skip-build-clang-tools-extra',
dest='build_clang_tools_extra'),
ChoicesOption('--android-ndk-gcc-version',
choices=['4.8', '4.9']),
ChoicesOption('--compiler-vendor',
choices=['none', 'apple']),
ChoicesOption('--swift-analyze-code-coverage',
choices=['false', 'not-merged', 'merged']),
ChoicesOption('--android-arch',
choices=['armv7', 'aarch64']),
StrOption('--android-api-level'),
StrOption('--build-args'),
StrOption('--build-stdlib-deployment-targets'),
StrOption('--darwin-deployment-version-ios'),
StrOption('--darwin-deployment-version-osx'),
StrOption('--darwin-deployment-version-tvos'),
StrOption('--darwin-deployment-version-watchos'),
StrOption('--darwin-xcrun-toolchain'),
StrOption('--host-target'),
StrOption('--lit-args'),
StrOption('--llvm-targets-to-build'),
StrOption('--stdlib-deployment-targets'),
StrOption('--swift-darwin-module-archs'),
StrOption('--swift-darwin-supported-archs'),
PathOption('--android-deploy-device-path'),
PathOption('--android-icu-i18n'),
PathOption('--android-icu-i18n-include'),
PathOption('--android-icu-uc'),
PathOption('--android-icu-uc-include'),
PathOption('--android-icu-data'),
PathOption('--android-ndk'),
PathOption('--build-subdir'),
SetTrueOption('--relocate-xdg-cache-home-under-build-subdir'),
PathOption('--clang-profile-instr-use'),
PathOption('--cmake'),
PathOption('--coverage-db'),
PathOption('--host-cc'),
PathOption('--host-cxx'),
PathOption('--host-libtool'),
PathOption('--host-lipo'),
PathOption('--install-prefix'),
PathOption('--install-symroot'),
PathOption('--install-destdir'),
EnableOption('--install-all'),
PathOption('--native-clang-tools-path'),
PathOption('--native-llvm-tools-path'),
PathOption('--native-swift-tools-path'),
PathOption('--symbols-package'),
PathOption('--cmake-c-launcher'),
PathOption('--cmake-cxx-launcher'),
IntOption('--benchmark-num-o-iterations'),
IntOption('--benchmark-num-onone-iterations'),
IntOption('--jobs', dest='build_jobs'),
IntOption('--llvm-max-parallel-lto-link-jobs'),
IntOption('--swift-tools-max-parallel-lto-link-jobs'),
IntOption('-j', dest='build_jobs'),
IntOption('--dsymutil-jobs', dest='dsymutil_jobs'),
AppendOption('--cross-compile-hosts'),
AppendOption('--extra-cmake-options'),
AppendOption('--extra-swift-args'),
AppendOption('--test-paths'),
AppendOption('--llvm-ninja-targets'),
AppendOption('--llvm-ninja-targets-for-cross-compile-hosts'),
AppendOption('--darwin-symroot-path-filters'),
UnsupportedOption('--build-jobs'),
UnsupportedOption('--common-cmake-options'),
UnsupportedOption('--only-execute'),
UnsupportedOption('--skip-test-optimize-for-size'),
UnsupportedOption('--skip-test-optimize-none-with-implicit-dynamic'),
UnsupportedOption('--skip-test-optimized'),
# Options forwared to build-script-impl
BuildScriptImplOption('--skip-test-swift', dest='impl_skip_test_swift'),
BuildScriptImplOption('--install-swift', dest='impl_install_swift'),
# NOTE: LTO flag is a special case that acts both as an option and has
# valid choices
SetOption('--lto', dest='lto_type'),
ChoicesOption('--lto', dest='lto_type', choices=['thin', 'full']),
# NOTE: We'll need to manually test the behavior of these since they
# validate compiler version strings.
IgnoreOption('--clang-compiler-version'),
IgnoreOption('--clang-user-visible-version'),
IgnoreOption('--swift-compiler-version'),
IgnoreOption('--swift-user-visible-version'),
# TODO: Migrate to unavailable options once new parser is in place
IgnoreOption('-I'),
IgnoreOption('--ios-all'),
IgnoreOption('--tvos-all'),
IgnoreOption('--watchos-all'),
StrOption('--llvm-install-components'),
]
| 38.266393 | 81 | 0.676627 |
from __future__ import absolute_import, unicode_literals
import multiprocessing
from build_swift import argparse
from build_swift import defaults
from swift_build_support.swift_build_support import targets
# Public API of this module. Consistency fix: BuildScriptImplOption is a
# public class defined below and used in EXPECTED_OPTIONS, but it was missing
# from __all__, so `from ... import *` consumers could not see it.
__all__ = [
    'HelpOption',
    'SetOption',
    'SetTrueOption',
    'SetFalseOption',
    'DisableOption',
    'EnableOption',
    'ChoicesOption',
    'IntOption',
    'StrOption',
    'PathOption',
    'AppendOption',
    'UnsupportedOption',
    'IgnoreOption',
    'BuildScriptImplOption',
    'EXPECTED_OPTIONS',
    'EXPECTED_DEFAULTS',
]
# Expected default value for every build-script option, keyed by the option's
# argparse destination name. The test suite checks that parsing an empty
# command line yields exactly these values. Entries that are not literals are
# computed from the host environment (multiprocessing, targets) or from the
# project-wide `defaults` module.
EXPECTED_DEFAULTS = {
    'android': False,
    'android_api_level': '21',
    'android_deploy_device_path': '/data/local/tmp',
    'android_icu_i18n': None,
    'android_icu_i18n_include': None,
    'android_icu_uc': None,
    'android_icu_uc_include': None,
    'android_icu_data': None,
    'android_ndk': None,
    'android_ndk_gcc_version': '4.9',
    'android_arch': 'armv7',
    'assertions': True,
    'benchmark': False,
    'benchmark_num_o_iterations': 3,
    'benchmark_num_onone_iterations': 3,
    'build_android': False,
    'build_args': [],
    'build_benchmarks': True,
    'build_clang_tools_extra': True,
    'build_cygwin': True,
    'build_external_benchmarks': False,
    'build_foundation': False,
    'build_cmark': True,
    'build_swift': True,
    'build_llvm': True,
    'build_freebsd': True,
    'build_ios': True,
    'build_ios_device': False,
    'build_ios_simulator': False,
    # Derived from the machine running the tests, not a constant.
    'build_jobs': multiprocessing.cpu_count(),
    'build_libdispatch': False,
    'build_libicu': False,
    'build_linux': True,
    'build_llbuild': False,
    'build_lldb': False,
    'build_libcxx': False,
    'build_ninja': False,
    'build_osx': True,
    'build_playgroundsupport': False,
    'build_runtime_with_host_compiler': False,
    'build_stdlib_deployment_targets': ['all'],
    'build_subdir': None,
    'build_swift_dynamic_sdk_overlay': True,
    'build_swift_dynamic_stdlib': True,
    'build_swift_inspect': False,
    'build_swift_static_sdk_overlay': False,
    'build_swift_static_stdlib': False,
    'build_swift_stdlib_unittest_extra': False,
    'build_swiftpm': False,
    'build_swift_driver': False,
    'build_early_swift_driver': True,
    'build_swiftsyntax': False,
    'build_libparser_only': False,
    'build_skstresstester': False,
    'build_swiftformat': False,
    'build_swiftevolve': False,
    'build_indexstoredb': False,
    'test_indexstoredb_sanitize_all': False,
    'test_sourcekitlsp_sanitize_all': False,
    'build_sourcekitlsp': False,
    'install_swiftpm': False,
    'install_swiftsyntax': False,
    'install_swift_driver': False,
    'swiftsyntax_verify_generated_files': False,
    'install_playgroundsupport': False,
    'install_sourcekitlsp': False,
    'install_skstresstester': False,
    'install_swiftevolve': False,
    'build_toolchainbenchmarks': False,
    'build_tvos': True,
    'build_tvos_device': False,
    'build_tvos_simulator': False,
    'build_variant': 'Debug',
    'build_watchos': True,
    'build_watchos_device': False,
    'build_watchos_simulator': False,
    'build_xctest': False,
    'cmake_c_launcher': None,
    'cmake_cxx_launcher': None,
    'clang_compiler_version': None,
    'clang_profile_instr_use': None,
    'clang_user_visible_version': defaults.CLANG_USER_VISIBLE_VERSION,
    'clean': False,
    'cmake': None,
    'cmake_generator': 'Ninja',
    'cmark_assertions': True,
    'cmark_build_variant': 'Debug',
    'compiler_vendor': defaults.COMPILER_VENDOR,
    'coverage_db': None,
    'cross_compile_hosts': [],
    # Darwin deployment versions come from the shared defaults module so the
    # expectations track the project-wide settings.
    'darwin_deployment_version_ios':
        defaults.DARWIN_DEPLOYMENT_VERSION_IOS,
    'darwin_deployment_version_osx':
        defaults.DARWIN_DEPLOYMENT_VERSION_OSX,
    'darwin_deployment_version_tvos':
        defaults.DARWIN_DEPLOYMENT_VERSION_TVOS,
    'darwin_deployment_version_watchos':
        defaults.DARWIN_DEPLOYMENT_VERSION_WATCHOS,
    'darwin_symroot_path_filters': [],
    'darwin_xcrun_toolchain': None,
    'distcc': False,
    'sccache': False,
    'dry_run': False,
    'dsymutil_jobs': defaults.DSYMUTIL_JOBS,
    'enable_asan': False,
    'enable_experimental_differentiable_programming': True,
    'enable_experimental_concurrency': True,
    'enable_lsan': False,
    'enable_sanitize_coverage': False,
    'disable_guaranteed_normal_arguments': False,
    'enable_stdlibcore_exclusivity_checking': False,
    'enable_tsan': False,
    'enable_tsan_runtime': False,
    'enable_ubsan': False,
    'export_compile_commands': False,
    'extra_cmake_options': [],
    'extra_swift_args': [],
    'force_optimized_typechecker': False,
    'foundation_build_variant': 'Debug',
    'host_cc': None,
    'host_cxx': None,
    'host_libtool': None,
    'host_lipo': None,
    # Also environment-derived, like build_jobs above.
    'host_target': targets.StdlibDeploymentTarget.host_target().name,
    'host_test': False,
    'only_executable_test': False,
    'only_non_executable_test': False,
    'infer_dependencies': False,
    'install_prefix': targets.install_prefix(),
    'install_symroot': None,
    'install_destdir': None,
    'install_all': False,
    'ios': False,
    'ios_all': False,
    'legacy_impl': False,
    'libdispatch_build_variant': 'Debug',
    'libicu_build_variant': 'Debug',
    'lit_args': '-sv',
    'llbuild_assertions': True,
    'lldb_assertions': True,
    'lldb_build_variant': 'Debug',
    'lldb_build_with_xcode': '0',
    'llvm_assertions': True,
    'llvm_build_variant': 'Debug',
    'llvm_ninja_targets': [],
    'llvm_ninja_targets_for_cross_compile_hosts': [],
    'llvm_max_parallel_lto_link_jobs':
        defaults.LLVM_MAX_PARALLEL_LTO_LINK_JOBS,
    'llvm_targets_to_build': 'X86;ARM;AArch64;PowerPC;SystemZ;Mips',
    'tsan_libdispatch_test': False,
    'long_test': False,
    'lto_type': None,
    'maccatalyst': False,
    'maccatalyst_ios_tests': False,
    'native_clang_tools_path': None,
    'native_llvm_tools_path': None,
    'native_swift_tools_path': None,
    'dump_config': False,
    'relocate_xdg_cache_home_under_build_subdir': False,
    'show_sdks': False,
    'skip_build': False,
    'skip_local_build': False,
    'stdlib_deployment_targets': None,
    'stress_test': False,
    'swift_analyze_code_coverage': defaults.SWIFT_ANALYZE_CODE_COVERAGE,
    'swift_assertions': True,
    'swift_build_variant': 'Debug',
    'swift_compiler_version': None,
    'swift_disable_dead_stripping': False,
    'swift_darwin_module_archs': None,
    'swift_darwin_supported_archs': None,
    'swift_stdlib_assertions': True,
    'swift_stdlib_build_variant': 'Debug',
    'swift_tools_max_parallel_lto_link_jobs':
        defaults.SWIFT_MAX_PARALLEL_LTO_LINK_JOBS,
    'swift_user_visible_version': defaults.SWIFT_USER_VISIBLE_VERSION,
    'symbols_package': None,
    'clean_llbuild': True,
    'clean_swiftpm': True,
    'clean_swift_driver': True,
    'clean_early_swift_driver': False,
    'test': None,
    'test_early_swift_driver': None,
    'test_android': False,
    'test_android_host': False,
    'test_cygwin': False,
    'test_freebsd': False,
    'test_ios': False,
    'test_ios_32bit_simulator': False,
    'test_watchos_32bit_simulator': True,
    'test_ios_host': False,
    'test_ios_simulator': False,
    'test_linux': False,
    'test_optimize_for_size': None,
    'test_optimize_none_with_implicit_dynamic': None,
    'test_optimized': None,
    'test_osx': False,
    'test_paths': [],
    'test_swift_inspect': True,
    'test_tvos': False,
    'test_tvos_host': False,
    'test_tvos_simulator': False,
    'test_watchos': False,
    'test_watchos_host': False,
    'test_watchos_simulator': False,
    'test_playgroundsupport': True,
    'test_swiftpm': False,
    'test_swift_driver': False,
    'test_swiftsyntax': False,
    'test_indexstoredb': False,
    'test_sourcekitlsp': False,
    'test_skstresstester': False,
    'test_swiftformat': False,
    'test_swiftevolve': False,
    'test_toolchainbenchmarks': False,
    'tvos': False,
    'tvos_all': False,
    'validation_test': None,
    'verbose_build': False,
    'watchos': False,
    'watchos_all': False,
    'llvm_install_components': defaults.llvm_install_components(),
}
def _sanitize_option_string(option_string):
if option_string.startswith('--'):
return option_string[2:].replace('-', '_')
if len(option_string) == 2 and option_string[0] == '-':
return option_string[1]
raise ValueError('invalid option_string format: ' + option_string)
class _BaseOption(object):
    """Common state shared by every expected-option record.

    When ``dest`` is omitted it is derived from the option string, and when
    ``default`` is omitted it is looked up in ``EXPECTED_DEFAULTS``.
    """

    def __init__(self, option_string, dest=None, default=None):
        resolved_dest = dest
        if resolved_dest is None:
            resolved_dest = _sanitize_option_string(option_string)
        resolved_default = default
        if resolved_default is None:
            resolved_default = EXPECTED_DEFAULTS.get(resolved_dest, None)
        self.option_string = option_string
        self.dest = resolved_dest
        self.default = resolved_default

    def sanitized_string(self):
        """Return the attribute-style form of this option's flag."""
        return _sanitize_option_string(self.option_string)
class HelpOption(_BaseOption):
    """Expected option that requests usage output (used for ``-h``/``--help``)."""
    pass
class SetOption(_BaseOption):
    """Expected option that stores a constant ``value`` into its destination."""

    def __init__(self, *args, **kwargs):
        # Pull out our keyword before delegating the rest to the base class.
        stored_value = kwargs.pop('value', None)
        super(SetOption, self).__init__(*args, **kwargs)
        self.value = stored_value
class SetTrueOption(_BaseOption):
    """Expected option whose presence sets its destination to True."""
    pass
class SetFalseOption(_BaseOption):
    """Expected option whose presence sets its destination to False."""
    pass
class EnableOption(_BaseOption):
    """Expected option that enables a feature (e.g. ``--android``)."""
    pass
class DisableOption(_BaseOption):
    """Expected option that disables a feature (e.g. the ``--skip-*`` flags)."""
    pass
class ChoicesOption(_BaseOption):
    """Expected option restricted to a fixed set of allowed values."""

    def __init__(self, *args, **kwargs):
        # Pull out our keyword before delegating the rest to the base class.
        allowed = kwargs.pop('choices', None)
        super(ChoicesOption, self).__init__(*args, **kwargs)
        self.choices = allowed
class IntOption(_BaseOption):
    """Expected option that takes an integer argument."""
    pass
class StrOption(_BaseOption):
    """Expected option that takes a string argument."""
    pass
class PathOption(_BaseOption):
    """Expected option that takes a filesystem path argument."""
    pass
class AppendOption(_BaseOption):
    """Expected option that appends each occurrence to a list destination."""
    pass
class UnsupportedOption(_BaseOption):
    """Expected option that the parser is expected to reject as unsupported."""
    pass
class IgnoreOption(_BaseOption):
    """Expected option deliberately excluded from automated checks."""
    pass
class BuildScriptImplOption(_BaseOption):
    """Expected option that is forwarded on to build-script-impl."""
    pass
# The full catalogue of options the build-script argument parser is expected
# to accept, each wrapped in the record class describing its expected
# behavior. The test suite walks this list to validate the parser.
EXPECTED_OPTIONS = [
    HelpOption('-h', dest='help', default=argparse.SUPPRESS),
    HelpOption('--help', dest='help', default=argparse.SUPPRESS),
    # Options that store a constant value into their destination.
    SetOption('--debug', dest='build_variant', value='Debug'),
    SetOption('--debug-cmark', dest='cmark_build_variant', value='Debug'),
    SetOption('--debug-foundation',
              dest='foundation_build_variant', value='Debug'),
    SetOption('--debug-libdispatch',
              dest='libdispatch_build_variant', value='Debug'),
    SetOption('--debug-libicu', dest='libicu_build_variant', value='Debug'),
    SetOption('--debug-lldb', dest='lldb_build_variant', value='Debug'),
    SetOption('--lldb-build-with-xcode', dest='lldb_build_with_xcode',
              value='1'),
    SetOption('--lldb-build-with-cmake', dest='lldb_build_with_xcode',
              value='0'),
    SetOption('--debug-llvm', dest='llvm_build_variant', value='Debug'),
    SetOption('--debug-swift', dest='swift_build_variant', value='Debug'),
    SetOption('--debug-swift-stdlib',
              dest='swift_stdlib_build_variant', value='Debug'),
    SetOption('--eclipse',
              dest='cmake_generator', value='Eclipse CDT4 - Ninja'),
    SetOption('--make', dest='cmake_generator', value='Unix Makefiles'),
    SetOption('--release', dest='build_variant', value='Release'),
    SetOption('--release-debuginfo',
              dest='build_variant', value='RelWithDebInfo'),
    SetOption('--min-size-release',
              dest='build_variant', value='MinSizeRel'),
    SetOption('--xcode', dest='cmake_generator', value='Xcode'),
    SetOption('-R', dest='build_variant', value='Release'),
    SetOption('-d', dest='build_variant', value='Debug'),
    SetOption('-e', dest='cmake_generator', value='Eclipse CDT4 - Ninja'),
    SetOption('-m', dest='cmake_generator', value='Unix Makefiles'),
    SetOption('-r', dest='build_variant', value='RelWithDebInfo'),
    SetOption('-x', dest='cmake_generator', value='Xcode'),
    SetOption('--assertions', value=True),
    SetOption('--cmark-assertions', value=True),
    SetOption('--lldb-assertions', value=True),
    SetOption('--llvm-assertions', value=True),
    SetOption('--llbuild-assertions', value=True),
    SetOption('--swift-assertions', value=True),
    SetOption('--swift-stdlib-assertions', value=True),
    SetOption('-T', dest='validation_test', value=True),
    SetOption('-o', dest='test_optimized', value=True),
    SetOption('-s', dest='test_optimize_for_size', value=True),
    SetOption('-y',
              dest='test_optimize_none_with_implicit_dynamic', value=True),
    SetOption('-t', dest='test', value=True),
    SetOption('-a', dest='assertions', value=True),
    SetOption('--no-assertions', dest='assertions', value=False),
    SetOption('-A', dest='assertions', value=False),
    SetOption('--no-lldb-assertions', dest='lldb_assertions', value=False),
    SetOption('--no-llvm-assertions', dest='llvm_assertions', value=False),
    SetOption('--no-llbuild-assertions',
              dest='llbuild_assertions', value=False),
    SetOption('--no-swift-assertions', dest='swift_assertions', value=False),
    SetOption('--no-swift-stdlib-assertions',
              dest='swift_stdlib_assertions', value=False),
    SetOption('--skip-ios', dest='ios', value=False),
    SetOption('--skip-tvos', dest='tvos', value=False),
    SetOption('--skip-watchos', dest='watchos', value=False),
    SetOption('--skip-test-early-swift-driver',
              dest='test_early_swift_driver', value=False),
    # Flags whose presence sets their destination to True.
    SetTrueOption('--benchmark'),
    SetTrueOption('--clean'),
    SetTrueOption('--dry-run'),
    SetTrueOption('--dump-config'),
    SetTrueOption('--disable-guaranteed-normal-arguments'),
    SetTrueOption('--enable-stdlibcore-exclusivity-checking'),
    SetTrueOption('--force-optimized-typechecker'),
    SetTrueOption('--ios'),
    SetTrueOption('--llbuild', dest='build_llbuild'),
    SetTrueOption('--lldb', dest='build_lldb'),
    SetTrueOption('--libcxx', dest='build_libcxx'),
    SetTrueOption('--maccatalyst', dest='maccatalyst'),
    SetTrueOption('--maccatalyst-ios-tests', dest='maccatalyst_ios_tests'),
    SetTrueOption('--playgroundsupport', dest='build_playgroundsupport'),
    SetTrueOption('--install-playgroundsupport',
                  dest='install_playgroundsupport'),
    SetTrueOption('--skip-build'),
    SetTrueOption('--swiftpm', dest='build_swiftpm'),
    SetTrueOption('--swift-driver', dest='build_swift_driver'),
    SetTrueOption('--swiftsyntax', dest='build_swiftsyntax'),
    SetTrueOption('--build-libparser-only', dest='build_libparser_only'),
    SetTrueOption('--skstresstester', dest='build_skstresstester'),
    SetTrueOption('--swiftformat', dest='build_swiftformat'),
    SetTrueOption('--swiftevolve', dest='build_swiftevolve'),
    SetTrueOption('-B', dest='benchmark'),
    SetTrueOption('-S', dest='skip_build'),
    SetTrueOption('-b', dest='build_llbuild'),
    SetTrueOption('-c', dest='clean'),
    SetTrueOption('-i', dest='ios'),
    SetTrueOption('-l', dest='build_lldb'),
    SetTrueOption('-n', dest='dry_run'),
    SetTrueOption('-p', dest='build_swiftpm'),
    SetTrueOption('--legacy-impl', dest='legacy_impl'),
    SetTrueOption('--infer', dest='infer_dependencies'),
    # Feature-enabling options.
    EnableOption('--android'),
    EnableOption('--build-external-benchmarks'),
    EnableOption('--build-ninja'),
    EnableOption('--build-runtime-with-host-compiler'),
    EnableOption('--build-swift-dynamic-sdk-overlay'),
    EnableOption('--build-swift-dynamic-stdlib'),
    EnableOption('--build-swift-static-sdk-overlay'),
    EnableOption('--build-swift-static-stdlib'),
    EnableOption('--build-swift-stdlib-unittest-extra'),
    EnableOption('--distcc'),
    EnableOption('--sccache'),
    EnableOption('--enable-asan'),
    EnableOption('--enable-experimental-differentiable-programming'),
    EnableOption('--enable-experimental-concurrency'),
    EnableOption('--enable-lsan'),
    EnableOption('--enable-sanitize-coverage'),
    EnableOption('--enable-tsan'),
    EnableOption('--enable-tsan-runtime'),
    EnableOption('--enable-ubsan'),
    EnableOption('--export-compile-commands'),
    EnableOption('--foundation', dest='build_foundation'),
    EnableOption('--host-test'),
    EnableOption('--only-executable-test'),
    EnableOption('--only-non-executable-test'),
    EnableOption('--libdispatch', dest='build_libdispatch'),
    EnableOption('--libicu', dest='build_libicu'),
    EnableOption('--indexstore-db', dest='build_indexstoredb'),
    EnableOption('--test-indexstore-db-sanitize-all',
                 dest='test_indexstoredb_sanitize_all'),
    EnableOption('--sourcekit-lsp', dest='build_sourcekitlsp'),
    EnableOption('--test-sourcekit-lsp-sanitize-all',
                 dest='test_sourcekitlsp_sanitize_all'),
    EnableOption('--install-swiftsyntax', dest='install_swiftsyntax'),
    EnableOption('--swiftsyntax-verify-generated-files',
                 dest='swiftsyntax_verify_generated_files'),
    EnableOption('--install-swiftpm', dest='install_swiftpm'),
    EnableOption('--install-swift-driver', dest='install_swift_driver'),
    EnableOption('--install-sourcekit-lsp', dest='install_sourcekitlsp'),
    EnableOption('--install-skstresstester', dest='install_skstresstester'),
    EnableOption('--install-swiftevolve', dest='install_swiftevolve'),
    EnableOption('--toolchain-benchmarks', dest='build_toolchainbenchmarks'),
    EnableOption('--swift-inspect', dest='build_swift_inspect'),
    EnableOption('--tsan-libdispatch-test'),
    EnableOption('--long-test'),
    EnableOption('--show-sdks'),
    EnableOption('--skip-local-build'),
    EnableOption('--stress-test'),
    EnableOption('--test'),
    EnableOption('--test-optimize-for-size'),
    EnableOption('--test-optimize-none-with-implicit-dynamic'),
    EnableOption('--test-optimized'),
    EnableOption('--tvos'),
    EnableOption('--validation-test'),
    EnableOption('--verbose-build'),
    EnableOption('--watchos'),
    EnableOption('--xctest', dest='build_xctest'),
    EnableOption('--swift-disable-dead-stripping'),
    EnableOption('--clean-early-swift-driver', dest='clean_early_swift_driver'),
    # Feature-disabling options (mostly the --skip-* family).
    DisableOption('--skip-build-cmark', dest='build_cmark'),
    DisableOption('--skip-build-llvm', dest='build_llvm'),
    DisableOption('--skip-build-swift', dest='build_swift'),
    DisableOption('--skip-build-android', dest='build_android'),
    DisableOption('--skip-build-benchmarks', dest='build_benchmarks'),
    DisableOption('--skip-build-cygwin', dest='build_cygwin'),
    DisableOption('--skip-build-freebsd', dest='build_freebsd'),
    DisableOption('--skip-build-ios', dest='build_ios'),
    DisableOption('--skip-build-ios-device', dest='build_ios_device'),
    DisableOption('--skip-build-ios-simulator',
                  dest='build_ios_simulator'),
    DisableOption('--skip-build-linux', dest='build_linux'),
    DisableOption('--skip-build-osx', dest='build_osx'),
    DisableOption('--skip-build-tvos', dest='build_tvos'),
    DisableOption('--skip-build-tvos-device', dest='build_tvos_device'),
    DisableOption('--skip-build-tvos-simulator',
                  dest='build_tvos_simulator'),
    DisableOption('--skip-build-watchos', dest='build_watchos'),
    DisableOption('--skip-build-watchos-device',
                  dest='build_watchos_device'),
    DisableOption('--skip-build-watchos-simulator',
                  dest='build_watchos_simulator'),
    DisableOption('--skip-clean-llbuild', dest='clean_llbuild'),
    DisableOption('--skip-early-swift-driver', dest='build_early_swift_driver'),
    DisableOption('--skip-clean-swiftpm', dest='clean_swiftpm'),
    DisableOption('--skip-clean-swift-driver', dest='clean_swift_driver'),
    DisableOption('--skip-test-android', dest='test_android'),
    DisableOption('--skip-test-android-host', dest='test_android_host'),
    DisableOption('--skip-test-cygwin', dest='test_cygwin'),
    DisableOption('--skip-test-freebsd', dest='test_freebsd'),
    DisableOption('--skip-test-ios', dest='test_ios'),
    DisableOption('--skip-test-ios-32bit-simulator',
                  dest='test_ios_32bit_simulator'),
    DisableOption('--skip-test-watchos-32bit-simulator',
                  dest='test_watchos_32bit_simulator'),
    DisableOption('--skip-test-ios-host', dest='test_ios_host'),
    DisableOption('--skip-test-ios-simulator', dest='test_ios_simulator'),
    DisableOption('--skip-test-linux', dest='test_linux'),
    DisableOption('--skip-test-osx', dest='test_osx'),
    DisableOption('--skip-test-tvos', dest='test_tvos'),
    DisableOption('--skip-test-tvos-host', dest='test_tvos_host'),
    DisableOption('--skip-test-tvos-simulator',
                  dest='test_tvos_simulator'),
    DisableOption('--skip-test-watchos', dest='test_watchos'),
    DisableOption('--skip-test-watchos-host', dest='test_watchos_host'),
    DisableOption('--skip-test-watchos-simulator',
                  dest='test_watchos_simulator'),
    DisableOption('--skip-test-playgroundsupport',
                  dest='test_playgroundsupport'),
    DisableOption('--skip-test-swiftpm', dest='test_swiftpm'),
    DisableOption('--skip-test-swift-driver', dest='test_swift_driver'),
    DisableOption('--skip-test-swiftsyntax', dest='test_swiftsyntax'),
    DisableOption('--skip-test-indexstore-db', dest='test_indexstoredb'),
    DisableOption('--skip-test-sourcekit-lsp', dest='test_sourcekitlsp'),
    DisableOption('--skip-test-skstresstester', dest='test_skstresstester'),
    DisableOption('--skip-test-swiftformat', dest='test_swiftformat'),
    DisableOption('--skip-test-swiftevolve', dest='test_swiftevolve'),
    DisableOption('--skip-test-toolchain-benchmarks',
                  dest='test_toolchainbenchmarks'),
    DisableOption('--skip-test-swift-inspect',
                  dest='test_swift_inspect'),
    DisableOption('--skip-build-clang-tools-extra',
                  dest='build_clang_tools_extra'),
    # Options with a restricted set of valid values.
    ChoicesOption('--android-ndk-gcc-version',
                  choices=['4.8', '4.9']),
    ChoicesOption('--compiler-vendor',
                  choices=['none', 'apple']),
    ChoicesOption('--swift-analyze-code-coverage',
                  choices=['false', 'not-merged', 'merged']),
    ChoicesOption('--android-arch',
                  choices=['armv7', 'aarch64']),
    # String-, path-, and integer-valued options.
    StrOption('--android-api-level'),
    StrOption('--build-args'),
    StrOption('--build-stdlib-deployment-targets'),
    StrOption('--darwin-deployment-version-ios'),
    StrOption('--darwin-deployment-version-osx'),
    StrOption('--darwin-deployment-version-tvos'),
    StrOption('--darwin-deployment-version-watchos'),
    StrOption('--darwin-xcrun-toolchain'),
    StrOption('--host-target'),
    StrOption('--lit-args'),
    StrOption('--llvm-targets-to-build'),
    StrOption('--stdlib-deployment-targets'),
    StrOption('--swift-darwin-module-archs'),
    StrOption('--swift-darwin-supported-archs'),
    PathOption('--android-deploy-device-path'),
    PathOption('--android-icu-i18n'),
    PathOption('--android-icu-i18n-include'),
    PathOption('--android-icu-uc'),
    PathOption('--android-icu-uc-include'),
    PathOption('--android-icu-data'),
    PathOption('--android-ndk'),
    PathOption('--build-subdir'),
    SetTrueOption('--relocate-xdg-cache-home-under-build-subdir'),
    PathOption('--clang-profile-instr-use'),
    PathOption('--cmake'),
    PathOption('--coverage-db'),
    PathOption('--host-cc'),
    PathOption('--host-cxx'),
    PathOption('--host-libtool'),
    PathOption('--host-lipo'),
    PathOption('--install-prefix'),
    PathOption('--install-symroot'),
    PathOption('--install-destdir'),
    EnableOption('--install-all'),
    PathOption('--native-clang-tools-path'),
    PathOption('--native-llvm-tools-path'),
    PathOption('--native-swift-tools-path'),
    PathOption('--symbols-package'),
    PathOption('--cmake-c-launcher'),
    PathOption('--cmake-cxx-launcher'),
    IntOption('--benchmark-num-o-iterations'),
    IntOption('--benchmark-num-onone-iterations'),
    IntOption('--jobs', dest='build_jobs'),
    IntOption('--llvm-max-parallel-lto-link-jobs'),
    IntOption('--swift-tools-max-parallel-lto-link-jobs'),
    IntOption('-j', dest='build_jobs'),
    IntOption('--dsymutil-jobs', dest='dsymutil_jobs'),
    # Options that may appear multiple times and accumulate into a list.
    AppendOption('--cross-compile-hosts'),
    AppendOption('--extra-cmake-options'),
    AppendOption('--extra-swift-args'),
    AppendOption('--test-paths'),
    AppendOption('--llvm-ninja-targets'),
    AppendOption('--llvm-ninja-targets-for-cross-compile-hosts'),
    AppendOption('--darwin-symroot-path-filters'),
    UnsupportedOption('--build-jobs'),
    UnsupportedOption('--common-cmake-options'),
    UnsupportedOption('--only-execute'),
    UnsupportedOption('--skip-test-optimize-for-size'),
    UnsupportedOption('--skip-test-optimize-none-with-implicit-dynamic'),
    UnsupportedOption('--skip-test-optimized'),
    # Options forwarded to build-script-impl
    BuildScriptImplOption('--skip-test-swift', dest='impl_skip_test_swift'),
    BuildScriptImplOption('--install-swift', dest='impl_install_swift'),
    # NOTE: LTO flag is a special case that acts both as an option and has
    # valid choices
    SetOption('--lto', dest='lto_type'),
    ChoicesOption('--lto', dest='lto_type', choices=['thin', 'full']),
    # NOTE: We'll need to manually test the behavior of these since they
    # validate compiler version strings.
    IgnoreOption('--clang-compiler-version'),
    IgnoreOption('--clang-user-visible-version'),
    IgnoreOption('--swift-compiler-version'),
    IgnoreOption('--swift-user-visible-version'),
    # TODO: Migrate to unavailable options once new parser is in place
    IgnoreOption('-I'),
    IgnoreOption('--ios-all'),
    IgnoreOption('--tvos-all'),
    IgnoreOption('--watchos-all'),
    StrOption('--llvm-install-components'),
]
| true | true |
f728f523e3d2d32e7e3c55c564c297fd1e635901 | 1,969 | py | Python | libralli/circcuitpython/adafruit-circuitpython-bundle-7.x-mpy-20211225/examples/displayio_ssd1305_simpletest.py | Yarik9008/SoftAcademic | 118c9dc4620ca444c1557edd141a838820577202 | [
"MIT"
] | 1 | 2022-01-08T16:00:54.000Z | 2022-01-08T16:00:54.000Z | libralli/circcuitpython/adafruit-circuitpython-bundle-7.x-mpy-20211225/examples/displayio_ssd1305_simpletest.py | Yarik9008/SoftAcademic | 118c9dc4620ca444c1557edd141a838820577202 | [
"MIT"
] | 2 | 2020-12-05T02:58:20.000Z | 2021-09-27T16:43:31.000Z | libralli/circcuitpython/adafruit-circuitpython-bundle-7.x-mpy-20211225/examples/displayio_ssd1305_simpletest.py | Yarik9008/SoftAcademic | 118c9dc4620ca444c1557edd141a838820577202 | [
"MIT"
] | 4 | 2019-11-06T16:59:45.000Z | 2021-10-31T22:05:00.000Z | # SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
"""
This test will initialize the display using displayio and draw a solid white
background, a smaller black rectangle, and some white text.
"""
import board
import displayio
import terminalio
from adafruit_display_text import label
import adafruit_displayio_ssd1305
# Release any displays a previously running program may still hold.
displayio.release_displays()
# Reset is used for both SPI and I2C
oled_reset = board.D9
# Use for SPI
spi = board.SPI()
oled_cs = board.D5
oled_dc = board.D6
display_bus = displayio.FourWire(
    spi, command=oled_dc, chip_select=oled_cs, baudrate=1000000, reset=oled_reset
)
# Use for I2C
# i2c = board.I2C()
# display_bus = displayio.I2CDisplay(i2c, device_address=0x3c, reset=oled_reset)
WIDTH = 128
HEIGHT = 64 # Change to 32 if needed
BORDER = 8
FONTSCALE = 1
display = adafruit_displayio_ssd1305.SSD1305(display_bus, width=WIDTH, height=HEIGHT)
# Make the display context
splash = displayio.Group()
display.show(splash)
# Full-screen background layer using a one-color palette.
color_bitmap = displayio.Bitmap(display.width, display.height, 1)
color_palette = displayio.Palette(1)
color_palette[0] = 0xFFFFFF # White
bg_sprite = displayio.TileGrid(color_bitmap, pixel_shader=color_palette, x=0, y=0)
splash.append(bg_sprite)
# Draw a smaller inner rectangle
inner_bitmap = displayio.Bitmap(
    display.width - BORDER * 2, display.height - BORDER * 2, 1
)
inner_palette = displayio.Palette(1)
inner_palette[0] = 0x000000 # Black
inner_sprite = displayio.TileGrid(
    inner_bitmap, pixel_shader=inner_palette, x=BORDER, y=BORDER
)
splash.append(inner_sprite)
# Draw a label
text = "Hello World!"
text_area = label.Label(terminalio.FONT, text=text, color=0xFFFFFF)
text_width = text_area.bounding_box[2] * FONTSCALE
text_group = displayio.Group(
    scale=FONTSCALE,
    x=display.width // 2 - text_width // 2,
    y=display.height // 2,
)
text_group.append(text_area) # Subgroup for text scaling
splash.append(text_group)
# Spin forever so the drawn frame stays on screen.
while True:
    pass
| 26.253333 | 85 | 0.765871 |
"""Initialize an SSD1305 OLED over SPI and draw a white background, a black
inner rectangle, and a centered "Hello World!" label."""
import board
import displayio
import terminalio
from adafruit_display_text import label
import adafruit_displayio_ssd1305
# Release any displays a previously running program may still hold.
displayio.release_displays()
# Reset pin, shared by the SPI and I2C wiring options.
oled_reset = board.D9
# SPI bus and control pins for the display.
spi = board.SPI()
oled_cs = board.D5
oled_dc = board.D6
display_bus = displayio.FourWire(
    spi, command=oled_dc, chip_select=oled_cs, baudrate=1000000, reset=oled_reset
)
WIDTH = 128
HEIGHT = 64  # change to 32 for the smaller panel variant
BORDER = 8
FONTSCALE = 1
display = adafruit_displayio_ssd1305.SSD1305(display_bus, width=WIDTH, height=HEIGHT)
# Root group that holds everything we draw.
splash = displayio.Group()
display.show(splash)
# Full-screen white background layer.
color_bitmap = displayio.Bitmap(display.width, display.height, 1)
color_palette = displayio.Palette(1)
color_palette[0] = 0xFFFFFF
bg_sprite = displayio.TileGrid(color_bitmap, pixel_shader=color_palette, x=0, y=0)
splash.append(bg_sprite)
# Smaller black rectangle inset by BORDER on every side.
inner_bitmap = displayio.Bitmap(
    display.width - BORDER * 2, display.height - BORDER * 2, 1
)
inner_palette = displayio.Palette(1)
inner_palette[0] = 0x000000
inner_sprite = displayio.TileGrid(
    inner_bitmap, pixel_shader=inner_palette, x=BORDER, y=BORDER
)
splash.append(inner_sprite)
# Centered text label, scaled by FONTSCALE via its own subgroup.
text = "Hello World!"
text_area = label.Label(terminalio.FONT, text=text, color=0xFFFFFF)
text_width = text_area.bounding_box[2] * FONTSCALE
text_group = displayio.Group(
    scale=FONTSCALE,
    x=display.width // 2 - text_width // 2,
    y=display.height // 2,
)
text_group.append(text_area)
splash.append(text_group)
# Spin forever so the drawn frame stays on screen.
while True:
    pass
| true | true |
f728f6bebe196dfc999874cdca77d254d18bd01b | 1,799 | py | Python | custom_components/hacs/repositories/hacsrepositorytheme.py | sjabby/home-assistant-conf | 89f4bcea5daeddc1e657bc0d7f6db3197f6bddb8 | [
"MIT"
] | 3 | 2019-03-12T21:27:56.000Z | 2019-05-03T06:18:48.000Z | custom_components/hacs/repositories/hacsrepositorytheme.py | sjabby/home-assistant-config | 89f4bcea5daeddc1e657bc0d7f6db3197f6bddb8 | [
"MIT"
] | null | null | null | custom_components/hacs/repositories/hacsrepositorytheme.py | sjabby/home-assistant-config | 89f4bcea5daeddc1e657bc0d7f6db3197f6bddb8 | [
"MIT"
] | null | null | null | """Blueprint for HacsRepositoryThemes."""
# pylint: disable=too-many-instance-attributes,invalid-name,broad-except,access-member-before-definition
import logging
from .hacsrepositorybase import HacsRepositoryBase
from ..hacsbase.exceptions import HacsRequirement
_LOGGER = logging.getLogger("custom_components.hacs.repository")
class HacsRepositoryThemes(HacsRepositoryBase):
    """
    Set up a HacsRepositoryThemes object.

    repository_name(str): The full name of a repository
    (example: awesome-dev/awesome-repo)
    """

    def __init__(self, repository_name: str, repositoryobject=None):
        """Initialize a HacsRepositoryThemes object."""
        super().__init__()
        self.repository = repositoryobject
        self.repository_name = repository_name
        self.repository_type = "theme"
        self.manifest_content = None
        self.name = repository_name.split("/")[-1]

    async def update(self):
        """Run update tasks."""
        if await self.comperson2_update():
            return
        await self.set_repository_content()

    async def set_repository_content(self):
        """Set repository content attributes from the repository's themes/ dir."""
        first_fetch = self.content_path is None
        if first_fetch:
            self.content_objects = await self.repository.get_contents(
                "themes", self.ref
            )
        # BUG fix: validate the fetched structure BEFORE indexing it.
        # Previously `content_objects[0]` ran first, so a non-list (single
        # file) raised TypeError and an empty directory raised IndexError
        # instead of the intended HacsRequirement.
        if not isinstance(self.content_objects, list) or not self.content_objects:
            raise HacsRequirement("Repository structure does not meet the requirements")
        if first_fetch:
            self.content_path = self.content_objects[0].path
            self.name = self.content_objects[0].name.replace(".yaml", "")
        contentfiles = [obj.name for obj in self.content_objects]
        if contentfiles:
            self.content_files = contentfiles
| 32.709091 | 104 | 0.675931 |
import logging
from .hacsrepositorybase import HacsRepositoryBase
from ..hacsbase.exceptions import HacsRequirement
_LOGGER = logging.getLogger("custom_components.hacs.repository")
class HacsRepositoryThemes(HacsRepositoryBase):
    """HACS repository wrapper for Home Assistant theme repositories."""

    def __init__(self, repository_name: str, repositoryobject=None):
        """Store identity attributes; *repository_name* is 'owner/repo'."""
        super().__init__()
        self.repository = repositoryobject
        self.repository_name = repository_name
        self.repository_type = "theme"
        self.manifest_content = None
        # Default display name: the repo part of 'owner/repo'.
        self.name = repository_name.split("/")[-1]

    async def update(self):
        """Run the shared update flow, then refresh theme content."""
        if await self.comperson2_update():
            return
        await self.set_repository_content()

    async def set_repository_content(self):
        """Fetch themes/ contents and cache path, name and file list."""
        contentfiles = []
        if self.content_path is None:
            # First fetch only; afterwards content_objects is reused.
            self.content_objects = await self.repository.get_contents(
                "themes", self.ref
            )
            # NOTE(review): indexing happens before the isinstance check
            # below — a non-list result raises TypeError here rather than
            # HacsRequirement; confirm intended behavior.
            self.content_path = self.content_objects[0].path
            self.name = self.content_objects[0].name.replace(".yaml", "")
        if not isinstance(self.content_objects, list):
            raise HacsRequirement("Repository structure does not meet the requirements")
        for filename in self.content_objects:
            contentfiles.append(filename.name)
        if contentfiles:
            self.content_files = contentfiles
| true | true |
f728f6d0ff571bb845a5fc0cf5dbf16903315a1b | 3,552 | py | Python | proj/hps_accel/gateware/stream/stream.py | keadwen/CFU-Playground | 74c79158e85e1365170ececd1d91ea3fa48faba0 | [
"Apache-2.0"
] | 240 | 2021-03-09T23:32:09.000Z | 2022-03-28T14:54:08.000Z | proj/hps_accel/gateware/stream/stream.py | keadwen/CFU-Playground | 74c79158e85e1365170ececd1d91ea3fa48faba0 | [
"Apache-2.0"
] | 283 | 2021-03-10T00:09:11.000Z | 2022-03-31T16:39:40.000Z | proj/hps_accel/gateware/stream/stream.py | keadwen/CFU-Playground | 74c79158e85e1365170ececd1d91ea3fa48faba0 | [
"Apache-2.0"
] | 78 | 2021-03-10T06:13:39.000Z | 2022-03-30T14:09:00.000Z | #!/bin/env python
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nmigen import Shape
from nmigen.hdl.rec import Layout, Record
__all__ = ['PayloadDefinition', 'Endpoint', 'connect']
class PayloadDefinition:
    """Defines a stream's payload.

    Attributes
    ----------
    payload_type: Shape or Layout
        The type of the payload.
    stream_layout: Layout
        A record Layout for a stream carrying that payload plus the
        valid/ready handshake signals.
    """

    @staticmethod
    def cast(obj, *, src_loc_at=0):
        """Coerce *obj* to a PayloadDefinition.

        *obj* may already be a PayloadDefinition, or anything Layout
        accepts — a Shape, a Layout, or an iterable of tuples.
        """
        if not isinstance(obj, PayloadDefinition):
            obj = PayloadDefinition(payload_type=obj, src_loc_at=src_loc_at + 1)
        return obj

    def __init__(self, *, payload_type, src_loc_at=0):
        """Record *payload_type* and derive the handshake record layout."""
        handshake_fields = [
            ("valid", Shape()),
            ("ready", Shape()),
            ("payload", payload_type),
        ]
        self.payload_type = payload_type
        self.stream_layout = Layout(handshake_fields, src_loc_at=1 + src_loc_at)
class Endpoint:
    """One endpoint of a stream.

    Parameters
    ----------
    definition: StreamDefintion
        Specifies the payload type and other parameters of this type.
    """

    def __init__(self, definition=None, *, name=None, src_loc_at=0):
        defn = PayloadDefinition.cast(definition)
        rec = Record(defn.stream_layout, name=name, src_loc_at=1 + src_loc_at)
        self.definition = defn
        self._record = rec
        # Expose the handshake fields directly on the endpoint.
        self.valid = rec.valid
        self.ready = rec.ready
        self.payload = rec.payload

    @staticmethod
    def like(other):
        """Create a fresh Endpoint sharing *other*'s definition."""
        return Endpoint(other.definition)

    def is_transferring(self):
        """Is a transfer taking place this cycle?

        True iff valid and ready are both asserted.
        """
        return self.valid & self.ready
def connect(from_endpoint, to_endpoint):
    """Connect an upstream endpoint to a downstream endpoint.

    A convenience wrapper around the three eq statements a connection
    needs; callers may also write them by hand.

    Example uses:
        m.d.comb += connect(one_components_output, another_components_input)
        m.d.comb += connect(my_input, child_input)

    Arguments
    ---------
    from_endpoint:
        The upstream side of the stream. Presents payload and valid.
    to_endpoint:
        The downstream side of the stream. Presents ready.

    Result
    ------
    A list of assignment statements to be used combinatorially.
    """
    assignments = [
        to_endpoint.valid.eq(from_endpoint.valid),
        to_endpoint.payload.eq(from_endpoint.payload),
        from_endpoint.ready.eq(to_endpoint.ready),
    ]
    return assignments
| 27.75 | 75 | 0.642736 |
from nmigen import Shape
from nmigen.hdl.rec import Layout, Record
__all__ = ['PayloadDefinition', 'Endpoint', 'connect']
class PayloadDefinition:
    """Describes a stream payload: its type plus the derived record layout."""
    @staticmethod
    def cast(obj, *, src_loc_at=0):
        # Pass PayloadDefinition instances through unchanged; wrap anything
        # Layout accepts (Shape / Layout / iterable of tuples).
        if isinstance(obj, PayloadDefinition):
            return obj
        return PayloadDefinition(payload_type=obj, src_loc_at=src_loc_at+1)
    def __init__(self, *, payload_type, src_loc_at=0):
        """Store *payload_type* and build the valid/ready/payload Layout."""
        self.payload_type = payload_type
        self.stream_layout = Layout([
            ("valid", Shape()),
            ("ready", Shape()),
            ("payload", payload_type),
        ],
            src_loc_at=1 + src_loc_at
        )
class Endpoint:
    """One endpoint of a stream (valid/ready handshake plus payload)."""
    def __init__(self, definition=None, *, name=None, src_loc_at=0):
        # Coerce the definition, build the backing Record, and expose the
        # handshake fields as attributes.
        self.definition = PayloadDefinition.cast(definition)
        self._record = Record(
            self.definition.stream_layout,
            name=name,
            src_loc_at=1 + src_loc_at)
        self.valid = self._record.valid
        self.ready = self._record.ready
        self.payload = self._record.payload
    @staticmethod
    def like(other):
        """Return a fresh Endpoint sharing *other*'s definition."""
        return Endpoint(other.definition)
    def is_transferring(self):
        """True in cycles where both valid and ready are asserted."""
        return self.valid & self.ready
def connect(from_endpoint, to_endpoint):
    """Return combinatorial assignments wiring upstream to downstream.

    Drives the downstream valid/payload from upstream and the upstream
    ready from downstream.
    """
    return [
        to_endpoint.valid.eq(from_endpoint.valid),
        to_endpoint.payload.eq(from_endpoint.payload),
        from_endpoint.ready.eq(to_endpoint.ready),
    ]
| true | true |
f728f7c271ee7c605af31c7fecdfcc131b9064e8 | 19,096 | py | Python | src/pytools/dheng/dhcore.py | septag/darkhammer | dd7e32737059ce6dba0aa79f1ae4a59137db06a7 | [
"BSD-2-Clause"
] | 68 | 2015-01-06T08:38:32.000Z | 2022-01-20T15:26:17.000Z | src/pytools/dheng/dhcore.py | septag/darkhammer | dd7e32737059ce6dba0aa79f1ae4a59137db06a7 | [
"BSD-2-Clause"
] | null | null | null | src/pytools/dheng/dhcore.py | septag/darkhammer | dd7e32737059ce6dba0aa79f1ae4a59137db06a7 | [
"BSD-2-Clause"
] | 12 | 2015-10-31T11:30:15.000Z | 2020-04-13T18:31:17.000Z | import sys, os, inspect
from ctypes import *
import math
MY_DIR = os.path.dirname(os.path.abspath(inspect.getframeinfo(inspect.currentframe())[0]))
HELPER_DIR = os.path.abspath(os.path.join(MY_DIR, '..', 'helpers'))
sys.path.append(HELPER_DIR)
import dhlog
class _API:
    """Loader and ctypes binding table for the native dhcore library.

    Prototype declarations reference ctypes Structures (Matrix3, Quat, Vec3)
    defined later in this module; that is safe because init() is only
    called at the bottom of the file, after those classes exist.
    """
    is_init = False

    @staticmethod
    def init(debug=False):
        """Load the dhcore shared library and declare its C prototypes.

        debug: when True, load the '-dbg' build of the library.
        Exits the process if the platform is unsupported or loading fails.
        """
        if _API.is_init:
            return

        postfix = ''
        if debug:
            postfix = '-dbg'
        if sys.platform == 'win32':
            shlib = 'dhcore' + postfix + '.dll'
        elif sys.platform.startswith('linux'):
            shlib = 'libdhcore' + postfix + '.so'
        else:
            # BUG fix: 'shlib' was left unbound on any other platform
            # (e.g. macOS), producing a confusing NameError below instead
            # of a clear diagnostic.
            dhlog.Log.fatal('unsupported platform: %s' % sys.platform)
            sys.exit(-1)

        # load library
        try:
            dhcorelib = cdll.LoadLibrary(shlib)
        except:
            dhlog.Log.warn(str(sys.exc_info()[1]))
            dhlog.Log.fatal('could not load dynamic library %s' % shlib)
            sys.exit(-1)

        dhlog.Log.msgline('module "%s" loaded' % shlib, dhlog.TERM_GREEN)

        # core.h
        _API.core_init = dhcorelib.core_init
        _API.core_init.restype = c_int
        _API.core_init.argtypes = [c_uint]
        _API.core_release = dhcorelib.core_release
        _API.core_release.argtypes = [c_int]

        # err.h
        _API.err_getstring = dhcorelib.err_getstring
        _API.err_getstring.restype = c_char_p

        # log.h
        _API.log_outputconsole = dhcorelib.log_outputconsole
        _API.log_outputconsole.restype = c_uint
        _API.log_outputconsole.argtypes = [c_int]
        _API.log_outputfile = dhcorelib.log_outputfile
        _API.log_outputfile.restype = c_uint
        _API.log_outputfile.argtypes = [c_int, c_char_p]
        _API.log_isfile = dhcorelib.log_isfile
        _API.log_isfile.restype = c_int
        _API.log_isconsole = dhcorelib.log_isconsole
        _API.log_isconsole.restype = c_int
        _API.log_print = dhcorelib.log_print
        _API.log_print.argtypes = [c_uint, c_char_p]

        # file-io.h
        _API.fio_addvdir = dhcorelib.fio_addvdir
        _API.fio_addvdir.restype = c_int
        _API.fio_addvdir.argtypes = [c_char_p, c_int]

        # vec-math.h  (duplicate declarations for mat3_set_roteuler and
        # mat3_inv that existed here previously were redundant no-ops and
        # have been removed)
        _API.mat3_muls = dhcorelib.mat3_muls
        _API.mat3_muls.restype = POINTER(Matrix3)
        _API.mat3_muls.argtypes = [POINTER(Matrix3), POINTER(Matrix3), c_float]
        _API.mat3_set_roteuler = dhcorelib.mat3_set_roteuler
        _API.mat3_set_roteuler.restype = POINTER(Matrix3)
        _API.mat3_set_roteuler.argtypes = [POINTER(Matrix3), c_float, c_float, c_float]
        _API.quat_slerp = dhcorelib.quat_slerp
        _API.quat_slerp.restype = POINTER(Quat)
        _API.quat_slerp.argtypes = [POINTER(Quat), POINTER(Quat), POINTER(Quat), c_float]
        _API.quat_fromaxis = dhcorelib.quat_fromaxis
        _API.quat_fromaxis.restype = POINTER(Quat)
        _API.quat_fromaxis.argtypes = [POINTER(Quat), POINTER(Vec3), c_float]
        _API.quat_fromeuler = dhcorelib.quat_fromeuler
        _API.quat_fromeuler.restype = POINTER(Quat)
        _API.quat_fromeuler.argtypes = [POINTER(Quat), c_float, c_float, c_float]
        _API.quat_frommat3 = dhcorelib.quat_frommat3
        _API.quat_frommat3.restype = POINTER(Quat)
        _API.quat_frommat3.argtypes = [POINTER(Quat), POINTER(Matrix3)]
        _API.mat3_set_rotaxis = dhcorelib.mat3_set_rotaxis
        _API.mat3_set_rotaxis.restype = POINTER(Matrix3)
        _API.mat3_set_rotaxis.argtypes = [POINTER(Matrix3), POINTER(Vec3), c_float]
        _API.mat3_set_rotquat = dhcorelib.mat3_set_rotquat
        _API.mat3_set_rotquat.restype = POINTER(Matrix3)
        _API.mat3_set_rotquat.argtypes = [POINTER(Matrix3), POINTER(Quat)]
        _API.mat3_inv = dhcorelib.mat3_inv
        _API.mat3_inv.restype = POINTER(Matrix3)
        _API.mat3_inv.argtypes = [POINTER(Matrix3), POINTER(Matrix3)]
        _API.mat3_det = dhcorelib.mat3_det
        _API.mat3_det.restype = c_float
        _API.mat3_det.argtypes = [POINTER(Matrix3)]

        _API.is_init = True
def IS_FAIL(r):
    """Return True when an engine result code *r* indicates failure (<= 0)."""
    return r <= 0
# Sentinel values mirroring the engine's C-side "invalid" markers
# (all-ones 64-bit handle and 32-bit index).
INVALID_HANDLE = 0xffffffffffffffff
INVALID_INDEX = 0xffffffff
def to_cstr(s):
    """ASCII-encode *s* and wrap it in a NUL-terminated ctypes char buffer."""
    encoded = s.encode('ascii')
    return create_string_buffer(encoded)
class Errors:
    """Read-only access to the engine's error state."""

    @staticmethod
    def last_error():
        """Return the most recent engine error message, decoded to str."""
        raw = _API.err_getstring()
        return raw.decode()
class Log:
    """Thin wrapper over the engine's logging API."""

    class LogType:
        """Log message categories; values mirror the engine's C enum."""
        TEXT = 0
        ERROR = 1
        # BUG fix: these were written as `3,` / `3,`, creating 1-tuples
        # that c_uint() in msg() rejects.  The enum is sequential, so
        # WARNING is 2 and INFO is 3 (confirm against the engine's C enum).
        WARNING = 2
        INFO = 3
        LOAD = 4

    @staticmethod
    def set_console_output(enable):
        """Enable or disable console log output."""
        _API.log_outputconsole(c_int(enable))

    @staticmethod
    def set_file_output(logfile):
        """Route log output to *logfile*; pass None to disable file output."""
        if logfile != None:
            _API.log_outputfile(c_int(True), create_string_buffer(logfile.encode('ascii')))
        else:
            _API.log_outputfile(c_int(False), None)

    @staticmethod
    def msg(log_type, msg):
        """Emit *msg* with the given LogType category."""
        _API.log_print(c_uint(log_type), create_string_buffer(msg.encode('ascii')))
class Core:
    """Engine core start-up / shutdown wrapper."""

    class InitFlags():
        """Bit flags selecting which core subsystems to initialize."""
        TRACE_MEM = (1<<0)
        CRASH_DUMP = (1<<1)
        LOGGER = (1<<2)
        ERRORS = (1<<3)
        JSON = (1<<4)
        FILE_IO = (1<<5)
        TIMER = (1<<6)
        ALL = 0xffffffff

    @staticmethod
    def init(flags = InitFlags.ALL):
        """Initialize the selected engine subsystems; raise on failure."""
        result = _API.core_init(c_uint(flags))
        if IS_FAIL(result):
            raise Exception(_API.err_getstring())

    @staticmethod
    def release(report_leaks = True):
        """Shut the core down; optionally report leaked allocations."""
        _API.core_release(c_int(report_leaks))
class Vec3(Structure):
    """3D float vector backed by a ctypes struct (x, y, z plus w padding).

    The fourth component ``w`` is homogeneous padding and is always stored
    as 1; the ``_w`` constructor argument is accepted for signature
    compatibility only.
    """
    _fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('w', c_float)]

    def __init__(self, _x = 0, _y = 0, _z = 0, _w = 1):
        self.x = _x
        self.y = _y
        self.z = _z
        self.w = 1

    def __add__(a, b):
        return Vec3(a.x + b.x, a.y + b.y, a.z + b.z)

    def __mul__(a, b):
        """Scalar multiply, or transform by a Matrix3 (row-vector convention,
        translation row applied)."""
        if type(b) is float or type(b) is int:
            return Vec3(a.x*b, a.y*b, a.z*b)
        elif type(b) is Matrix3:
            return Vec3(
                a.x*b.m11 + a.y*b.m21 + a.z*b.m31 + b.m41,
                a.x*b.m12 + a.y*b.m22 + a.z*b.m32 + b.m42,
                a.x*b.m13 + a.y*b.m23 + a.z*b.m33 + b.m43)
        # BUG fix: previously fell through and silently returned None.
        return NotImplemented

    def copy(self):
        """Return an independent copy of this vector."""
        return Vec3(self.x, self.y, self.z)

    def __truediv__(a, b):
        # BUG fix: only the Python-2 __div__ hook existed, so `v / s`
        # raised TypeError under Python 3.  __div__ is kept as an alias.
        return Vec3(a.x/b, a.y/b, a.z/b)
    __div__ = __truediv__

    def __eq__(a, b):
        return a.x == b.x and a.y == b.y and a.z == b.z

    def __sub__(a, b):
        return Vec3(a.x - b.x, a.y - b.y, a.z - b.z)

    def get_length(self):
        """Euclidean length of the (x, y, z) part."""
        return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
    length = property(get_length)

    @staticmethod
    def dot(a, b):
        """Dot product of the (x, y, z) parts."""
        return a.x*b.x + a.y*b.y + a.z*b.z

    @staticmethod
    def normalize(v):
        """Return *v* scaled to unit length."""
        scale = 1.0 / v.length
        return Vec3(v.x*scale, v.y*scale, v.z*scale)

    @staticmethod
    def cross(v1, v2):
        """Right-handed cross product."""
        return Vec3(v1.y*v2.z - v1.z*v2.y, v1.z*v2.x - v1.x*v2.z, v1.x*v2.y - v1.y*v2.x)

    @staticmethod
    def lerp(v1, v2, t):
        """Linear interpolation: returns v1 at t=0 and v2 at t=1."""
        return Vec3(
            v1.x + t*(v2.x - v1.x),
            v1.y + t*(v2.y - v1.y),
            v1.z + t*(v2.z - v1.z))

    def __str__(self):
        return 'Vec3: %f, %f, %f' % (self.x, self.y, self.z)
class Vec2(Structure):
    """2D float vector backed by a ctypes struct."""
    _fields_ = [('x', c_float), ('y', c_float)]

    def __init__(self, _x = 0, _y = 0):
        self.x = _x
        self.y = _y

    def copy(self):
        """Return an independent copy of this vector."""
        return Vec2(self.x, self.y)

    def __add__(a, b):
        return Vec2(a.x + b.x, a.y + b.y)

    def __sub__(a, b):
        return Vec2(a.x - b.x, a.y - b.y)

    def __mul__(a, b):
        """Scalar multiply."""
        return Vec2(a.x*b, a.y*b)

    def __truediv__(a, b):
        # BUG fix: only the Python-2 __div__ hook existed, so `v / s`
        # raised TypeError under Python 3.  __div__ is kept as an alias.
        return Vec2(a.x/b, a.y/b)
    __div__ = __truediv__

    def __str__(self):
        return 'Vec2: %f, %f' % (self.x, self.y)
class Vec2i(Structure):
    """2D integer vector; constructor arguments are truncated to int."""
    _fields_ = [('x', c_int), ('y', c_int)]

    def __init__(self, _x = 0, _y = 0):
        self.x = int(_x)
        self.y = int(_y)

    def copy(self):
        """Return an independent copy of this vector."""
        return Vec2i(self.x, self.y)

    # BUG fix: the arithmetic operators previously returned float Vec2
    # objects (copy-paste from Vec2), silently losing the integer type.
    def __add__(a, b):
        return Vec2i(a.x + b.x, a.y + b.y)

    def __sub__(a, b):
        return Vec2i(a.x - b.x, a.y - b.y)

    def __mul__(a, b):
        """Scalar multiply (result truncated to int)."""
        return Vec2i(a.x*b, a.y*b)

    def __str__(self):
        return 'Vec2i: %d, %d' % (self.x, self.y)
class Vec4(Structure):
    """4D float vector backed by a ctypes struct."""
    _fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('w', c_float)]

    def __init__(self, _x = 0, _y = 0, _z = 0, _w = 1):
        self.x = _x
        self.y = _y
        self.z = _z
        # BUG fix: w was hard-coded to 1, silently discarding _w even though
        # every operator below reads and combines w.
        self.w = _w

    def copy(self):
        """Return an independent copy of this vector."""
        return Vec4(self.x, self.y, self.z, self.w)

    def __add__(a, b):
        return Vec4(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w)

    def __sub__(a, b):
        return Vec4(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w)

    def __mul__(a, b):
        """Scalar multiply."""
        return Vec4(a.x*b, a.y*b, a.z*b, a.w*b)

    def __truediv__(a, b):
        # BUG fix: only the Python-2 __div__ hook existed, so `v / s`
        # raised TypeError under Python 3.  __div__ is kept as an alias.
        return Vec4(a.x/b, a.y/b, a.z/b, a.w/b)
    __div__ = __truediv__

    def __str__(self):
        return 'Vec4: %f, %f, %f, %f' % (self.x, self.y, self.z, self.w)
class Color(Structure):
    """RGBA float color backed by a ctypes struct."""
    _fields_ = [('r', c_float), ('g', c_float), ('b', c_float), ('a', c_float)]

    def __init__(self, _r = 0, _g = 0, _b = 0, _a = 1):
        self.r = _r
        self.g = _g
        self.b = _b
        self.a = _a

    def copy(self):
        """Return an independent copy of this color."""
        return Color(self.r, self.g, self.b, self.a)

    def __mul__(a, b):
        """Scalar multiply (alpha untouched), or component-wise modulate by
        another Color (alpha takes the more transparent of the two).

        BUG fix: two __mul__ defs previously shadowed each other (the
        scalar form was dead), and both computed the blue channel from the
        green channel (a.g instead of a.b).
        """
        if type(b) is Color:
            return Color(a.r*b.r, a.g*b.g, a.b*b.b, min(a.a, b.a))
        return Color(a.r*b, a.g*b, a.b*b, a.a)

    def __add__(a, b):
        return Color(a.r+b.r, a.g+b.g, a.b+b.b, max(a.a, b.a))

    @staticmethod
    def lerp(c1, c2, t):
        """Linear interpolation: returns c1 at t=0 and c2 at t=1.

        BUG fix: the blend factors were previously swapped (c1 at t=1),
        inconsistent with Vec3.lerp's convention.
        """
        tinv = 1 - t
        return Color(
            c1.r*tinv + c2.r*t,
            c1.g*tinv + c2.g*t,
            c1.b*tinv + c2.b*t,
            c1.a*tinv + c2.a*t)
class Quat(Structure):
    """Quaternion (x, y, z, w) backed by a ctypes struct; w defaults to 1
    so Quat() is the identity rotation."""
    _fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('w', c_float)]

    def __init__(self, _x = 0, _y = 0, _z = 0, _w = 1):
        self.x = _x
        self.y = _y
        self.z = _z
        self.w = _w

    def copy(self):
        # BUG fix: previously constructed a Color instead of a Quat.
        return Quat(self.x, self.y, self.z, self.w)

    def __mul__(q1, q2):
        """Hamilton product (rotation composition)."""
        return Quat(
            q1.w*q2.x + q1.x*q2.w + q1.z*q2.y - q1.y*q2.z,
            q1.w*q2.y + q1.y*q2.w + q1.x*q2.z - q1.z*q2.x,
            q1.w*q2.z + q1.z*q2.w + q1.y*q2.x - q1.x*q2.y,
            q1.w*q2.w - q1.x*q2.x - q1.y*q2.y - q1.z*q2.z)

    def __eq__(q1, q2):
        return q1.x == q2.x and q1.y == q2.y and q1.z == q2.z and q1.w == q2.w

    def from_axis(self, axis, angle):
        """Set from an axis (Vec3) and angle in radians (native call)."""
        _API.quat_fromaxis(byref(self), byref(axis), c_float(angle))

    def from_euler(self, pitch, yaw, roll):
        """Set from Euler angles in radians (native call)."""
        _API.quat_fromeuler(byref(self), c_float(pitch), c_float(yaw), c_float(roll))

    def from_matrix3(self, mat):
        """Set from a Matrix3 rotation (native call)."""
        _API.quat_frommat3(byref(self), byref(mat))

    @staticmethod
    def inverse(q):
        """Conjugate of *q*; equals the inverse for unit quaternions."""
        return Quat(-q.x, -q.y, -q.z, q.w)

    @staticmethod
    def slerp(q1, q2, t):
        """Spherical linear interpolation between q1 and q2 (native call)."""
        q = Quat()
        _API.quat_slerp(byref(q), byref(q1), byref(q2), c_float(t))
        return q

    def __str__(self):
        return 'Quat: %f %f %f %f' % (self.x, self.y, self.z, self.w)
class Matrix3(Structure):
    """Affine 4x3 matrix (rows m1*..m4*; the 4th column m*4 is padding).

    Row 4 (m41..m43) is the translation; row-vector convention (v * M).
    """
    _fields_ = [\
        ('m11', c_float), ('m12', c_float), ('m13', c_float), ('m14', c_float),
        ('m21', c_float), ('m22', c_float), ('m23', c_float), ('m24', c_float),
        ('m31', c_float), ('m32', c_float), ('m33', c_float), ('m34', c_float),
        ('m41', c_float), ('m42', c_float), ('m43', c_float), ('m44', c_float)]

    def __init__(self, _m11 = 1, _m12 = 0, _m13 = 0, _m21 = 0, _m22 = 1, _m23 = 0,
                 _m31 = 0, _m32 = 0, _m33 = 1, _m41 = 0, _m42 = 0, _m43 = 0):
        self.m11 = _m11
        self.m12 = _m12
        self.m13 = _m13
        self.m21 = _m21
        self.m22 = _m22
        self.m23 = _m23
        self.m31 = _m31
        self.m32 = _m32
        self.m33 = _m33
        self.m41 = _m41
        self.m42 = _m42
        self.m43 = _m43

    def copy(self):
        """Return an independent copy of this matrix."""
        return Matrix3(
            self.m11, self.m12, self.m13,
            self.m21, self.m22, self.m23,
            self.m31, self.m32, self.m33,
            self.m41, self.m42, self.m43)

    def __mul__(a, b):
        """Scalar multiply, or affine matrix concatenation (a then b)."""
        if type(b) is float or type(b) is int:
            # BUG fix: the scalar branch previously passed the components in
            # column order into the row-order constructor, scrambling the
            # matrix layout.
            return Matrix3(
                a.m11*b, a.m12*b, a.m13*b,
                a.m21*b, a.m22*b, a.m23*b,
                a.m31*b, a.m32*b, a.m33*b,
                a.m41*b, a.m42*b, a.m43*b)
        else:
            return Matrix3(
                a.m11*b.m11 + a.m12*b.m21 + a.m13*b.m31,
                a.m11*b.m12 + a.m12*b.m22 + a.m13*b.m32,
                a.m11*b.m13 + a.m12*b.m23 + a.m13*b.m33,
                a.m21*b.m11 + a.m22*b.m21 + a.m23*b.m31,
                a.m21*b.m12 + a.m22*b.m22 + a.m23*b.m32,
                a.m21*b.m13 + a.m22*b.m23 + a.m23*b.m33,
                a.m31*b.m11 + a.m32*b.m21 + a.m33*b.m31,
                a.m31*b.m12 + a.m32*b.m22 + a.m33*b.m32,
                a.m31*b.m13 + a.m32*b.m23 + a.m33*b.m33,
                a.m41*b.m11 + a.m42*b.m21 + a.m43*b.m31 + b.m41,
                a.m41*b.m12 + a.m42*b.m22 + a.m43*b.m32 + b.m42,
                a.m41*b.m13 + a.m42*b.m23 + a.m43*b.m33 + b.m43);

    def translate(self, x, y=None, z=None):
        """Set the translation row.

        Accepts either three scalars ``translate(x, y, z)`` or a single
        vector-like ``translate(v)`` with .x/.y/.z attributes.
        BUG fix: two translate() defs previously shadowed each other, so the
        scalar form was unreachable.
        """
        if y is None:
            x, y, z = x.x, x.y, x.z
        self.m41 = x
        self.m42 = y
        self.m43 = z

    def rotate_euler(self, pitch, yaw, roll):
        """Set the rotation part from Euler angles in radians (native call)."""
        _API.mat3_set_roteuler(byref(self), c_float(pitch), c_float(yaw), c_float(roll))

    def rotate_quat(self, q):
        """Set the rotation part from a quaternion (native call)."""
        _API.mat3_set_rotquat(byref(self), byref(q))

    def rotate_axis(self, axis, angle):
        """Set the rotation part from axis/angle in radians (native call)."""
        _API.mat3_set_rotaxis(byref(self), byref(axis), c_float(angle))

    def scale(self, sx, sy, sz):
        """Set the diagonal scale components."""
        self.m11 = sx
        self.m22 = sy
        self.m33 = sz

    def __get_determinant(self):
        return _API.mat3_det(byref(self))
    determinant = property(__get_determinant)

    def __get_translation(self):
        return Vec3(self.m41, self.m42, self.m43)
    translation = property(__get_translation)

    @staticmethod
    def transpose(m):
        """Return *m* with its 3x3 part transposed.

        BUG fix: this staticmethod previously referenced ``self`` and raised
        NameError on every call.
        """
        return Matrix3(
            m.m11, m.m21, m.m31,
            m.m12, m.m22, m.m32,
            m.m13, m.m23, m.m33,
            m.m14, m.m24, m.m34)

    @staticmethod
    def invert(m):
        """Return the inverse of *m* (native call)."""
        r = Matrix3()
        _API.mat3_inv(byref(r), byref(m))
        return r
class Matrix4(Structure):
    """Full 4x4 float matrix; defaults to identity."""
    _fields_ = [\
        ('m11', c_float), ('m12', c_float), ('m13', c_float), ('m14', c_float),
        ('m21', c_float), ('m22', c_float), ('m23', c_float), ('m24', c_float),
        ('m31', c_float), ('m32', c_float), ('m33', c_float), ('m34', c_float),
        ('m41', c_float), ('m42', c_float), ('m43', c_float), ('m44', c_float)]

    def __init__(self,
                 _m11 = 1, _m12 = 0, _m13 = 0, _m14 = 0,
                 _m21 = 0, _m22 = 1, _m23 = 0, _m24 = 0,
                 _m31 = 0, _m32 = 0, _m33 = 1, _m34 = 0,
                 _m41 = 0, _m42 = 0, _m43 = 0, _m44 = 1):
        # Parameter order matches _fields_ order, so assign by zipping.
        values = (_m11, _m12, _m13, _m14,
                  _m21, _m22, _m23, _m24,
                  _m31, _m32, _m33, _m34,
                  _m41, _m42, _m43, _m44)
        for (field_name, _), value in zip(self._fields_, values):
            setattr(self, field_name, value)

    def copy(self):
        """Return an independent copy of this matrix."""
        return Matrix4(*(getattr(self, field_name) for field_name, _ in self._fields_))
class Math:
    """Scalar math helpers and constants."""
    PI = 3.14159265

    @staticmethod
    def to_rad(x):
        """Convert an angle from degrees to radians."""
        radians = x*Math.PI/180.0
        return radians

    @staticmethod
    def to_deg(x):
        """Convert an angle from radians to degrees."""
        degrees = 180.0*x/Math.PI
        return degrees
class FileIO:
    """Virtual file-system helpers."""

    @staticmethod
    def add_virtual_path(path, monitor=False):
        """Register *path* (expanded/absolute) as an engine virtual directory.

        monitor: ask the engine to watch the directory for changes.
        Raises with the engine's last error message on failure.
        """
        full_path = os.path.abspath(os.path.expanduser(path))
        ok = _API.fio_addvdir(to_cstr(full_path), c_int(monitor))
        if not ok:
            raise Exception(Errors.last_error())
class Variant(Structure):
    """Tagged union matching the engine's C variant type."""

    class VarType:
        """Type tags; values mirror the engine's C enum."""
        BOOL = 1
        INT = 2
        UINT = 3
        FLOAT = 4
        FLOAT2 = 5
        FLOAT3 = 6
        FLOAT4 = 7
        INT2 = 8
        INT3 = 9
        INT4 = 10
        STRING = 11

    class _Value(Union):
        _fields_ = [\
            ('b', c_int),
            ('i', c_int),
            ('ui', c_uint),
            ('f', c_float),
            ('fv', c_float*4),
            ('iv', c_int*4),
            ('s', c_char*16)]

    _fields_ = [('type', c_uint), ('value', _Value)]

    def set_value(self, v):
        """Store *v*, choosing the VarType tag from its Python type.

        Strings are ASCII-encoded and must fit the 16-byte union slot.
        Raises Exception for unsupported types.
        """
        if type(v) is bool:
            self.type = Variant.VarType.BOOL
            self.value.b = int(v)
        elif type(v) is int:
            self.type = Variant.VarType.INT
            self.value.i = v
        elif type(v) is float:
            self.type = Variant.VarType.FLOAT
            self.value.f = v
        elif type(v) is str:
            # Checked before the vector types (the checks are disjoint, so
            # order does not change behavior).  BUG fix: assigning the
            # ctypes buffer from to_cstr() to a c_char array field raises
            # TypeError; the field needs raw bytes.
            self.type = Variant.VarType.STRING
            self.value.s = v.encode('ascii')
        elif type(v) is Vec2:
            self.type = Variant.VarType.FLOAT2
            self.value.fv[0] = v.x
            self.value.fv[1] = v.y
        elif type(v) is Vec3:
            self.type = Variant.VarType.FLOAT3
            self.value.fv[0] = v.x
            self.value.fv[1] = v.y
            self.value.fv[2] = v.z
        elif type(v) is Vec2i:
            self.type = Variant.VarType.INT2
            self.value.iv[0] = v.x
            self.value.iv[1] = v.y
        elif type(v) is Color:
            # BUG fix: Color fields are r/g/b/a, not x/y/z/w; the old code
            # raised AttributeError for Color values.
            self.type = Variant.VarType.FLOAT4
            self.value.fv[0] = v.r
            self.value.fv[1] = v.g
            self.value.fv[2] = v.b
            self.value.fv[3] = v.a
        elif type(v) is Vec4:
            self.type = Variant.VarType.FLOAT4
            self.value.fv[0] = v.x
            self.value.fv[1] = v.y
            self.value.fv[2] = v.z
            self.value.fv[3] = v.w
        else:
            raise Exception('unknown type')

    def get_value(self):
        """Return the stored value converted back to a Python object."""
        if self.type == Variant.VarType.BOOL:
            # Return a real bool for symmetry with set_value (was raw int).
            return bool(self.value.b)
        elif self.type == Variant.VarType.INT:
            return self.value.i
        elif self.type == Variant.VarType.FLOAT:
            return self.value.f
        elif self.type == Variant.VarType.FLOAT2:
            return Vec2(self.value.fv[0], self.value.fv[1])
        elif self.type == Variant.VarType.FLOAT3:
            return Vec3(self.value.fv[0], self.value.fv[1], self.value.fv[2])
        elif self.type == Variant.VarType.INT2:
            return Vec2i(self.value.iv[0], self.value.iv[1])
        elif self.type == Variant.VarType.FLOAT4:
            return Vec4(self.value.fv[0], self.value.fv[1], self.value.fv[2], self.value.fv[3])
        elif self.type == Variant.VarType.STRING:
            # Decode for symmetry with set_value, which accepts str.
            return self.value.s.decode('ascii')
        else:
            raise Exception('unknown type')
_API.init(debug = ('--debug' in sys.argv)) | 30.311111 | 95 | 0.54116 | import sys, os, inspect
from ctypes import *
import math
MY_DIR = os.path.dirname(os.path.abspath(inspect.getframeinfo(inspect.currentframe())[0]))
HELPER_DIR = os.path.abspath(os.path.join(MY_DIR, '..', 'helpers'))
sys.path.append(HELPER_DIR)
import dhlog
class _API:
is_init = False
@staticmethod
def init(debug=False):
if _API.is_init:
return
postfix = ''
if debug:
postfix = '-dbg'
if sys.platform == 'win32':
shlib = 'dhcore' + postfix + '.dll'
elif sys.platform == 'linux':
shlib = 'libdhcore' + postfix + '.so'
try:
dhcorelib = cdll.LoadLibrary(shlib)
except:
dhlog.Log.warn(str(sys.exc_info()[1]))
dhlog.Log.fatal('could not load dynamic library %s' % shlib)
sys.exit(-1)
dhlog.Log.msgline('module "%s" loaded' % shlib, dhlog.TERM_GREEN)
_API.core_init = dhcorelib.core_init
_API.core_init.restype = c_int
_API.core_init.argtypes = [c_uint]
_API.core_release = dhcorelib.core_release
_API.core_release.argtypes = [c_int]
_API.err_getstring = dhcorelib.err_getstring
_API.err_getstring.restype = c_char_p
_API.log_outputconsole = dhcorelib.log_outputconsole
_API.log_outputconsole.restype = c_uint
_API.log_outputconsole.argtypes = [c_int]
_API.log_outputfile = dhcorelib.log_outputfile
_API.log_outputfile.restype = c_uint
_API.log_outputfile.argtypes = [c_int, c_char_p]
_API.log_isfile = dhcorelib.log_isfile
_API.log_isfile.restype = c_int
_API.log_isconsole = dhcorelib.log_isconsole
_API.log_isconsole.restype = c_int
_API.log_print = dhcorelib.log_print
_API.log_print.argtypes = [c_uint, c_char_p]
_API.fio_addvdir = dhcorelib.fio_addvdir
_API.fio_addvdir.restype = c_int
_API.fio_addvdir.argtypes = [c_char_p, c_int]
_API.mat3_muls = dhcorelib.mat3_muls
_API.mat3_muls.restype = POINTER(Matrix3)
_API.mat3_muls.argtypes = [POINTER(Matrix3), POINTER(Matrix3), c_float]
_API.mat3_set_roteuler = dhcorelib.mat3_set_roteuler
_API.mat3_set_roteuler.restype = POINTER(Matrix3)
_API.mat3_set_roteuler.argtypes = [POINTER(Matrix3), c_float, c_float, c_float]
_API.quat_slerp = dhcorelib.quat_slerp
_API.quat_slerp.restype = POINTER(Quat)
_API.quat_slerp.argtypes = [POINTER(Quat), POINTER(Quat), POINTER(Quat), c_float]
_API.quat_fromaxis = dhcorelib.quat_fromaxis
_API.quat_fromaxis.restype = POINTER(Quat)
_API.quat_fromaxis.argtypes = [POINTER(Quat), POINTER(Vec3), c_float]
_API.quat_fromeuler = dhcorelib.quat_fromeuler
_API.quat_fromeuler.restype = POINTER(Quat)
_API.quat_fromeuler.argtypes = [POINTER(Quat), c_float, c_float, c_float]
_API.quat_frommat3 = dhcorelib.quat_frommat3
_API.quat_frommat3.restype = POINTER(Quat)
_API.quat_frommat3.argtypes = [POINTER(Quat), POINTER(Matrix3)]
_API.mat3_inv = dhcorelib.mat3_inv
_API.mat3_inv.restype = POINTER(Matrix3)
_API.mat3_inv.argtypes = [POINTER(Matrix3), POINTER(Matrix3)]
_API.mat3_set_rotaxis = dhcorelib.mat3_set_rotaxis
_API.mat3_set_rotaxis.restype = POINTER(Matrix3)
_API.mat3_set_rotaxis.argtypes = [POINTER(Matrix3), POINTER(Vec3), c_float]
_API.mat3_set_roteuler = dhcorelib.mat3_set_roteuler
_API.mat3_set_roteuler.restype = POINTER(Matrix3)
_API.mat3_set_roteuler.argtypes = [POINTER(Matrix3), c_float, c_float, c_float]
_API.mat3_set_rotquat = dhcorelib.mat3_set_rotquat
_API.mat3_set_rotquat.restype = POINTER(Matrix3)
_API.mat3_set_rotquat.argtypes = [POINTER(Matrix3), POINTER(Quat)]
_API.mat3_inv = dhcorelib.mat3_inv
_API.mat3_inv.restype = POINTER(Matrix3)
_API.mat3_inv.argtypes = [POINTER(Matrix3), POINTER(Matrix3)]
_API.mat3_det = dhcorelib.mat3_det
_API.mat3_det.restype = c_float
_API.mat3_det.argtypes = [POINTER(Matrix3)]
_API.is_init = True
def IS_FAIL(r):
    # Engine convention: integer result codes <= 0 mean failure.
    if r <= 0: return True
    else: return False
# Sentinels mirroring the engine's C-side "invalid" markers.
INVALID_HANDLE = 0xffffffffffffffff
INVALID_INDEX = 0xffffffff
def to_cstr(s):
    # ASCII-encode a Python str into a NUL-terminated ctypes char buffer.
    return create_string_buffer(s.encode('ascii'))
class Errors:
    """Read-only access to the engine's error state."""
    @staticmethod
    def last_error():
        # Fetch the native error string and decode it to str.
        r = _API.err_getstring()
        return r.decode()
class Log:
    """Thin wrapper over the engine's logging API."""
    class LogType:
        # Log message categories, mirroring the engine's C enum.
        TEXT = 0
        ERROR = 1
        # NOTE(review): the trailing commas make WARNING and INFO 1-tuples
        # (3,), which c_uint() in msg() rejects — likely meant 2 and 3.
        WARNING = 3,
        INFO = 3,
        LOAD = 4
    @staticmethod
    def set_console_output(enable):
        """Enable or disable console log output."""
        _API.log_outputconsole(c_int(enable))
    @staticmethod
    def set_file_output(logfile):
        """Route log output to *logfile*; pass None to disable file output."""
        if logfile != None:
            _API.log_outputfile(c_int(True), create_string_buffer(logfile.encode('ascii')))
        else:
            _API.log_outputfile(c_int(False), None)
    @staticmethod
    def msg(log_type, msg):
        """Emit *msg* with the given LogType category."""
        _API.log_print(c_uint(log_type), create_string_buffer(msg.encode('ascii')))
class Core:
    """Engine core start-up / shutdown wrapper."""
    class InitFlags():
        # Bit flags selecting which core subsystems to initialize.
        TRACE_MEM = (1<<0)
        CRASH_DUMP = (1<<1)
        LOGGER = (1<<2)
        ERRORS = (1<<3)
        JSON = (1<<4)
        FILE_IO = (1<<5)
        TIMER = (1<<6)
        ALL = 0xffffffff
    @staticmethod
    def init(flags = InitFlags.ALL):
        """Initialize the selected engine subsystems; raise on failure."""
        if IS_FAIL(_API.core_init(c_uint(flags))):
            raise Exception(_API.err_getstring())
    @staticmethod
    def release(report_leaks = True):
        """Shut the core down; optionally report leaked allocations."""
        _API.core_release(c_int(report_leaks))
class Vec3(Structure):
_fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('w', c_float)]
def __init__(self, _x = 0, _y = 0, _z = 0, _w = 1):
self.x = _x
self.y = _y
self.z = _z
self.w = 1
def __add__(a, b):
return Vec3(a.x + b.x, a.y + b.y, a.z + b.z)
def __mul__(a, b):
if type(b) is float or type(b) is int:
return Vec3(a.x*b, a.y*b, a.z*b)
elif type(b) is Matrix3:
return Vec3(\
a.x*b.m11 + a.y*b.m21 + a.z*b.m31 + b.m41,
a.x*b.m12 + a.y*b.m22 + a.z*b.m32 + b.m42,
a.x*b.m13 + a.y*b.m23 + a.z*b.m33 + b.m43);
def copy(self):
return Vec3(self.x, self.y, self.z)
def __div__(a, b):
return Vec3(a.x/b, a.y/b, a.z/b)
def __eq__(a, b):
if a.x == b.x and a.y == b.y and a.z == b.z:
return True
else:
return False
def __sub__(a, b):
return Vec3(a.x - b.x, a.y - b.y, a.z - b.z)
def get_length(self):
return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
length = property(get_length)
@staticmethod
def dot(a, b):
return a.x*b.x + a.y*b.y + a.z*b.z
@staticmethod
def normalize(v):
scale = 1.0 / v.length
return Vec3(v.x*scale, v.y*scale, v.z*scale)
@staticmethod
def cross(v1, v2):
return Vec3(v1.y*v2.z - v1.z*v2.y, v1.z*v2.x - v1.x*v2.z, v1.x*v2.y - v1.y*v2.x)
@staticmethod
def lerp(v1, v2, t):
return Vec3(\
v1.x + t*(v2.x - v1.x),
v1.y + t*(v2.y - v1.y),
v1.z + t*(v2.z - v1.z))
def __str__(self):
return 'Vec3: %f, %f, %f' % (self.x, self.y, self.z)
class Vec2(Structure):
_fields_ = [('x', c_float), ('y', c_float)]
def __init__(self, _x = 0, _y = 0):
self.x = _x
self.y = _y
def copy(self):
return Vec2(self.x, self.y)
def __add__(a, b):
return Vec2(a.x + b.x, a.y + b.y)
def __sub__(a, b):
return Vec2(a.x - b.x, a.y - b.y)
def __mul__(a, b):
return Vec2(a.x*b, a.y*b)
def __div__(a, b):
return Vec2(a.x/b, a.y/b)
def __str__(self):
return 'Vec2: %f, %f' % (self.x, self.y)
class Vec2i(Structure):
_fields_ = [('x', c_int), ('y', c_int)]
def __init__(self, _x = 0, _y = 0):
self.x = int(_x)
self.y = int(_y)
def copy(self):
return Vec2i(self.x, self.y)
def __add__(a, b):
return Vec2(a.x + b.x, a.y + b.y)
def __sub__(a, b):
return Vec2(a.x - b.x, a.y - b.y)
def __mul__(a, b):
return Vec2(a.x*b, a.y*b)
def __str__(self):
return 'Vec2i: %d, %d' % (self.x, self.y)
class Vec4(Structure):
_fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('w', c_float)]
def __init__(self, _x = 0, _y = 0, _z = 0, _w = 1):
self.x = _x
self.y = _y
self.z = _z
self.w = 1
def copy(self):
return Vec4(self.x, self.y, self.z, self.w)
def __add__(a, b):
return Vec4(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w)
def __sub__(a, b):
return Vec4(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w)
def __mul__(a, b):
return Vec4(a.x*b, a.y*b, a.z*b, a.w*b)
def __div__(a, b):
return Vec4(a.x/b, a.y/b, a.z/b, a.w/b)
def __str__(self):
return 'Vec4: %f, %f, %f, %f' % (self.x, self.y, self.z, self.w)
class Color(Structure):
_fields_ = [('r', c_float), ('g', c_float), ('b', c_float), ('a', c_float)]
def __init__(self, _r = 0, _g = 0, _b = 0, _a = 1):
self.r = _r
self.g = _g
self.b = _b
self.a = _a
def copy(self):
return Color(self.r, self.g, self.b, self.a)
def __mul__(a, b):
return Color(a.r*b, a.g*b, a.g*b, a.a)
def __mul__(a, b):
return Color(a.r*b.r, a.g*b.g, a.g*b.b, min(a.a, b.a))
def __add__(a, b):
return Color(a.r+b.r, a.g+b.g, a.b+b.b, max(a.a, b.a))
@staticmethod
def lerp(c1, c2, t):
tinv = 1 - t
return Color(
c1.r*t + c2.r*tinv,
c1.g*t + c2.g*tinv,
c1.b*t + c2.b*tinv,
c1.a*t + c2.a*tinv)
class Quat(Structure):
    """Rotation quaternion (x, y, z, w) backed by C-compatible floats."""
    _fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('w', c_float)]
    def __init__(self, _x = 0, _y = 0, _z = 0, _w = 1):
        self.x = _x
        self.y = _y
        self.z = _z
        self.w = _w
    def copy(self):
        """Return an independent copy of this quaternion.

        BUGFIX: previously constructed a Color instead of a Quat.
        """
        return Quat(self.x, self.y, self.z, self.w)
    def __mul__(q1, q2):
        """Hamilton product q1*q2 (composition of the two rotations)."""
        return Quat(\
            q1.w*q2.x + q1.x*q2.w + q1.z*q2.y - q1.y*q2.z,
            q1.w*q2.y + q1.y*q2.w + q1.x*q2.z - q1.z*q2.x,
            q1.w*q2.z + q1.z*q2.w + q1.y*q2.x - q1.x*q2.y,
            q1.w*q2.w - q1.x*q2.x - q1.y*q2.y - q1.z*q2.z)
    def __eq__(q1, q2):
        """Exact component-wise equality (no tolerance)."""
        return q1.x == q2.x and q1.y == q2.y and q1.z == q2.z and q1.w == q2.w
    def from_axis(self, axis, angle):
        """Initialize in place from a rotation axis and angle (via C API)."""
        _API.quat_fromaxis(byref(self), byref(axis), c_float(angle))
    def from_euler(self, pitch, yaw, roll):
        """Initialize in place from Euler angles (via C API)."""
        _API.quat_fromeuler(byref(self), c_float(pitch), c_float(yaw), c_float(roll))
    def from_matrix3(self, mat):
        """Initialize in place from a rotation Matrix3 (via C API)."""
        _API.quat_frommat3(byref(self), byref(mat))
    @staticmethod
    def inverse(q):
        """Return the conjugate of q; equals the inverse for unit quaternions."""
        return Quat(-q.x, -q.y, -q.z, q.w)
    @staticmethod
    def slerp(q1, q2, t):
        """Spherical linear interpolation between q1 and q2 at parameter t (via C API)."""
        q = Quat()
        _API.quat_slerp(byref(q), byref(q1), byref(q2), c_float(t))
        return q
    def __str__(self):
        return 'Quat: %f %f %f %f' % (self.x, self.y, self.z, self.w)
class Matrix3(Structure):
    """Row-major affine 3x4 transform, padded to 4x4 floats for C interop.

    The constructor fills the 3x3 rotation/scale block and the m41..m43
    translation row; the fourth column keeps its ctypes zero defaults.
    """
    _fields_ = [\
        ('m11', c_float), ('m12', c_float), ('m13', c_float), ('m14', c_float),
        ('m21', c_float), ('m22', c_float), ('m23', c_float), ('m24', c_float),
        ('m31', c_float), ('m32', c_float), ('m33', c_float), ('m34', c_float),
        ('m41', c_float), ('m42', c_float), ('m43', c_float), ('m44', c_float)]
    def __init__(self, _m11 = 1, _m12 = 0, _m13 = 0, _m21 = 0, _m22 = 1, _m23 = 0,
                 _m31 = 0, _m32 = 0, _m33 = 1, _m41 = 0, _m42 = 0, _m43 = 0):
        self.m11 = _m11
        self.m12 = _m12
        self.m13 = _m13
        self.m21 = _m21
        self.m22 = _m22
        self.m23 = _m23
        self.m31 = _m31
        self.m32 = _m32
        self.m33 = _m33
        self.m41 = _m41
        self.m42 = _m42
        self.m43 = _m43
    def copy(self):
        """Return an independent copy (translation row included)."""
        return Matrix3(\
            self.m11, self.m12, self.m13,
            self.m21, self.m22, self.m23,
            self.m31, self.m32, self.m33,
            self.m41, self.m42, self.m43)
    def __mul__(a, b):
        """Scalar multiply (numeric b) or affine matrix composition a*b."""
        if type(b) is float or type(b) is int:
            # BUGFIX: the scaled cells were previously passed to the
            # constructor in column-major order, transposing the result.
            return Matrix3(\
                a.m11*b, a.m12*b, a.m13*b,
                a.m21*b, a.m22*b, a.m23*b,
                a.m31*b, a.m32*b, a.m33*b,
                a.m41*b, a.m42*b, a.m43*b)
        else:
            # Affine product: 3x3 blocks multiplied, translation row of a
            # transformed by b then offset by b's translation.
            return Matrix3(\
                a.m11*b.m11 + a.m12*b.m21 + a.m13*b.m31,
                a.m11*b.m12 + a.m12*b.m22 + a.m13*b.m32,
                a.m11*b.m13 + a.m12*b.m23 + a.m13*b.m33,
                a.m21*b.m11 + a.m22*b.m21 + a.m23*b.m31,
                a.m21*b.m12 + a.m22*b.m22 + a.m23*b.m32,
                a.m21*b.m13 + a.m22*b.m23 + a.m23*b.m33,
                a.m31*b.m11 + a.m32*b.m21 + a.m33*b.m31,
                a.m31*b.m12 + a.m32*b.m22 + a.m33*b.m32,
                a.m31*b.m13 + a.m32*b.m23 + a.m33*b.m33,
                a.m41*b.m11 + a.m42*b.m21 + a.m43*b.m31 + b.m41,
                a.m41*b.m12 + a.m42*b.m22 + a.m43*b.m32 + b.m42,
                a.m41*b.m13 + a.m42*b.m23 + a.m43*b.m33 + b.m43)
    def translate(self, x, y = None, z = None):
        """Set the translation row from a vector or from three components.

        Only the vector form was previously reachable (a second definition
        shadowed the (x, y, z) one); both call styles are supported now.
        """
        if y is None:
            x, y, z = x.x, x.y, x.z
        self.m41 = x
        self.m42 = y
        self.m43 = z
    def rotate_euler(self, pitch, yaw, roll):
        """Set the rotation block from Euler angles (via C API)."""
        _API.mat3_set_roteuler(byref(self), c_float(pitch), c_float(yaw), c_float(roll))
    def rotate_quat(self, q):
        """Set the rotation block from a quaternion (via C API)."""
        _API.mat3_set_rotquat(byref(self), byref(q))
    def rotate_axis(self, axis, angle):
        """Set the rotation block from an axis and an angle (via C API)."""
        _API.mat3_set_rotaxis(byref(self), byref(axis), c_float(angle))
    def scale(self, sx, sy, sz):
        """Set the diagonal scale factors (does not compose with rotation)."""
        self.m11 = sx
        self.m22 = sy
        self.m33 = sz
    def __get_determinant(self):
        # Computed by the C API.
        return _API.mat3_det(byref(self))
    determinant = property(__get_determinant)
    def __get_translation(self):
        return Vec3(self.m41, self.m42, self.m43)
    translation = property(__get_translation)
    @staticmethod
    def transpose(m):
        """Return the transpose of m's stored 3x4 layout.

        BUGFIX: previously referenced 'self' inside a @staticmethod, raising
        NameError on every call.
        """
        return Matrix3(\
            m.m11, m.m21, m.m31,
            m.m12, m.m22, m.m32,
            m.m13, m.m23, m.m33,
            m.m14, m.m24, m.m34)
    @staticmethod
    def invert(m):
        """Return the inverse of m (computed by the C API)."""
        r = Matrix3()
        _API.mat3_inv(byref(r), byref(m))
        return r
class Matrix4(Structure):
    """Row-major 4x4 float matrix, layout-compatible with the C API."""
    _fields_ = [\
        ('m11', c_float), ('m12', c_float), ('m13', c_float), ('m14', c_float),
        ('m21', c_float), ('m22', c_float), ('m23', c_float), ('m24', c_float),
        ('m31', c_float), ('m32', c_float), ('m33', c_float), ('m34', c_float),
        ('m41', c_float), ('m42', c_float), ('m43', c_float), ('m44', c_float)]
    def __init__(self,
                 _m11 = 1, _m12 = 0, _m13 = 0, _m14 = 0,
                 _m21 = 0, _m22 = 1, _m23 = 0, _m24 = 0,
                 _m31 = 0, _m32 = 0, _m33 = 1, _m34 = 0,
                 _m41 = 0, _m42 = 0, _m43 = 0, _m44 = 1):
        # The defaults form the identity matrix.  Assign every cell from the
        # matching constructor argument; _fields_ is declared in the same
        # row-major order as the argument list.
        cells = (_m11, _m12, _m13, _m14,
                 _m21, _m22, _m23, _m24,
                 _m31, _m32, _m33, _m34,
                 _m41, _m42, _m43, _m44)
        for (name, _ctype), value in zip(self._fields_, cells):
            setattr(self, name, value)
    def copy(self):
        """Return an independent copy of this matrix."""
        return Matrix4(*(getattr(self, name) for name, _ctype in self._fields_))
class Math:
    """Small math helpers shared by the bindings."""
    # Kept as a literal (not math.pi) to match the engine's C-side constant.
    PI = 3.14159265
    @staticmethod
    def to_rad(x):
        """Convert degrees to radians."""
        return x*Math.PI/180.0
    @staticmethod
    def to_deg(x):
        """Convert radians to degrees."""
        return 180.0*x/Math.PI
class FileIO:
    """Static wrappers around the engine's virtual file-system C API."""
    @staticmethod
    def add_virtual_path(path, monitor=False):
        """Mount a directory into the engine's virtual file system.

        The path is user-expanded and made absolute before being handed to
        the C API; when monitor is True the directory is watched for changes.
        Raises Exception carrying the engine's last error message on failure.
        """
        path = os.path.abspath(os.path.expanduser(path))
        if not _API.fio_addvdir(to_cstr(path), c_int(monitor)):
            raise Exception(Errors.last_error())
class Variant(Structure):
    """Tagged union used to exchange typed values with the C API."""
    class VarType:
        # Type tags; the numeric values must match the C-side enum.
        BOOL = 1
        INT = 2
        UINT = 3
        FLOAT = 4
        FLOAT2 = 5
        FLOAT3 = 6
        FLOAT4 = 7
        INT2 = 8
        INT3 = 9
        INT4 = 10
        STRING = 11
    class _Value(Union):
        _fields_ = [\
            ('b', c_int),
            ('i', c_int),
            ('ui', c_uint),
            ('f', c_float),
            ('fv', c_float*4),
            ('iv', c_int*4),
            ('s', c_char*16)]
    _fields_ = [('type', c_uint), ('value', _Value)]
    def set_value(self, v):
        """Store v, choosing the tag from v's Python type.

        Supported: bool, int, float, str, Vec2, Vec3, Vec2i, Vec4 and Color.
        UINT/INT3/INT4 tags are never produced here (plain ints map to INT).
        Raises Exception for any other type.
        """
        if type(v) is bool:
            self.type = Variant.VarType.BOOL
            self.value.b = int(v)
        elif type(v) is int:
            self.type = Variant.VarType.INT
            self.value.i = v
        elif type(v) is float:
            self.type = Variant.VarType.FLOAT
            self.value.f = v
        elif type(v) is Vec2:
            self.type = Variant.VarType.FLOAT2
            self.value.fv[0] = v.x
            self.value.fv[1] = v.y
        elif type(v) is Vec3:
            self.type = Variant.VarType.FLOAT3
            self.value.fv[0] = v.x
            self.value.fv[1] = v.y
            self.value.fv[2] = v.z
        elif type(v) is Vec2i:
            self.type = Variant.VarType.INT2
            self.value.iv[0] = v.x
            self.value.iv[1] = v.y
        elif type(v) is Color:
            # BUGFIX: Color stores r/g/b/a, not x/y/z/w; the old branch
            # shared with Vec4 raised AttributeError for Color values.
            self.type = Variant.VarType.FLOAT4
            self.value.fv[0] = v.r
            self.value.fv[1] = v.g
            self.value.fv[2] = v.b
            self.value.fv[3] = v.a
        elif type(v) is Vec4:
            self.type = Variant.VarType.FLOAT4
            self.value.fv[0] = v.x
            self.value.fv[1] = v.y
            self.value.fv[2] = v.z
            self.value.fv[3] = v.w
        elif type(v) is str:
            self.type = Variant.VarType.STRING
            self.value.s = to_cstr(v)
        else:
            raise Exception('unknown type')
    def get_value(self):
        """Return the stored value as the Python type matching the tag.

        Note: FLOAT4 values always come back as a Vec4, even when a Color
        was stored.  UINT/INT3/INT4 tags have no reader and raise
        Exception('unknown type'), matching set_value which never emits them.
        """
        if self.type == Variant.VarType.BOOL:
            return self.value.b
        elif self.type == Variant.VarType.INT:
            return self.value.i
        elif self.type == Variant.VarType.FLOAT:
            return self.value.f
        elif self.type == Variant.VarType.FLOAT2:
            return Vec2(self.value.fv[0], self.value.fv[1])
        elif self.type == Variant.VarType.FLOAT3:
            return Vec3(self.value.fv[0], self.value.fv[1], self.value.fv[2])
        elif self.type == Variant.VarType.INT2:
            return Vec2i(self.value.iv[0], self.value.iv[1])
        elif self.type == Variant.VarType.FLOAT4:
            return Vec4(self.value.fv[0], self.value.fv[1], self.value.fv[2], self.value.fv[3])
        elif self.type == Variant.VarType.STRING:
            return self.value.s
        else:
            raise Exception('unknown type')
_API.init(debug = ('--debug' in sys.argv)) | true | true |
f728f87dd991e699b34d62bb2c156120a76fee44 | 21,464 | py | Python | crm/main.py | PJHsieh/sonic-utilities | 4e132c1c86c515076815a64cf22aaf8e0101de84 | [
"Apache-2.0"
] | 1 | 2022-01-29T01:31:49.000Z | 2022-01-29T01:31:49.000Z | crm/main.py | PJHsieh/sonic-utilities | 4e132c1c86c515076815a64cf22aaf8e0101de84 | [
"Apache-2.0"
] | 9 | 2019-09-25T05:54:34.000Z | 2022-03-30T09:28:26.000Z | crm/main.py | PJHsieh/sonic-utilities | 4e132c1c86c515076815a64cf22aaf8e0101de84 | [
"Apache-2.0"
] | 1 | 2021-08-30T06:47:30.000Z | 2021-08-30T06:47:30.000Z | #!/usr/bin/env python3
import click
from swsscommon.swsscommon import ConfigDBConnector
from tabulate import tabulate
from sonic_py_common import multi_asic
from utilities_common.general import load_db_config
from utilities_common import multi_asic as multi_asic_util
class Crm:
    """Critical Resource Monitoring (CRM) CLI backend.

    Reads/writes CRM settings in CONFIG_DB and reads usage statistics from
    COUNTERS_DB, with multi-ASIC awareness via the run_on_multi_asic
    decorator (which supplies self.db / self.config_db per namespace).
    """
    def __init__(self, db=None):
        self.cli_mode = None      # 'resources' or 'thresholds' (set by CLI groups)
        self.addr_family = None   # 'ipv4', 'ipv6' or 'mpls' (set by CLI groups)
        self.res_type = None      # resource suffix, e.g. 'route', 'acl_table'
        self.db = None            # per-namespace counters DB handle
        self.cfgdb = db           # injected ConfigDB (tests) or None
        self.multi_asic = multi_asic_util.MultiAsic()

    def _get_config_db(self):
        """Return a connected CONFIG_DB handle.

        Uses the injected connector when one was supplied; otherwise connects
        to the first namespace's ConfigDB.  Shared by show_summary and
        show_thresholds, which previously duplicated this setup.
        """
        if self.cfgdb is not None:
            return self.cfgdb
        namespaces = multi_asic.get_namespace_list()
        configdb = ConfigDBConnector(namespace=namespaces[0])
        configdb.connect()
        return configdb

    @multi_asic_util.run_on_multi_asic
    def config(self, attr, val):
        """
        CRM handler for 'config' CLI commands: set one CRM|Config field.
        """
        if self.cfgdb:
            self.config_db = self.cfgdb
        self.config_db.mod_entry("CRM", 'Config', {attr: val})

    def show_summary(self):
        """
        CRM Handler to display general information (polling interval).
        """
        crm_info = self._get_config_db().get_entry('CRM', 'Config')
        if crm_info:
            try:
                click.echo('\nPolling Interval: ' + crm_info['polling_interval'] + ' second(s)\n')
            except KeyError:
                click.echo('\nError! Could not get CRM configuration.\n')
                click.echo('\nError! Please configure polling interval.\n')
        else:
            click.echo('\nError! Could not get CRM configuration.\n')

    def show_thresholds(self, resource):
        """
        CRM Handler to display threshold configuration for one or all resources.
        """
        crm_info = self._get_config_db().get_entry('CRM', 'Config')
        header = ("Resource Name", "Threshold Type", "Low Threshold", "High Threshold")
        data = []
        if crm_info:
            if resource == 'all':
                for res in ["ipv4_route", "ipv6_route", "ipv4_nexthop", "ipv6_nexthop", "ipv4_neighbor", "ipv6_neighbor",
                            "nexthop_group_member", "nexthop_group", "acl_table", "acl_group", "acl_entry",
                            "acl_counter", "fdb_entry", "ipmc_entry", "snat_entry", "dnat_entry", "mpls_inseg",
                            "mpls_nexthop", "srv6_nexthop", "srv6_my_sid_entry"]:
                    # Skip resources that have no configured thresholds.
                    try:
                        data.append([res, crm_info[res + "_threshold_type"], crm_info[res + "_low_threshold"], crm_info[res + "_high_threshold"]])
                    except KeyError:
                        pass
            else:
                try:
                    data.append([resource, crm_info[resource + "_threshold_type"], crm_info[resource + "_low_threshold"], crm_info[resource + "_high_threshold"]])
                except KeyError:
                    pass
        else:
            click.echo('\nError! Could not get CRM configuration.')
        click.echo()
        click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
        click.echo()

    def get_resources(self, resource):
        """
        Collect [name, used, available] rows from COUNTERS_DB CRM:STATS
        for one resource or for all known resources.
        """
        crm_stats = self.db.get_all(self.db.COUNTERS_DB, 'CRM:STATS')
        data = []
        if crm_stats:
            if resource == 'all':
                for res in ["ipv4_route", "ipv6_route", "ipv4_nexthop", "ipv6_nexthop", "ipv4_neighbor", "ipv6_neighbor",
                            "nexthop_group_member", "nexthop_group", "fdb_entry", "ipmc_entry", "snat_entry", "dnat_entry",
                            "mpls_inseg", "mpls_nexthop", "srv6_nexthop", "srv6_my_sid_entry"]:
                    if 'crm_stats_' + res + "_used" in crm_stats and 'crm_stats_' + res + "_available" in crm_stats:
                        data.append([res, crm_stats['crm_stats_' + res + "_used"], crm_stats['crm_stats_' + res + "_available"]])
            else:
                if 'crm_stats_' + resource + "_used" in crm_stats and 'crm_stats_' + resource + "_available" in crm_stats:
                    data.append([resource, crm_stats['crm_stats_' + resource + "_used"], crm_stats['crm_stats_' + resource + "_available"]])
        return data

    def get_acl_resources(self):
        """
        Collect [stage, bind point, name, used, available] rows for ACL
        groups/tables from CRM:ACL_STATS:<stage>:<bind point>.
        """
        data = []
        for stage in ["INGRESS", "EGRESS"]:
            for bind_point in ["PORT", "LAG", "VLAN", "RIF", "SWITCH"]:
                crm_stats = self.db.get_all(self.db.COUNTERS_DB, 'CRM:ACL_STATS:{0}:{1}'.format(stage, bind_point))
                if crm_stats:
                    for res in ["acl_group", "acl_table"]:
                        data.append([
                            stage, bind_point, res,
                            crm_stats['crm_stats_' + res + "_used"],
                            crm_stats['crm_stats_' + res + "_available"]
                        ])
        return data

    def get_acl_table_resources(self):
        """
        Collect per-table [table id, name, used, available] rows for ACL
        entries/counters from CRM:ACL_TABLE_STATS:<id>.
        """
        crm_acl_keys = self.db.keys(self.db.COUNTERS_DB, 'CRM:ACL_TABLE_STATS*')
        data = []
        for key in crm_acl_keys or [None]:
            if key:
                id = key.replace('CRM:ACL_TABLE_STATS:', '')
                crm_stats = self.db.get_all(self.db.COUNTERS_DB, key)
                for res in ['acl_entry', 'acl_counter']:
                    if ('crm_stats_' + res + '_used' in crm_stats) and ('crm_stats_' + res + '_available' in crm_stats):
                        data.append([id, res, crm_stats['crm_stats_' + res + '_used'], crm_stats['crm_stats_' + res + '_available']])
        return data

    @multi_asic_util.run_on_multi_asic
    def show_resources(self, resource):
        """
        CRM Handler to display resources information (per namespace on
        multi-ASIC platforms).
        """
        if multi_asic.is_multi_asic():
            header = (self.multi_asic.current_namespace.upper() + "\n\nResource Name", "\n\nUsed Count", "\n\nAvailable Count")
            err_msg = '\nCRM counters are not ready for ' + self.multi_asic.current_namespace.upper() + '. They would be populated after the polling interval.'
        else:
            header = ("Resource Name", "Used Count", "Available Count")
            err_msg = '\nCRM counters are not ready. They would be populated after the polling interval.'
        data = self.get_resources(resource)
        if data:
            click.echo()
            click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
            click.echo()
        else:
            click.echo(err_msg)

    @multi_asic_util.run_on_multi_asic
    def show_acl_resources(self):
        """
        CRM Handler to display ACL resources information.
        """
        if multi_asic.is_multi_asic():
            header = (self.multi_asic.current_namespace.upper() + "\n\nStage", "\n\nBind Point", "\n\nResource Name", "\n\nUsed Count", "\n\nAvailable Count")
        else:
            header = ("Stage", "Bind Point", "Resource Name", "Used Count", "Available Count")
        data = self.get_acl_resources()
        click.echo()
        click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
        click.echo()

    @multi_asic_util.run_on_multi_asic
    def show_acl_table_resources(self):
        """
        CRM Handler to display per-table ACL information.
        """
        if multi_asic.is_multi_asic():
            header = (self.multi_asic.current_namespace.upper() + "\n\nTable ID", "\n\nResource Name", "\n\nUsed Count", "\n\nAvailable Count")
        else:
            header = ("Table ID", "Resource Name", "Used Count", "Available Count")
        data = self.get_acl_table_resources()
        click.echo()
        click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
        click.echo()
@click.group()
@click.pass_context
def cli(ctx):
    """
    Utility entry point.
    """
    # Use the db object if given as input.
    db = None if ctx.obj is None else ctx.obj.cfgdb
    # Load database config files
    load_db_config()
    # Single shared Crm instance; subgroups mutate its cli_mode/addr_family/
    # res_type fields before leaf commands run.
    context = {
        "crm": Crm(db)
    }
    ctx.obj = context
@cli.group()
@click.pass_context
def config(ctx):
"""CRM related configuration"""
pass
@config.group()
@click.pass_context
def polling(ctx):
"""CRM polling configuration"""
pass
@polling.command()
@click.pass_context
@click.argument('interval', type=click.INT)
def interval(ctx, interval):
"""CRM polling interval configuration"""
ctx.obj["crm"].config('polling_interval', interval)
@config.group()
@click.pass_context
def thresholds(ctx):
"""CRM thresholds configuration"""
pass
@thresholds.group()
@click.pass_context
def ipv4(ctx):
"""CRM resource IPv4 address-family"""
ctx.obj["crm"].addr_family = 'ipv4'
@thresholds.group()
@click.pass_context
def ipv6(ctx):
"""CRM resource IPv6 address-family"""
ctx.obj["crm"].addr_family = 'ipv6'
@thresholds.group()
@click.pass_context
def mpls(ctx):
"""CRM resource MPLS address-family"""
ctx.obj["crm"].addr_family = 'mpls'
@mpls.group()
@click.pass_context
def inseg(ctx):
"""CRM configuration for in-segment resource"""
ctx.obj["crm"].res_type = 'inseg'
@ipv4.group()
@click.pass_context
def route(ctx):
"""CRM configuration for route resource"""
ctx.obj["crm"].res_type = 'route'
@ipv4.group()
@click.pass_context
def neighbor(ctx):
"""CRM configuration for neigbor resource"""
ctx.obj["crm"].res_type = 'neighbor'
@ipv4.group()
@click.pass_context
def nexthop(ctx):
"""CRM configuration for nexthop resource"""
ctx.obj["crm"].res_type = 'nexthop'
@route.command()
@click.argument('value', type=click.Choice(['percentage', 'used', 'free']))
@click.pass_context
def type(ctx, value):
    """CRM threshold type configuration"""
    crm = ctx.obj["crm"]
    attr = ''
    # Address family is optional: nexthop-group/fdb/acl resources have none.
    if crm.addr_family is not None:
        attr += crm.addr_family + '_'
    attr += crm.res_type + '_' + 'threshold_type'
    crm.config(attr, value)
@route.command()
@click.argument('value', type=click.INT)
@click.pass_context
def low(ctx, value):
    """CRM low threshold configuration"""
    crm = ctx.obj["crm"]
    attr = ''
    # Address family is optional: nexthop-group/fdb/acl resources have none.
    if crm.addr_family is not None:
        attr += crm.addr_family + '_'
    attr += crm.res_type + '_' + 'low_threshold'
    crm.config(attr, value)
@route.command()
@click.argument('value', type=click.INT)
@click.pass_context
def high(ctx, value):
    """CRM high threshold configuration"""
    crm = ctx.obj["crm"]
    attr = ''
    # Address family is optional: nexthop-group/fdb/acl resources have none.
    if crm.addr_family is not None:
        attr += crm.addr_family + '_'
    attr += crm.res_type + '_' + 'high_threshold'
    crm.config(attr, value)
neighbor.add_command(type)
neighbor.add_command(low)
neighbor.add_command(high)
nexthop.add_command(type)
nexthop.add_command(low)
nexthop.add_command(high)
inseg.add_command(type)
inseg.add_command(low)
inseg.add_command(high)
ipv6.add_command(route)
ipv6.add_command(neighbor)
ipv6.add_command(nexthop)
mpls.add_command(nexthop)
@thresholds.group()
@click.pass_context
def nexthop(ctx):
"""CRM configuration for nexthop resource"""
pass
@nexthop.group()
@click.pass_context
def group(ctx):
"""CRM configuration for nexthop group resource"""
pass
@group.group()
@click.pass_context
def member(ctx):
"""CRM configuration for nexthop group member resource"""
ctx.obj["crm"].res_type = 'nexthop_group_member'
@group.group()
@click.pass_context
def object(ctx):
"""CRM configuration for nexthop group resource"""
ctx.obj["crm"].res_type = 'nexthop_group'
member.add_command(type)
member.add_command(low)
member.add_command(high)
object.add_command(type)
object.add_command(low)
object.add_command(high)
@thresholds.group()
@click.pass_context
def fdb(ctx):
"""CRM configuration for FDB resource"""
ctx.obj["crm"].res_type = 'fdb_entry'
fdb.add_command(type)
fdb.add_command(low)
fdb.add_command(high)
@thresholds.group()
@click.pass_context
def ipmc(ctx):
"""CRM configuration for IPMC resource"""
ctx.obj["crm"].res_type = 'ipmc_entry'
ipmc.add_command(type)
ipmc.add_command(low)
ipmc.add_command(high)
@thresholds.group()
@click.pass_context
def snat(ctx):
"""CRM configuration for Source NAT resource"""
ctx.obj["crm"].res_type = 'snat_entry'
snat.add_command(type)
snat.add_command(low)
snat.add_command(high)
@thresholds.group()
@click.pass_context
def dnat(ctx):
"""CRM configuration for Destination NAT resource"""
ctx.obj["crm"].res_type = 'dnat_entry'
dnat.add_command(type)
dnat.add_command(low)
dnat.add_command(high)
@thresholds.group()
@click.pass_context
def acl(ctx):
"""CRM configuration for ACL resource"""
pass
@acl.group()
@click.pass_context
def table(ctx):
"""CRM configuration for ACL table resource"""
ctx.obj["crm"].res_type = 'acl_table'
table.add_command(type)
table.add_command(low)
table.add_command(high)
@acl.group()
@click.pass_context
def group(ctx):
"""CRM configuration for ACL group resource"""
ctx.obj["crm"].res_type = 'acl_group'
group.add_command(type)
group.add_command(low)
group.add_command(high)
@group.group()
@click.pass_context
def entry(ctx):
"""CRM configuration for ACL entry resource"""
ctx.obj["crm"].res_type = 'acl_entry'
entry.add_command(type)
entry.add_command(low)
entry.add_command(high)
@group.group()
@click.pass_context
def counter(ctx):
"""CRM configuration for ACL counter resource"""
ctx.obj["crm"].res_type = 'acl_counter'
counter.add_command(type)
counter.add_command(low)
counter.add_command(high)
@thresholds.group()
@click.pass_context
def srv6_nexthop(ctx):
"""CRM configuration for SRV6 Nexthop resource"""
ctx.obj["crm"].res_type = 'srv6_nexthop'
srv6_nexthop.add_command(type)
srv6_nexthop.add_command(low)
srv6_nexthop.add_command(high)
@thresholds.group()
@click.pass_context
def srv6_my_sid_entry(ctx):
"""CRM configuration for SRV6 MY_SID resource"""
ctx.obj["crm"].res_type = 'srv6_my_sid_entry'
srv6_my_sid_entry.add_command(type)
srv6_my_sid_entry.add_command(low)
srv6_my_sid_entry.add_command(high)
@cli.group()
@click.pass_context
def show(ctx):
"""Show CRM related information"""
pass
@show.command()
@click.pass_context
def summary(ctx):
"""Show CRM general information"""
ctx.obj["crm"].show_summary()
@show.group()
@click.pass_context
def resources(ctx):
"""Show CRM resources information"""
ctx.obj["crm"].cli_mode = 'resources'
@show.group()
@click.pass_context
def thresholds(ctx):
"""Show CRM thresholds information"""
ctx.obj["crm"].cli_mode = 'thresholds'
@resources.command()
@click.pass_context
def all(ctx):
    """Show CRM information for all resources"""
    # Dispatch on which parent group ('thresholds' or 'resources') was used;
    # both register this same command.
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('all')
    elif ctx.obj["crm"].cli_mode == 'resources':
        # Resources view also appends ACL per-stage and per-table statistics.
        ctx.obj["crm"].show_resources('all')
        ctx.obj["crm"].show_acl_resources()
        ctx.obj["crm"].show_acl_table_resources()
@resources.group()
@click.pass_context
def ipv4(ctx):
"""CRM resource IPv4 address family"""
ctx.obj["crm"].addr_family = 'ipv4'
@resources.group()
@click.pass_context
def ipv6(ctx):
"""CRM resource IPv6 address family"""
ctx.obj["crm"].addr_family = 'ipv6'
@resources.group()
@click.pass_context
def mpls(ctx):
"""CRM resource MPLS address family"""
ctx.obj["crm"].addr_family = 'mpls'
@mpls.command()
@click.pass_context
def inseg(ctx):
"""Show CRM information for in-segment resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('{0}_inseg'.format(ctx.obj["crm"].addr_family))
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('{0}_inseg'.format(ctx.obj["crm"].addr_family))
@ipv4.command()
@click.pass_context
def route(ctx):
"""Show CRM information for route resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('{0}_route'.format(ctx.obj["crm"].addr_family))
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('{0}_route'.format(ctx.obj["crm"].addr_family))
@ipv4.command()
@click.pass_context
def neighbor(ctx):
"""Show CRM information for neighbor resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('{0}_neighbor'.format(ctx.obj["crm"].addr_family))
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('{0}_neighbor'.format(ctx.obj["crm"].addr_family))
@ipv4.command()
@click.pass_context
def nexthop(ctx):
"""Show CRM information for nexthop resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('{0}_nexthop'.format(ctx.obj["crm"].addr_family))
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('{0}_nexthop'.format(ctx.obj["crm"].addr_family))
ipv6.add_command(route)
ipv6.add_command(neighbor)
ipv6.add_command(nexthop)
mpls.add_command(nexthop)
@resources.group()
@click.pass_context
def nexthop(ctx):
"""Show CRM information for nexthop resource"""
pass
@nexthop.group()
@click.pass_context
def group(ctx):
"""Show CRM information for nexthop group resource"""
pass
@group.command()
@click.pass_context
def member(ctx):
"""Show CRM information for nexthop group member resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('nexthop_group_member')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('nexthop_group_member')
@group.command()
@click.pass_context
def object(ctx):
"""Show CRM information for nexthop group resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('nexthop_group')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('nexthop_group')
@resources.group()
@click.pass_context
def acl(ctx):
"""Show CRM information for acl resource"""
pass
@acl.command()
@click.pass_context
def table(ctx):
"""Show CRM information for acl table resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('acl_table')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_acl_table_resources()
@acl.command()
@click.pass_context
def group(ctx):
"""Show CRM information for acl group resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('acl_group')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_acl_resources()
@resources.command()
@click.pass_context
def fdb(ctx):
"""Show CRM information for fdb resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('fdb_entry')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('fdb_entry')
@resources.command()
@click.pass_context
def ipmc(ctx):
"""Show CRM information for IPMC resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('ipmc_entry')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('ipmc_entry')
@resources.command()
@click.pass_context
def snat(ctx):
"""Show CRM information for SNAT resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('snat_entry')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('snat_entry')
@resources.command()
@click.pass_context
def dnat(ctx):
"""Show CRM information for DNAT resource"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('dnat_entry')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('dnat_entry')
@resources.command()
@click.pass_context
def srv6_nexthop(ctx):
"""Show CRM information for SRV6 Nexthop"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('srv6_nexthop')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('srv6_nexthop')
@resources.command()
@click.pass_context
def srv6_my_sid_entry(ctx):
"""Show CRM information for SRV6 MY_SID entry"""
if ctx.obj["crm"].cli_mode == 'thresholds':
ctx.obj["crm"].show_thresholds('srv6_my_sid_entry')
elif ctx.obj["crm"].cli_mode == 'resources':
ctx.obj["crm"].show_resources('srv6_my_sid_entry')
thresholds.add_command(acl)
thresholds.add_command(all)
thresholds.add_command(fdb)
thresholds.add_command(ipv4)
thresholds.add_command(ipv6)
thresholds.add_command(mpls)
thresholds.add_command(nexthop)
thresholds.add_command(ipmc)
thresholds.add_command(snat)
thresholds.add_command(dnat)
thresholds.add_command(srv6_nexthop)
thresholds.add_command(srv6_my_sid_entry)
if __name__ == '__main__':
cli()
| 30.619116 | 162 | 0.643729 |
import click
from swsscommon.swsscommon import ConfigDBConnector
from tabulate import tabulate
from sonic_py_common import multi_asic
from utilities_common.general import load_db_config
from utilities_common import multi_asic as multi_asic_util
class Crm:
def __init__(self, db=None):
self.cli_mode = None
self.addr_family = None
self.res_type = None
self.db = None
self.cfgdb = db
self.multi_asic = multi_asic_util.MultiAsic()
@multi_asic_util.run_on_multi_asic
def config(self, attr, val):
if self.cfgdb:
self.config_db = self.cfgdb
self.config_db.mod_entry("CRM", 'Config', {attr: val})
def show_summary(self):
configdb = self.cfgdb
if configdb is None:
namespaces = multi_asic.get_namespace_list()
configdb = ConfigDBConnector(namespace=namespaces[0])
configdb.connect()
crm_info = configdb.get_entry('CRM', 'Config')
if crm_info:
try:
click.echo('\nPolling Interval: ' + crm_info['polling_interval'] + ' second(s)\n')
except KeyError:
click.echo('\nError! Could not get CRM configuration.\n')
click.echo('\nError! Please configure polling interval.\n')
else:
click.echo('\nError! Could not get CRM configuration.\n')
def show_thresholds(self, resource):
configdb = self.cfgdb
if configdb is None:
namespaces = multi_asic.get_namespace_list()
configdb = ConfigDBConnector(namespace=namespaces[0])
configdb.connect()
crm_info = configdb.get_entry('CRM', 'Config')
header = ("Resource Name", "Threshold Type", "Low Threshold", "High Threshold")
data = []
if crm_info:
if resource == 'all':
for res in ["ipv4_route", "ipv6_route", "ipv4_nexthop", "ipv6_nexthop", "ipv4_neighbor", "ipv6_neighbor",
"nexthop_group_member", "nexthop_group", "acl_table", "acl_group", "acl_entry",
"acl_counter", "fdb_entry", "ipmc_entry", "snat_entry", "dnat_entry", "mpls_inseg",
"mpls_nexthop","srv6_nexthop", "srv6_my_sid_entry"]:
try:
data.append([res, crm_info[res + "_threshold_type"], crm_info[res + "_low_threshold"], crm_info[res + "_high_threshold"]])
except KeyError:
pass
else:
try:
data.append([resource, crm_info[resource + "_threshold_type"], crm_info[resource + "_low_threshold"], crm_info[resource + "_high_threshold"]])
except KeyError:
pass
else:
click.echo('\nError! Could not get CRM configuration.')
click.echo()
click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
click.echo()
def get_resources(self, resource):
crm_stats = self.db.get_all(self.db.COUNTERS_DB, 'CRM:STATS')
data = []
if crm_stats:
if resource == 'all':
for res in ["ipv4_route", "ipv6_route", "ipv4_nexthop", "ipv6_nexthop", "ipv4_neighbor", "ipv6_neighbor",
"nexthop_group_member", "nexthop_group", "fdb_entry", "ipmc_entry", "snat_entry", "dnat_entry",
"mpls_inseg", "mpls_nexthop","srv6_nexthop", "srv6_my_sid_entry"]:
if 'crm_stats_' + res + "_used" in crm_stats.keys() and 'crm_stats_' + res + "_available" in crm_stats.keys():
data.append([res, crm_stats['crm_stats_' + res + "_used"], crm_stats['crm_stats_' + res + "_available"]])
else:
if 'crm_stats_' + resource + "_used" in crm_stats.keys() and 'crm_stats_' + resource + "_available" in crm_stats.keys():
data.append([resource, crm_stats['crm_stats_' + resource + "_used"], crm_stats['crm_stats_' + resource + "_available"]])
return data
def get_acl_resources(self):
data = []
for stage in ["INGRESS", "EGRESS"]:
for bind_point in ["PORT", "LAG", "VLAN", "RIF", "SWITCH"]:
crm_stats = self.db.get_all(self.db.COUNTERS_DB, 'CRM:ACL_STATS:{0}:{1}'.format(stage, bind_point))
if crm_stats:
for res in ["acl_group", "acl_table"]:
data.append([
stage, bind_point, res,
crm_stats['crm_stats_' + res + "_used"],
crm_stats['crm_stats_' + res + "_available"]
])
return data
def get_acl_table_resources(self):
crm_acl_keys = self.db.keys(self.db.COUNTERS_DB, 'CRM:ACL_TABLE_STATS*')
data = []
for key in crm_acl_keys or [None]:
if key:
id = key.replace('CRM:ACL_TABLE_STATS:', '')
crm_stats = self.db.get_all(self.db.COUNTERS_DB, key)
for res in ['acl_entry', 'acl_counter']:
if ('crm_stats_' + res + '_used' in crm_stats) and ('crm_stats_' + res + '_available' in crm_stats):
data.append([id, res, crm_stats['crm_stats_' + res + '_used'], crm_stats['crm_stats_' + res + '_available']])
return data
@multi_asic_util.run_on_multi_asic
def show_resources(self, resource):
if multi_asic.is_multi_asic():
header = (self.multi_asic.current_namespace.upper() + "\n\nResource Name", "\n\nUsed Count", "\n\nAvailable Count")
err_msg = '\nCRM counters are not ready for '+ self.multi_asic.current_namespace.upper() + '. They would be populated after the polling interval.'
else:
header = ("Resource Name", "Used Count", "Available Count")
err_msg = '\nCRM counters are not ready. They would be populated after the polling interval.'
data = []
data = self.get_resources(resource)
if data:
click.echo()
click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
click.echo()
else:
click.echo(err_msg)
@multi_asic_util.run_on_multi_asic
def show_acl_resources(self):
if multi_asic.is_multi_asic():
header = (self.multi_asic.current_namespace.upper() + "\n\nStage", "\n\nBind Point", "\n\nResource Name", "\n\nUsed Count", "\n\nAvailable Count")
else:
header = ("Stage", "Bind Point", "Resource Name", "Used Count", "Available Count")
data = []
data = self.get_acl_resources()
click.echo()
click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
click.echo()
@multi_asic_util.run_on_multi_asic
def show_acl_table_resources(self):
if multi_asic.is_multi_asic():
header = (self.multi_asic.current_namespace.upper() + "\n\nTable ID", "\n\nResource Name", "\n\nUsed Count", "\n\nAvailable Count")
else:
header = ("Table ID", "Resource Name", "Used Count", "Available Count")
data = []
data = self.get_acl_table_resources()
click.echo()
click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
click.echo()
@click.group()
@click.pass_context
def cli(ctx):
    # Root command group: build the shared Crm helper (reusing a config DB
    # handle from a parent context, if any) and store it in ctx.obj so every
    # subcommand can reach it as ctx.obj["crm"].
    db = None if ctx.obj is None else ctx.obj.cfgdb
    load_db_config()
    context = {
        "crm": Crm(db)
    }
    ctx.obj = context


# "crm config ..." subtree.
@cli.group()
@click.pass_context
def config(ctx):
    pass


@config.group()
@click.pass_context
def polling(ctx):
    pass


# "crm config polling interval <n>" -> writes the polling_interval key.
@polling.command()
@click.pass_context
@click.argument('interval', type=click.INT)
def interval(ctx, interval):
    ctx.obj["crm"].config('polling_interval', interval)


@config.group()
@click.pass_context
def thresholds(ctx):
    pass


# The groups below only record the selected address family / resource type
# on the shared Crm object; the leaf commands (type/low/high) later combine
# those selections into the final config attribute name.
@thresholds.group()
@click.pass_context
def ipv4(ctx):
    ctx.obj["crm"].addr_family = 'ipv4'


@thresholds.group()
@click.pass_context
def ipv6(ctx):
    ctx.obj["crm"].addr_family = 'ipv6'


@thresholds.group()
@click.pass_context
def mpls(ctx):
    ctx.obj["crm"].addr_family = 'mpls'


@mpls.group()
@click.pass_context
def inseg(ctx):
    ctx.obj["crm"].res_type = 'inseg'


@ipv4.group()
@click.pass_context
def route(ctx):
    ctx.obj["crm"].res_type = 'route'


@ipv4.group()
@click.pass_context
def neighbor(ctx):
    ctx.obj["crm"].res_type = 'neighbor'


@ipv4.group()
@click.pass_context
def nexthop(ctx):
    ctx.obj["crm"].res_type = 'nexthop'
@route.command()
@click.argument('value', type=click.Choice(['percentage', 'used', 'free']))
@click.pass_context
def type(ctx, value):
    # Set the CRM threshold type for the currently selected resource, e.g.
    # "ipv4_route_threshold_type".  This command is reused via add_command for
    # many resource groups, so addr_family may legitimately be unset (None).
    attr = ''
    # PEP 8: identity comparison with None ("is not None", was "!= None").
    if ctx.obj["crm"].addr_family is not None:
        attr += ctx.obj["crm"].addr_family + '_'
    attr += ctx.obj["crm"].res_type + '_' + 'threshold_type'
    ctx.obj["crm"].config(attr, value)
@route.command()
@click.argument('value', type=click.INT)
@click.pass_context
def low(ctx, value):
    # Set the low threshold for the currently selected resource, e.g.
    # "ipv4_route_low_threshold".  Shared across resource groups via
    # add_command, so addr_family may be unset (None).
    attr = ''
    # PEP 8: identity comparison with None ("is not None", was "!= None").
    if ctx.obj["crm"].addr_family is not None:
        attr += ctx.obj["crm"].addr_family + '_'
    attr += ctx.obj["crm"].res_type + '_' + 'low_threshold'
    ctx.obj["crm"].config(attr, value)
@route.command()
@click.argument('value', type=click.INT)
@click.pass_context
def high(ctx, value):
    # Set the high threshold for the currently selected resource, e.g.
    # "ipv4_route_high_threshold".  Shared across resource groups via
    # add_command, so addr_family may be unset (None).
    attr = ''
    # PEP 8: identity comparison with None ("is not None", was "!= None").
    if ctx.obj["crm"].addr_family is not None:
        attr += ctx.obj["crm"].addr_family + '_'
    attr += ctx.obj["crm"].res_type + '_' + 'high_threshold'
    ctx.obj["crm"].config(attr, value)
# The type/low/high leaf commands defined under "route" are shared: they are
# attached to every other resource group via add_command so each resource
# exposes the same three subcommands.
neighbor.add_command(type)
neighbor.add_command(low)
neighbor.add_command(high)
nexthop.add_command(type)
nexthop.add_command(low)
nexthop.add_command(high)
inseg.add_command(type)
inseg.add_command(low)
inseg.add_command(high)
# ipv6 reuses the subgroups defined under ipv4; mpls additionally reuses
# the nexthop subgroup.
ipv6.add_command(route)
ipv6.add_command(neighbor)
ipv6.add_command(nexthop)
mpls.add_command(nexthop)


# NOTE: this re-binds the module-level name "nexthop" (previously the ipv4
# subgroup); the add_command calls above already captured the old object.
@thresholds.group()
@click.pass_context
def nexthop(ctx):
    pass


@nexthop.group()
@click.pass_context
def group(ctx):
    pass


@group.group()
@click.pass_context
def member(ctx):
    ctx.obj["crm"].res_type = 'nexthop_group_member'


@group.group()
@click.pass_context
def object(ctx):
    ctx.obj["crm"].res_type = 'nexthop_group'


member.add_command(type)
member.add_command(low)
member.add_command(high)
object.add_command(type)
object.add_command(low)
object.add_command(high)


@thresholds.group()
@click.pass_context
def fdb(ctx):
    ctx.obj["crm"].res_type = 'fdb_entry'


fdb.add_command(type)
fdb.add_command(low)
fdb.add_command(high)


@thresholds.group()
@click.pass_context
def ipmc(ctx):
    ctx.obj["crm"].res_type = 'ipmc_entry'


ipmc.add_command(type)
ipmc.add_command(low)
ipmc.add_command(high)


@thresholds.group()
@click.pass_context
def snat(ctx):
    ctx.obj["crm"].res_type = 'snat_entry'


snat.add_command(type)
snat.add_command(low)
snat.add_command(high)


@thresholds.group()
@click.pass_context
def dnat(ctx):
    ctx.obj["crm"].res_type = 'dnat_entry'


dnat.add_command(type)
dnat.add_command(low)
dnat.add_command(high)


@thresholds.group()
@click.pass_context
def acl(ctx):
    pass


@acl.group()
@click.pass_context
def table(ctx):
    ctx.obj["crm"].res_type = 'acl_table'


table.add_command(type)
table.add_command(low)
table.add_command(high)


# Re-binds "group" again (previously the nexthop subgroup above).
@acl.group()
@click.pass_context
def group(ctx):
    ctx.obj["crm"].res_type = 'acl_group'


group.add_command(type)
group.add_command(low)
group.add_command(high)


@group.group()
@click.pass_context
def entry(ctx):
    ctx.obj["crm"].res_type = 'acl_entry'


entry.add_command(type)
entry.add_command(low)
entry.add_command(high)


@group.group()
@click.pass_context
def counter(ctx):
    ctx.obj["crm"].res_type = 'acl_counter'


counter.add_command(type)
counter.add_command(low)
counter.add_command(high)


@thresholds.group()
@click.pass_context
def srv6_nexthop(ctx):
    ctx.obj["crm"].res_type = 'srv6_nexthop'


srv6_nexthop.add_command(type)
srv6_nexthop.add_command(low)
srv6_nexthop.add_command(high)


@thresholds.group()
@click.pass_context
def srv6_my_sid_entry(ctx):
    ctx.obj["crm"].res_type = 'srv6_my_sid_entry'


srv6_my_sid_entry.add_command(type)
srv6_my_sid_entry.add_command(low)
srv6_my_sid_entry.add_command(high)
# "crm show ..." subtree.
@cli.group()
@click.pass_context
def show(ctx):
    pass


@show.command()
@click.pass_context
def summary(ctx):
    ctx.obj["crm"].show_summary()


# "resources" and "thresholds" share all of the leaf commands below; the
# cli_mode flag recorded here selects which view the leaves render.
@show.group()
@click.pass_context
def resources(ctx):
    ctx.obj["crm"].cli_mode = 'resources'


@show.group()
@click.pass_context
def thresholds(ctx):
    ctx.obj["crm"].cli_mode = 'thresholds'


@resources.command()
@click.pass_context
def all(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('all')
    elif ctx.obj["crm"].cli_mode == 'resources':
        # The ACL tables are printed separately from the generic resources.
        ctx.obj["crm"].show_resources('all')
        ctx.obj["crm"].show_acl_resources()
        ctx.obj["crm"].show_acl_table_resources()


@resources.group()
@click.pass_context
def ipv4(ctx):
    ctx.obj["crm"].addr_family = 'ipv4'


@resources.group()
@click.pass_context
def ipv6(ctx):
    ctx.obj["crm"].addr_family = 'ipv6'


@resources.group()
@click.pass_context
def mpls(ctx):
    ctx.obj["crm"].addr_family = 'mpls'


@mpls.command()
@click.pass_context
def inseg(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('{0}_inseg'.format(ctx.obj["crm"].addr_family))
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('{0}_inseg'.format(ctx.obj["crm"].addr_family))


@ipv4.command()
@click.pass_context
def route(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('{0}_route'.format(ctx.obj["crm"].addr_family))
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('{0}_route'.format(ctx.obj["crm"].addr_family))


@ipv4.command()
@click.pass_context
def neighbor(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('{0}_neighbor'.format(ctx.obj["crm"].addr_family))
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('{0}_neighbor'.format(ctx.obj["crm"].addr_family))


@ipv4.command()
@click.pass_context
def nexthop(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('{0}_nexthop'.format(ctx.obj["crm"].addr_family))
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('{0}_nexthop'.format(ctx.obj["crm"].addr_family))


# ipv6 reuses the commands defined under ipv4; mpls reuses nexthop.
ipv6.add_command(route)
ipv6.add_command(neighbor)
ipv6.add_command(nexthop)
mpls.add_command(nexthop)
# "crm show resources nexthop group {member|object}".  NOTE: "nexthop" and
# "group" re-bind the earlier module-level names; the add_command wiring
# above already captured the previous objects.
@resources.group()
@click.pass_context
def nexthop(ctx):
    pass


@nexthop.group()
@click.pass_context
def group(ctx):
    pass


@group.command()
@click.pass_context
def member(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('nexthop_group_member')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('nexthop_group_member')


@group.command()
@click.pass_context
def object(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('nexthop_group')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('nexthop_group')


@resources.group()
@click.pass_context
def acl(ctx):
    pass


@acl.command()
@click.pass_context
def table(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('acl_table')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_acl_table_resources()


@acl.command()
@click.pass_context
def group(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('acl_group')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_acl_resources()


@resources.command()
@click.pass_context
def fdb(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('fdb_entry')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('fdb_entry')


@resources.command()
@click.pass_context
def ipmc(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('ipmc_entry')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('ipmc_entry')


@resources.command()
@click.pass_context
def snat(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('snat_entry')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('snat_entry')


@resources.command()
@click.pass_context
def dnat(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('dnat_entry')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('dnat_entry')


@resources.command()
@click.pass_context
def srv6_nexthop(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('srv6_nexthop')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('srv6_nexthop')


@resources.command()
@click.pass_context
def srv6_my_sid_entry(ctx):
    if ctx.obj["crm"].cli_mode == 'thresholds':
        ctx.obj["crm"].show_thresholds('srv6_my_sid_entry')
    elif ctx.obj["crm"].cli_mode == 'resources':
        ctx.obj["crm"].show_resources('srv6_my_sid_entry')


# Attach the leaf commands defined under "resources" to "thresholds" as well;
# the cli_mode flag set by the parent group decides which view they print.
thresholds.add_command(acl)
thresholds.add_command(all)
thresholds.add_command(fdb)
thresholds.add_command(ipv4)
thresholds.add_command(ipv6)
thresholds.add_command(mpls)
thresholds.add_command(nexthop)
thresholds.add_command(ipmc)
thresholds.add_command(snat)
thresholds.add_command(dnat)
thresholds.add_command(srv6_nexthop)
thresholds.add_command(srv6_my_sid_entry)


if __name__ == '__main__':
    cli()
| true | true |
f728f8b82ee1b35cb06195653de9369b9601a04e | 20,402 | py | Python | brian2/tests/test_codegen.py | Ziaeemehr/brian2 | 0d28f61881a033f877fb333b5e93c56e5c479b4b | [
"BSD-2-Clause"
] | null | null | null | brian2/tests/test_codegen.py | Ziaeemehr/brian2 | 0d28f61881a033f877fb333b5e93c56e5c479b4b | [
"BSD-2-Clause"
] | null | null | null | brian2/tests/test_codegen.py | Ziaeemehr/brian2 | 0d28f61881a033f877fb333b5e93c56e5c479b4b | [
"BSD-2-Clause"
] | null | null | null |
from collections import namedtuple
import os
import numpy as np
import pytest
from brian2 import prefs, clear_cache, _cache_dirs_and_extensions
from brian2.codegen.cpp_prefs import compiler_supports_c99
from brian2.codegen.optimisation import optimise_statements
from brian2.codegen.translation import (analyse_identifiers,
get_identifiers_recursively,
parse_statement,
make_statements,
)
from brian2.codegen.statements import Statement
from brian2.codegen.codeobject import CodeObject
from brian2.parsing.sympytools import str_to_sympy, sympy_to_str
from brian2.core.variables import Subexpression, Variable, Constant, ArrayVariable
from brian2.core.functions import Function, DEFAULT_FUNCTIONS, DEFAULT_CONSTANTS
from brian2.devices.device import auto_target, device
from brian2.units.fundamentalunits import Unit
from brian2.units import second, ms
FakeGroup = namedtuple('FakeGroup', ['variables'])
@pytest.mark.codegen_independent
def test_auto_target():
    # Very basic smoke test: the "auto" code generation target must resolve
    # to a usable CodeObject subclass.
    target_class = auto_target()
    assert issubclass(target_class, CodeObject)
@pytest.mark.codegen_independent
def test_analyse_identifiers():
    '''
    Test that the analyse_identifiers function works on a simple clear example.
    '''
    code = '''
    a = b+c
    d = e+f
    '''
    known_vars = {name: Variable(name=name) for name in ('b', 'c', 'd', 'g')}
    defined, used_known, dependent = analyse_identifiers(code, known_vars)
    # 'a' must be among the newly defined names; we do not test for equality
    # because the loop-invariant optimisation may introduce extra constants.
    assert 'a' in defined
    assert used_known == {'b', 'c', 'd'}
    assert dependent == {'e', 'f'}
@pytest.mark.codegen_independent
def test_get_identifiers_recursively():
    '''
    Identifier collection must follow subexpressions transitively.
    '''
    owner = FakeGroup(variables={})
    variables = {
        'sub1': Subexpression(name='sub1', dtype=np.float32, expr='sub2 * z',
                              owner=owner, device=None),
        'sub2': Subexpression(name='sub2', dtype=np.float32, expr='5 + y',
                              owner=owner, device=None),
        'x': Variable(name='x'),
    }
    found = get_identifiers_recursively(['_x = sub1 + x'], variables)
    # 'y' and 'z' are only reachable through the nested subexpressions.
    assert found == {'x', '_x', 'y', 'z', 'sub1', 'sub2'}
@pytest.mark.codegen_independent
def test_write_to_subexpression():
    # Assigning to a subexpression must be rejected with a SyntaxError.
    variables = {
        'a': Subexpression(name='a', dtype=np.float32,
                           owner=FakeGroup(variables={}), device=None,
                           expr='2*z'),
        'z': Variable(name='z'),
    }
    with pytest.raises(SyntaxError):
        make_statements('a = z', variables, np.float32)
@pytest.mark.codegen_independent
def test_repeated_subexpressions():
    variables = {
        'a': Subexpression(name='a', dtype=np.float32,
                           owner=FakeGroup(variables={}), device=None,
                           expr='2*z'),
        'x': Variable(name='x'),
        'y': Variable(name='y'),
        'z': Variable(name='z')
    }
    # subexpression a (referring to z) is used twice, but can be reused the
    # second time (no change to z), so it is marked as constant
    code = '''
    x = a
    y = a
    '''
    scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
    assert len(scalar_stmts) == 0
    assert [stmt.var for stmt in vector_stmts] == ['a', 'x', 'y']
    assert vector_stmts[0].constant
    # Case 2: z is modified after a has been used once.
    code = '''
    x = a
    z *= 2
    '''
    scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
    assert len(scalar_stmts) == 0
    assert [stmt.var for stmt in vector_stmts] == ['a', 'x', 'z']
    # Note that we currently do not mark the subexpression as constant in this
    # case, because its use after the "z *=2" line would actually redefine it.
    # Our algorithm is currently not smart enough to detect that it is actually
    # not used afterwards
    # a refers to z, therefore we have to redefine a after z changed, and a
    # cannot be constant
    code = '''
    x = a
    z *= 2
    y = a
    '''
    scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
    assert len(scalar_stmts) == 0
    # 'a' is (re-)evaluated both before and after the change to z.
    assert [stmt.var for stmt in vector_stmts] == ['a', 'x', 'z', 'a', 'y']
    assert not any(stmt.constant for stmt in vector_stmts)
@pytest.mark.codegen_independent
def test_nested_subexpressions():
    '''
    This test checks that code translation works with nested subexpressions.
    '''
    # a depends on b which depends on c; c and d are plain variables that
    # get reassigned between the uses of the subexpressions.
    code = '''
    x = a + b + c
    c = 1
    x = a + b + c
    d = 1
    x = a + b + c
    '''
    variables = {
        'a': Subexpression(name='a', dtype=np.float32, owner=FakeGroup(variables={}), device=None,
                           expr='b*b+d'),
        'b': Subexpression(name='b', dtype=np.float32, owner=FakeGroup(variables={}), device=None,
                           expr='c*c*c'),
        'c': Variable(name='c'),
        'd': Variable(name='d'),
    }
    scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
    assert len(scalar_stmts) == 0
    evalorder = ''.join(stmt.var for stmt in vector_stmts)
    # This is the order that variables ought to be evaluated in (note that
    # previously this test did not expect the last "b" evaluation, because its
    # value did not change (c was not changed). We have since removed this
    # subexpression caching, because it did not seem to apply in practical
    # use cases)
    assert evalorder == 'baxcbaxdbax'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation():
    # Loop-invariant subexpressions built only from scalars/constants
    # (here exp(-dt/tau)) must be hoisted into scalar "_lio_*" statements.
    variables = {'v': Variable('v', scalar=False),
                 'w': Variable('w', scalar=False),
                 'dt': Constant('dt', dimensions=second.dim, value=0.1*ms),
                 'tau': Constant('tau', dimensions=second.dim, value=10*ms),
                 'exp': DEFAULT_FUNCTIONS['exp']}
    statements = [Statement('v', '=', 'dt*w*exp(-dt/tau)/tau + v*exp(-dt/tau)', '', np.float32),
                  Statement('w', '=', 'w*exp(-dt/tau)', '', np.float32)]
    scalar, vector = optimise_statements([], statements, variables)
    # The optimisation should pull out at least exp(-dt / tau)
    assert len(scalar) >= 1
    assert np.issubdtype(scalar[0].dtype, np.floating)
    assert scalar[0].var == '_lio_1'
    assert len(vector) == 2
    # Every vectorised statement must reference a hoisted constant.
    assert all('_lio_' in stmt.expr for stmt in vector)
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_integer():
    # Integer expressions must be hoisted without being rewritten (to keep
    # truncating integer division semantics), while float divisions may be
    # algebraically rearranged.
    variables = {'v': Variable('v', scalar=False),
                 'N': Constant('N', 10),
                 'b': Variable('b', scalar=True, dtype=int),
                 'c': Variable('c', scalar=True, dtype=int),
                 'd': Variable('d', scalar=True, dtype=int),
                 'y': Variable('y', scalar=True, dtype=float),
                 'z': Variable('z', scalar=True, dtype=float),
                 'w': Variable('w', scalar=True, dtype=float),
                 }
    statements = [Statement('v', '=', 'v % (2*3*N)', '', np.float32),
                  # integer version doesn't get rewritten but float version does
                  Statement('a', ':=', 'b//(c//d)', '', int),
                  Statement('x', ':=', 'y/(z/w)', '', float),
                  ]
    scalar, vector = optimise_statements([], statements, variables)
    assert len(scalar) == 3
    assert np.issubdtype(scalar[0].dtype, np.signedinteger)
    assert scalar[0].var == '_lio_1'
    expr = scalar[0].expr.replace(' ', '')
    # 2*3*N is constant-folded to 6*N (operand order is unspecified).
    assert expr=='6*N' or expr=='N*6'
    assert np.issubdtype(scalar[1].dtype, np.signedinteger)
    assert scalar[1].var == '_lio_2'
    expr = scalar[1].expr.replace(' ', '')
    # Integer floor division must be left untouched.
    assert expr=='b//(c//d)'
    assert np.issubdtype(scalar[2].dtype, np.floating)
    assert scalar[2].var == '_lio_3'
    expr = scalar[2].expr.replace(' ', '')
    # Float division may be rearranged to y*w/z.
    assert expr=='(y*w)/z' or expr=='(w*y)/z'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_boolean():
    # Boolean scalar expressions can be hoisted; arguments of stateless
    # functions are hoisted together with the call, arguments of stateful
    # functions are hoisted separately while the call itself stays vectorised.
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'N': Constant('N', 10),
                 'b': Variable('b', scalar=True, dtype=bool),
                 'c': Variable('c', scalar=True, dtype=bool),
                 'int': DEFAULT_FUNCTIONS['int'],
                 'foo': Function(lambda x: None,
                                 arg_units=[Unit(1)], return_unit=Unit(1),
                                 arg_types=['boolean'], return_type='float',
                                 stateless=False)
                 }
    # The calls for "foo" cannot be pulled out, since foo is marked as stateful
    statements = [Statement('v1', '=', '1.0*int(b and c)', '', np.float32),
                  Statement('v1', '=', '1.0*foo(b and c)', '', np.float32),
                  Statement('v2', '=', 'int(not b and True)', '', np.float32),
                  Statement('v2', '=', 'foo(not b and True)', '', np.float32)
                  ]
    scalar, vector = optimise_statements([], statements, variables)
    assert len(scalar) == 4
    assert scalar[0].expr == '1.0 * int(b and c)'
    assert scalar[1].expr == 'b and c'
    assert scalar[2].expr == 'int((not b) and True)'
    assert scalar[3].expr == '(not b) and True'
    assert len(vector) == 4
    assert vector[0].expr == '_lio_1'
    assert vector[1].expr == 'foo(_lio_2)'
    assert vector[2].expr == '_lio_3'
    assert vector[3].expr == 'foo(_lio_4)'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_no_optimisation():
    # Statements containing the stateful rand() function must not be
    # simplified or hoisted, since that would change the stream of random
    # numbers that is drawn.
    # Fixed: the removed ``np.float`` alias (deprecated in NumPy 1.20,
    # removed in 1.24) is replaced with the builtin ``float`` it aliased.
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'N': Constant('N', 10),
                 's1': Variable('s1', scalar=True, dtype=float),
                 's2': Variable('s2', scalar=True, dtype=float),
                 'rand': DEFAULT_FUNCTIONS['rand']
                 }
    statements = [
        # This should not be simplified to 0!
        Statement('v1', '=', 'rand() - rand()', '', float),
        Statement('v1', '=', '3*rand() - 3*rand()', '', float),
        Statement('v1', '=', '3*rand() - ((1+2)*rand())', '', float),
        # This should not pull out rand()*N
        Statement('v1', '=', 's1*rand()*N', '', float),
        Statement('v1', '=', 's2*rand()*N', '', float),
        # This is not important mathematically, but it would change the numbers
        # that are generated
        Statement('v1', '=', '0*rand()*N', '', float),
        Statement('v1', '=', '0/rand()*N', '', float)
    ]
    scalar, vector = optimise_statements([], statements, variables)
    for vs in vector[:3]:
        assert vs.expr.count('rand()') == 2, 'Expression should still contain two rand() calls, but got ' + str(vs)
    for vs in vector[3:]:
        assert vs.expr.count('rand()') == 1, 'Expression should still contain a rand() call, but got ' + str(vs)
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_simplification():
    # Algebraic simplifications must preserve the statement's dtype: float
    # expressions may fold to 0.0/1.0, integer expressions to 0/1, and
    # mixed-type expressions must not be simplified in a way that changes
    # the result type (important for integer division).
    # Fixed: the removed ``np.float``/``np.int`` aliases (deprecated in
    # NumPy 1.20, removed in 1.24) are replaced by the builtins they aliased.
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'i1': Variable('i1', scalar=False, dtype=int),
                 'N': Constant('N', 10)
                 }
    statements = [
        # Should be simplified to 0.0
        Statement('v1', '=', 'v1 - v1', '', float),
        Statement('v1', '=', 'N*v1 - N*v1', '', float),
        Statement('v1', '=', 'v1*N * 0', '', float),
        Statement('v1', '=', 'v1 * 0', '', float),
        Statement('v1', '=', 'v1 * 0.0', '', float),
        Statement('v1', '=', '0.0 / (v1*N)', '', float),
        # Should be simplified to 0
        Statement('i1', '=', 'i1*N * 0', '', int),
        Statement('i1', '=', '0 * i1', '', int),
        Statement('i1', '=', '0 * i1*N', '', int),
        Statement('i1', '=', 'i1 * 0', '', int),
        # Should be simplified to v1*N
        Statement('v2', '=', '0 + v1*N', '', float),
        Statement('v2', '=', 'v1*N + 0.0', '', float),
        Statement('v2', '=', 'v1*N - 0', '', float),
        Statement('v2', '=', 'v1*N - 0.0', '', float),
        Statement('v2', '=', '1 * v1*N', '', float),
        Statement('v2', '=', '1.0 * v1*N', '', float),
        Statement('v2', '=', 'v1*N / 1.0', '', float),
        Statement('v2', '=', 'v1*N / 1', '', float),
        # Should be simplified to i1
        Statement('i1', '=', 'i1*1', '', int),
        Statement('i1', '=', 'i1//1', '', int),
        Statement('i1', '=', 'i1+0', '', int),
        Statement('i1', '=', '0+i1', '', int),
        Statement('i1', '=', 'i1-0', '', int),
        # Should *not* be simplified (because it would change the type,
        # important for integer division, for example)
        Statement('v1', '=', 'i1*1.0', '', float),
        Statement('v1', '=', '1.0*i1', '', float),
        Statement('v1', '=', 'i1/1.0', '', float),
        Statement('v1', '=', 'i1/1', '', float),
        Statement('v1', '=', 'i1+0.0', '', float),
        Statement('v1', '=', '0.0+i1', '', float),
        Statement('v1', '=', 'i1-0.0', '', float),
        ## Should *not* be simplified, flooring division by 1 changes the value
        Statement('v1', '=', 'v2//1.0', '', float),
        Statement('i1', '=', 'i1//1.0', '', float)  # changes type
    ]
    scalar, vector = optimise_statements([], statements, variables)
    assert len(scalar) == 0
    for s in vector[:6]:
        assert s.expr == '0.0'
    for s in vector[6:10]:
        assert s.expr == '0', s.expr  # integer
    for s in vector[10:18]:
        expr = s.expr.replace(' ', '')
        assert expr == 'v1*N' or expr == 'N*v1'
    for s in vector[18:23]:
        expr = s.expr.replace(' ', '')
        assert expr == 'i1'
    for s in vector[23:27]:
        expr = s.expr.replace(' ', '')
        assert expr == '1.0*i1' or expr == 'i1*1.0' or expr == 'i1/1.0'
    for s in vector[27:30]:
        expr = s.expr.replace(' ', '')
        assert expr == '0.0+i1' or expr == 'i1+0.0'
    for s in vector[30:31]:
        expr = s.expr.replace(' ', '')
        assert expr == 'v2//1.0' or expr == 'v2//1'
    for s in vector[31:]:
        expr = s.expr.replace(' ', '')
        assert expr == 'i1//1.0'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_constant_evaluation():
    # Constant expressions of literals (1 + 2 + 3, exp(0)) must be evaluated
    # in place, while expressions of named constants (exp(N)) are hoisted
    # into a scalar statement.
    # Fixed: removed ``np.float`` alias replaced with builtin ``float``.
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'i1': Variable('i1', scalar=False, dtype=int),
                 'N': Constant('N', 10),
                 's1': Variable('s1', scalar=True, dtype=float),
                 's2': Variable('s2', scalar=True, dtype=float),
                 'exp': DEFAULT_FUNCTIONS['exp']
                 }
    statements = [
        Statement('v1', '=', 'v1 * (1 + 2 + 3)', '', float),
        Statement('v1', '=', 'exp(N)*v1', '', float),
        Statement('v1', '=', 'exp(0)*v1', '', float),
    ]
    scalar, vector = optimise_statements([], statements, variables)
    # exp(N) should be pulled out of the vector statements, the rest should be
    # evaluated in place
    assert len(scalar) == 1
    assert scalar[0].expr == 'exp(N)'
    assert len(vector) == 3
    # Fixed: the original assertions had the form "expr == X or 'Y'", which
    # is always true because a non-empty string literal is truthy; use a
    # membership test over both accepted operand orders instead.
    expr = vector[0].expr.replace(' ', '')
    assert expr in ('_lio_1*v1', 'v1*_lio_1')
    expr = vector[1].expr.replace(' ', '')
    assert expr in ('6.0*v1', 'v1*6.0')
    assert vector[2].expr == 'v1'
@pytest.mark.codegen_independent
def test_automatic_augmented_assignments():
    # We test that statements that could be rewritten as augmented assignments
    # are correctly rewritten (using sympy to test for symbolic equality)
    # Fixed: the removed ``np.bool`` alias is replaced with the builtin
    # ``bool`` it aliased, and the entry for key 'z' now names its
    # ArrayVariable 'z' (it was 'y', an apparent copy-paste slip).
    variables = {
        'x': ArrayVariable('x', owner=None, size=10,
                           device=device),
        'y': ArrayVariable('y', owner=None, size=10,
                           device=device),
        'z': ArrayVariable('z', owner=None, size=10,
                           device=device),
        'b': ArrayVariable('b', owner=None, size=10,
                           dtype=bool, device=device),
        'clip': DEFAULT_FUNCTIONS['clip'],
        'inf': DEFAULT_CONSTANTS['inf']
    }
    statements = [
        # examples that should be rewritten
        # Note that using our approach, we will never get -= or /= but always
        # the equivalent += or *= statements
        ('x = x + 1.0', 'x += 1.0'),
        ('x = 2.0 * x', 'x *= 2.0'),
        ('x = x - 3.0', 'x += -3.0'),
        ('x = x/2.0', 'x *= 0.5'),
        ('x = y + (x + 1.0)', 'x += y + 1.0'),
        ('x = x + x', 'x *= 2.0'),
        ('x = x + y + z', 'x += y + z'),
        ('x = x + y + z', 'x += y + z'),
        # examples that should not be rewritten
        ('x = 1.0/x', 'x = 1.0/x'),
        ('x = 1.0', 'x = 1.0'),
        ('x = 2.0*(x + 1.0)', 'x = 2.0*(x + 1.0)'),
        ('x = clip(x + y, 0.0, inf)', 'x = clip(x + y, 0.0, inf)'),
        ('b = b or False', 'b = b or False')
    ]
    for orig, rewritten in statements:
        scalar, vector = make_statements(orig, variables, np.float32)
        try:  # we augment the assertion error with the original statement
            assert len(scalar) == 0, 'Did not expect any scalar statements but got ' + str(scalar)
            assert len(vector) == 1, 'Did expect a single statement but got ' + str(vector)
            statement = vector[0]
            expected_var, expected_op, expected_expr, _ = parse_statement(rewritten)
            assert expected_var == statement.var, 'expected write to variable %s, not to %s' % (expected_var, statement.var)
            assert expected_op == statement.op, 'expected operation %s, not %s' % (expected_op, statement.op)
            # Compare the two expressions using sympy to allow for different order etc.
            sympy_expected = str_to_sympy(expected_expr)
            sympy_actual = str_to_sympy(statement.expr)
            assert sympy_expected == sympy_actual, ('RHS expressions "%s" and "%s" are not identical' % (sympy_to_str(sympy_expected),
                                                                                                         sympy_to_str(sympy_actual)))
        except AssertionError as ex:
            raise AssertionError('Transformation for statement "%s" gave an unexpected result: %s' % (orig, str(ex)))
def test_clear_cache():
    # clear_cache must reject the 'numpy' target (it has no on-disk cache)
    # and must refuse to delete a cache directory containing unexpected files.
    target = prefs.codegen.target
    if target == 'numpy':
        assert 'numpy' not in _cache_dirs_and_extensions
        with pytest.raises(ValueError):
            clear_cache('numpy')
    else:
        assert target in _cache_dirs_and_extensions
        cache_dir, _ = _cache_dirs_and_extensions[target]
        # Create a file that should not be there
        fname = os.path.join(cache_dir, 'some_file.py')
        open(fname, 'w').close()
        # clear_cache should refuse to clear the directory
        with pytest.raises(IOError):
            clear_cache(target)
        os.remove(fname)
def test_compiler_c99():
    # On a user's computer, we do not know whether the compiler actually
    # has C99 support, so we just check whether the call does not raise an
    # error
    c99_support = compiler_supports_c99()
    # On our Azure test server we know that the compilers support C99
    # (AGENT_OS is set by Azure Pipelines), so there we assert the result.
    if os.environ.get('AGENT_OS', ''):
        assert c99_support
if __name__ == '__main__':
    # Manual test runner (normally these are run through pytest).
    test_auto_target()
    test_analyse_identifiers()
    test_get_identifiers_recursively()
    test_write_to_subexpression()
    test_repeated_subexpressions()
    test_nested_subexpressions()
    test_apply_loop_invariant_optimisation()
    test_apply_loop_invariant_optimisation_integer()
    test_apply_loop_invariant_optimisation_boolean()
    test_apply_loop_invariant_optimisation_no_optimisation()
    test_apply_loop_invariant_optimisation_simplification()
    test_apply_loop_invariant_optimisation_constant_evaluation()
    test_automatic_augmented_assignments()
    test_clear_cache()
    # Fixed: test_compiler_c99 was defined but missing from this list.
    test_compiler_c99()
| 43.316348 | 134 | 0.558181 |
from collections import namedtuple
import os
import numpy as np
import pytest
from brian2 import prefs, clear_cache, _cache_dirs_and_extensions
from brian2.codegen.cpp_prefs import compiler_supports_c99
from brian2.codegen.optimisation import optimise_statements
from brian2.codegen.translation import (analyse_identifiers,
get_identifiers_recursively,
parse_statement,
make_statements,
)
from brian2.codegen.statements import Statement
from brian2.codegen.codeobject import CodeObject
from brian2.parsing.sympytools import str_to_sympy, sympy_to_str
from brian2.core.variables import Subexpression, Variable, Constant, ArrayVariable
from brian2.core.functions import Function, DEFAULT_FUNCTIONS, DEFAULT_CONSTANTS
from brian2.devices.device import auto_target, device
from brian2.units.fundamentalunits import Unit
from brian2.units import second, ms
FakeGroup = namedtuple('FakeGroup', ['variables'])
@pytest.mark.codegen_independent
def test_auto_target():
assert issubclass(auto_target(), CodeObject)
@pytest.mark.codegen_independent
def test_analyse_identifiers():
code = '''
a = b+c
d = e+f
'''
known = {'b': Variable(name='b'),
'c': Variable(name='c'),
'd': Variable(name='d'),
'g': Variable(name='g')}
defined, used_known, dependent = analyse_identifiers(code, known)
assert 'a' in defined
assert used_known == {'b', 'c', 'd'}
assert dependent == {'e', 'f'}
@pytest.mark.codegen_independent
def test_get_identifiers_recursively():
variables = {'sub1': Subexpression(name='sub1',
dtype=np.float32, expr='sub2 * z',
owner=FakeGroup(variables={}),
device=None),
'sub2': Subexpression(name='sub2',
dtype=np.float32, expr='5 + y',
owner=FakeGroup(variables={}),
device=None),
'x': Variable(name='x')}
identifiers = get_identifiers_recursively(['_x = sub1 + x'],
variables)
assert identifiers == {'x', '_x', 'y', 'z', 'sub1', 'sub2'}
@pytest.mark.codegen_independent
def test_write_to_subexpression():
variables = {
'a': Subexpression(name='a', dtype=np.float32,
owner=FakeGroup(variables={}), device=None,
expr='2*z'),
'z': Variable(name='z')
}
code = 'a = z'
with pytest.raises(SyntaxError):
make_statements(code, variables, np.float32)
@pytest.mark.codegen_independent
def test_repeated_subexpressions():
variables = {
'a': Subexpression(name='a', dtype=np.float32,
owner=FakeGroup(variables={}), device=None,
expr='2*z'),
'x': Variable(name='x'),
'y': Variable(name='y'),
'z': Variable(name='z')
}
code = '''
x = a
y = a
'''
scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
assert len(scalar_stmts) == 0
assert [stmt.var for stmt in vector_stmts] == ['a', 'x', 'y']
assert vector_stmts[0].constant
code = '''
x = a
z *= 2
'''
scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
assert len(scalar_stmts) == 0
assert [stmt.var for stmt in vector_stmts] == ['a', 'x', 'z']
code = '''
x = a
z *= 2
y = a
'''
scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
assert len(scalar_stmts) == 0
assert [stmt.var for stmt in vector_stmts] == ['a', 'x', 'z', 'a', 'y']
assert not any(stmt.constant for stmt in vector_stmts)
@pytest.mark.codegen_independent
def test_nested_subexpressions():
code = '''
x = a + b + c
c = 1
x = a + b + c
d = 1
x = a + b + c
'''
variables = {
'a': Subexpression(name='a', dtype=np.float32, owner=FakeGroup(variables={}), device=None,
expr='b*b+d'),
'b': Subexpression(name='b', dtype=np.float32, owner=FakeGroup(variables={}), device=None,
expr='c*c*c'),
'c': Variable(name='c'),
'd': Variable(name='d'),
}
scalar_stmts, vector_stmts = make_statements(code, variables, np.float32)
assert len(scalar_stmts) == 0
evalorder = ''.join(stmt.var for stmt in vector_stmts)
assert evalorder == 'baxcbaxdbax'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation():
variables = {'v': Variable('v', scalar=False),
'w': Variable('w', scalar=False),
'dt': Constant('dt', dimensions=second.dim, value=0.1*ms),
'tau': Constant('tau', dimensions=second.dim, value=10*ms),
'exp': DEFAULT_FUNCTIONS['exp']}
statements = [Statement('v', '=', 'dt*w*exp(-dt/tau)/tau + v*exp(-dt/tau)', '', np.float32),
Statement('w', '=', 'w*exp(-dt/tau)', '', np.float32)]
scalar, vector = optimise_statements([], statements, variables)
assert len(scalar) >= 1
assert np.issubdtype(scalar[0].dtype, np.floating)
assert scalar[0].var == '_lio_1'
assert len(vector) == 2
assert all('_lio_' in stmt.expr for stmt in vector)
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_integer():
variables = {'v': Variable('v', scalar=False),
'N': Constant('N', 10),
'b': Variable('b', scalar=True, dtype=int),
'c': Variable('c', scalar=True, dtype=int),
'd': Variable('d', scalar=True, dtype=int),
'y': Variable('y', scalar=True, dtype=float),
'z': Variable('z', scalar=True, dtype=float),
'w': Variable('w', scalar=True, dtype=float),
}
statements = [Statement('v', '=', 'v % (2*3*N)', '', np.float32),
Statement('a', ':=', 'b//(c//d)', '', int),
Statement('x', ':=', 'y/(z/w)', '', float),
]
scalar, vector = optimise_statements([], statements, variables)
assert len(scalar) == 3
assert np.issubdtype(scalar[0].dtype, np.signedinteger)
assert scalar[0].var == '_lio_1'
expr = scalar[0].expr.replace(' ', '')
assert expr=='6*N' or expr=='N*6'
assert np.issubdtype(scalar[1].dtype, np.signedinteger)
assert scalar[1].var == '_lio_2'
expr = scalar[1].expr.replace(' ', '')
assert expr=='b//(c//d)'
assert np.issubdtype(scalar[2].dtype, np.floating)
assert scalar[2].var == '_lio_3'
expr = scalar[2].expr.replace(' ', '')
assert expr=='(y*w)/z' or expr=='(w*y)/z'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_boolean():
    """
    Scalar boolean expressions are extracted into ``_lio_*`` constants; calls
    to a stateful function must stay inside the vector statements, with only
    their (scalar) boolean argument pulled out.
    """
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'N': Constant('N', 10),
                 'b': Variable('b', scalar=True, dtype=bool),
                 'c': Variable('c', scalar=True, dtype=bool),
                 'int': DEFAULT_FUNCTIONS['int'],
                 'foo': Function(lambda x: None,
                                 arg_units=[Unit(1)], return_unit=Unit(1),
                                 arg_types=['boolean'], return_type='float',
                                 stateless=False)
                 }
    # The calls for "foo" cannot be pulled out, since foo is marked as stateful
    statements = [Statement('v1', '=', '1.0*int(b and c)', '', np.float32),
                  Statement('v1', '=', '1.0*foo(b and c)', '', np.float32),
                  Statement('v2', '=', 'int(not b and True)', '', np.float32),
                  Statement('v2', '=', 'foo(not b and True)', '', np.float32)
                  ]
    scalar, vector = optimise_statements([], statements, variables)
    assert len(scalar) == 4
    assert scalar[0].expr == '1.0 * int(b and c)'
    assert scalar[1].expr == 'b and c'
    assert scalar[2].expr == 'int((not b) and True)'
    assert scalar[3].expr == '(not b) and True'
    assert len(vector) == 4
    # Stateless results are fully replaced; stateful foo(...) stays in place
    assert vector[0].expr == '_lio_1'
    assert vector[1].expr == 'foo(_lio_2)'
    assert vector[2].expr == '_lio_3'
    assert vector[3].expr == 'foo(_lio_4)'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_no_optimisation():
    """
    Expressions involving the stateful ``rand()`` function must not be
    simplified or pulled out of the vector statements, since that would
    change the stream of generated random numbers.

    Note: the removed NumPy alias ``np.float`` has been replaced by the
    builtin ``float`` it aliased (no behaviour change).
    """
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'N': Constant('N', 10),
                 's1': Variable('s1', scalar=True, dtype=float),
                 's2': Variable('s2', scalar=True, dtype=float),
                 'rand': DEFAULT_FUNCTIONS['rand']
                 }
    statements = [
        # This should not be simplified to 0!
        Statement('v1', '=', 'rand() - rand()', '', float),
        Statement('v1', '=', '3*rand() - 3*rand()', '', float),
        Statement('v1', '=', '3*rand() - ((1+2)*rand())', '', float),
        # This should not pull out rand()*N
        Statement('v1', '=', 's1*rand()*N', '', float),
        Statement('v1', '=', 's2*rand()*N', '', float),
        # This is not important mathematically, but it would change the numbers
        # that are generated
        Statement('v1', '=', '0*rand()*N', '', float),
        Statement('v1', '=', '0/rand()*N', '', float)
    ]
    scalar, vector = optimise_statements([], statements, variables)
    for vs in vector[:3]:
        assert vs.expr.count('rand()') == 2, 'Expression should still contain two rand() calls, but got ' + str(vs)
    for vs in vector[3:]:
        assert vs.expr.count('rand()') == 1, 'Expression should still contain a rand() call, but got ' + str(vs)
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_simplification():
    """
    Check the arithmetic simplifications performed by the optimiser, making
    sure that no simplification silently changes a statement's dtype.

    Note: the removed NumPy aliases ``np.float``/``np.int`` have been
    replaced by the builtins ``float``/``int`` they aliased (no behaviour
    change).
    """
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'i1': Variable('i1', scalar=False, dtype=int),
                 'N': Constant('N', 10)
                 }
    statements = [
        # Should be simplified to 0.0
        Statement('v1', '=', 'v1 - v1', '', float),
        Statement('v1', '=', 'N*v1 - N*v1', '', float),
        Statement('v1', '=', 'v1*N * 0', '', float),
        Statement('v1', '=', 'v1 * 0', '', float),
        Statement('v1', '=', 'v1 * 0.0', '', float),
        Statement('v1', '=', '0.0 / (v1*N)', '', float),
        # Should be simplified to 0
        Statement('i1', '=', 'i1*N * 0', '', int),
        Statement('i1', '=', '0 * i1', '', int),
        Statement('i1', '=', '0 * i1*N', '', int),
        Statement('i1', '=', 'i1 * 0', '', int),
        # Should be simplified to v1*N
        Statement('v2', '=', '0 + v1*N', '', float),
        Statement('v2', '=', 'v1*N + 0.0', '', float),
        Statement('v2', '=', 'v1*N - 0', '', float),
        Statement('v2', '=', 'v1*N - 0.0', '', float),
        Statement('v2', '=', '1 * v1*N', '', float),
        Statement('v2', '=', '1.0 * v1*N', '', float),
        Statement('v2', '=', 'v1*N / 1.0', '', float),
        Statement('v2', '=', 'v1*N / 1', '', float),
        # Should be simplified to i1
        Statement('i1', '=', 'i1*1', '', int),
        Statement('i1', '=', 'i1//1', '', int),
        Statement('i1', '=', 'i1+0', '', int),
        Statement('i1', '=', '0+i1', '', int),
        Statement('i1', '=', 'i1-0', '', int),
        # Should *not* be simplified (because it would change the type,
        # important for integer division, for example)
        Statement('v1', '=', 'i1*1.0', '', float),
        Statement('v1', '=', '1.0*i1', '', float),
        Statement('v1', '=', 'i1/1.0', '', float),
        Statement('v1', '=', 'i1/1', '', float),
        Statement('v1', '=', 'i1+0.0', '', float),
        Statement('v1', '=', '0.0+i1', '', float),
        Statement('v1', '=', 'i1-0.0', '', float),
        # Should *not* be simplified, flooring division by 1 changes the value
        Statement('v1', '=', 'v2//1.0', '', float),
        Statement('i1', '=', 'i1//1.0', '', float)  # changes type
    ]
    scalar, vector = optimise_statements([], statements, variables)
    assert len(scalar) == 0
    for s in vector[:6]:
        assert s.expr == '0.0'
    for s in vector[6:10]:
        assert s.expr == '0', s.expr  # integer
    for s in vector[10:18]:
        expr = s.expr.replace(' ', '')
        assert expr == 'v1*N' or expr == 'N*v1'
    for s in vector[18:23]:
        expr = s.expr.replace(' ', '')
        assert expr == 'i1'
    for s in vector[23:27]:
        expr = s.expr.replace(' ', '')
        assert expr == '1.0*i1' or expr == 'i1*1.0' or expr == 'i1/1.0'
    for s in vector[27:30]:
        expr = s.expr.replace(' ', '')
        assert expr == '0.0+i1' or expr == 'i1+0.0'
    for s in vector[30:31]:
        expr = s.expr.replace(' ', '')
        assert expr == 'v2//1.0' or expr == 'v2//1'
    for s in vector[31:]:
        expr = s.expr.replace(' ', '')
        assert expr == 'i1//1.0'
@pytest.mark.codegen_independent
def test_apply_loop_invariant_optimisation_constant_evaluation():
    """
    Purely constant expressions (``1 + 2 + 3``, ``exp(0)``) should be
    evaluated in place, while expressions depending on a scalar constant
    such as ``exp(N)`` should be pulled out into a scalar ``_lio_*``
    statement.

    Bug fix: the expression comparisons previously used
    ``assert expr == a or b`` which is always true (``or`` binds to a
    non-empty string literal); they now compare against both orderings
    explicitly.  ``np.float`` (removed NumPy alias) was replaced by ``float``.
    """
    variables = {'v1': Variable('v1', scalar=False),
                 'v2': Variable('v2', scalar=False),
                 'i1': Variable('i1', scalar=False, dtype=int),
                 'N': Constant('N', 10),
                 's1': Variable('s1', scalar=True, dtype=float),
                 's2': Variable('s2', scalar=True, dtype=float),
                 'exp': DEFAULT_FUNCTIONS['exp']
                 }
    statements = [
        Statement('v1', '=', 'v1 * (1 + 2 + 3)', '', float),
        Statement('v1', '=', 'exp(N)*v1', '', float),
        Statement('v1', '=', 'exp(0)*v1', '', float),
    ]
    scalar, vector = optimise_statements([], statements, variables)
    # exp(N) should be pulled out of the vector statements, the rest should be
    # evaluated in place
    assert len(scalar) == 1
    assert scalar[0].expr == 'exp(N)'
    assert len(vector) == 3
    expr = vector[0].expr.replace(' ', '')
    assert expr == '_lio_1*v1' or expr == 'v1*_lio_1'
    expr = vector[1].expr.replace(' ', '')
    assert expr == '6.0*v1' or expr == 'v1*6.0'
    assert vector[2].expr == 'v1'
@pytest.mark.codegen_independent
def test_automatic_augmented_assignments():
    # We test that statements that could be rewritten as augmented assignments
    # are correctly rewritten (using sympy to test for symbolic equality)
    #
    # Note: the removed NumPy alias ``np.bool`` has been replaced by the
    # builtin ``bool`` it aliased (no behaviour change).
    variables = {
        'x': ArrayVariable('x', owner=None, size=10,
                           device=device),
        'y': ArrayVariable('y', owner=None, size=10,
                           device=device),
        # NOTE(review): this entry is registered under the key 'z' but the
        # ArrayVariable itself is named 'y' -- looks like a copy-paste slip;
        # kept unchanged since only the dictionary key is used below. Confirm.
        'z': ArrayVariable('y', owner=None, size=10,
                           device=device),
        'b': ArrayVariable('b', owner=None, size=10,
                           dtype=bool, device=device),
        'clip': DEFAULT_FUNCTIONS['clip'],
        'inf': DEFAULT_CONSTANTS['inf']
    }
    statements = [
        # examples that should be rewritten
        # Note that using our approach, we will never get -= or /= but always
        # the equivalent += or *= statements
        ('x = x + 1.0', 'x += 1.0'),
        ('x = 2.0 * x', 'x *= 2.0'),
        ('x = x - 3.0', 'x += -3.0'),
        ('x = x/2.0', 'x *= 0.5'),
        ('x = y + (x + 1.0)', 'x += y + 1.0'),
        ('x = x + x', 'x *= 2.0'),
        ('x = x + y + z', 'x += y + z'),
        ('x = x + y + z', 'x += y + z'),
        # examples that should not be rewritten
        ('x = 1.0/x', 'x = 1.0/x'),
        ('x = 1.0', 'x = 1.0'),
        ('x = 2.0*(x + 1.0)', 'x = 2.0*(x + 1.0)'),
        ('x = clip(x + y, 0.0, inf)', 'x = clip(x + y, 0.0, inf)'),
        ('b = b or False', 'b = b or False')
    ]
    for orig, rewritten in statements:
        scalar, vector = make_statements(orig, variables, np.float32)
        try:  # we augment the assertion error with the original statement
            assert len(scalar) == 0, 'Did not expect any scalar statements but got ' + str(scalar)
            assert len(vector) == 1, 'Did expect a single statement but got ' + str(vector)
            statement = vector[0]
            expected_var, expected_op, expected_expr, _ = parse_statement(rewritten)
            assert expected_var == statement.var, 'expected write to variable %s, not to %s' % (expected_var, statement.var)
            assert expected_op == statement.op, 'expected operation %s, not %s' % (expected_op, statement.op)
            # Compare the two expressions using sympy to allow for different order etc.
            sympy_expected = str_to_sympy(expected_expr)
            sympy_actual = str_to_sympy(statement.expr)
            assert sympy_expected == sympy_actual, ('RHS expressions "%s" and "%s" are not identical' % (sympy_to_str(sympy_expected),
                                                                                                         sympy_to_str(sympy_actual)))
        except AssertionError as ex:
            raise AssertionError('Transformation for statement "%s" gave an unexpected result: %s' % (orig, str(ex)))
def test_clear_cache():
    """
    ``clear_cache`` must reject targets without an on-disk cache and refuse
    to delete a cache directory that contains unexpected files.
    """
    target = prefs.codegen.target
    if target == 'numpy':
        # numpy has no on-disk cache, so clearing it is an error
        assert 'numpy' not in _cache_dirs_and_extensions
        with pytest.raises(ValueError):
            clear_cache('numpy')
    else:
        assert target in _cache_dirs_and_extensions
        cache_dir, _ = _cache_dirs_and_extensions[target]
        # Create a file that should not be there
        fname = os.path.join(cache_dir, 'some_file.py')
        open(fname, 'w').close()
        # clear_cache should refuse to clear the directory
        with pytest.raises(IOError):
            clear_cache(target)
        os.remove(fname)
def test_compiler_c99():
    # On a user's computer, we do not know whether the compiler actually
    # supports C99 -- only enforce the check on CI machines, where the
    # AGENT_OS environment variable is set and C99 support is expected.
    c99_support = compiler_supports_c99()
    if os.environ.get('AGENT_OS', ''):
        assert c99_support
if __name__ == '__main__':
    # Run every test of this module when executed as a script.
    test_auto_target()
    test_analyse_identifiers()
    test_get_identifiers_recursively()
    test_write_to_subexpression()
    test_repeated_subexpressions()
    test_nested_subexpressions()
    test_apply_loop_invariant_optimisation()
    test_apply_loop_invariant_optimisation_integer()
    test_apply_loop_invariant_optimisation_boolean()
    test_apply_loop_invariant_optimisation_no_optimisation()
    test_apply_loop_invariant_optimisation_simplification()
    test_apply_loop_invariant_optimisation_constant_evaluation()
    test_automatic_augmented_assignments()
    test_clear_cache()
    test_compiler_c99()  # was missing from this manual test list
| true | true |
f728f8df92eb60ed7b875c23517e93444c46346a | 5,568 | py | Python | a_other/runner_threading.py | ogorodnikov/m1 | 06aee1963471897d8c05986e2782e3ca3a107c93 | [
"MIT"
] | null | null | null | a_other/runner_threading.py | ogorodnikov/m1 | 06aee1963471897d8c05986e2782e3ca3a107c93 | [
"MIT"
] | null | null | null | a_other/runner_threading.py | ogorodnikov/m1 | 06aee1963471897d8c05986e2782e3ca3a107c93 | [
"MIT"
] | null | null | null | import time
import threading
import traceback
from queue import PriorityQueue
from collections import defaultdict
from qiskit import IBMQ, Aer, execute
from qiskit.providers.ibmq import least_busy
from qiskit.tools.monitor import backend_overview, job_monitor
from core import app
def log(task_id, message):
    """Log *message* via the Flask app logger and record it for *task_id*.

    The per-task history is kept in the module-level ``logs`` defaultdict so
    it can be shown to the user later.
    """
    app.logger.info(f'{message}')
    logs[task_id].append(message)
from core.algorithms.egcd import egcd
from core.algorithms.bernvaz import bernvaz
from core.algorithms.grover import grover
from core.algorithms.grover_sudoku import grover_sudoku
# Number of concurrent worker threads consuming the task queue
TASK_WORKERS_COUNT = 2

# Maps algorithm ids (as used by the web layer) to their runner functions
runner_functions = {'egcd': egcd,
                    'bernvaz': bernvaz,
                    'grover': grover,
                    'grover_sudoku': grover_sudoku,
                    }

# Id of the most recently created task (incremented in run_algorithm)
task_id = 0
# Pending tasks, ordered by (priority, task_id)
task_queue = PriorityQueue()
# Finished (task_id, result) pairs
task_results_queue = PriorityQueue()
# task_id -> dict with algorithm_id / run_values / status / result
tasks = {}
# task_id -> list of log messages for that task
logs = defaultdict(list)

# Workers only consume tasks while this flag is set (allows pausing them)
worker_active_flag = threading.Event()
worker_active_flag.set()

# Handles of the started worker threads
task_worker_threads = []
def start_task_worker_threads():
    """Spawn TASK_WORKERS_COUNT daemon threads running ``task_worker``.

    The threads are daemonic so they do not block interpreter shutdown;
    their handles are collected in the module-level ``task_worker_threads``.
    """
    for i in range(TASK_WORKERS_COUNT):
        task_worker_thread = threading.Thread(target=task_worker,
                                              args=(task_queue, task_results_queue, worker_active_flag),
                                              daemon=True)
        task_worker_thread.start()
        task_worker_threads.append(task_worker_thread)
    app.logger.info(f'RUNNER task_worker_threads: {task_worker_threads}')
def run_algorithm(algorithm_id, run_values):
    """Create a task for *algorithm_id*, enqueue it and return its task id.

    The task is picked up asynchronously by a worker thread; callers can
    poll the module-level ``tasks`` dict for status and result.
    """
    priority = 1
    global task_id
    # NOTE(review): unsynchronized increment of a module-level counter --
    # concurrent callers (e.g. multiple request threads) could obtain the
    # same id. Consider a lock or itertools.count. TODO confirm callers.
    task_id += 1
    new_task = (priority, task_id, algorithm_id, run_values)
    tasks[task_id] = {'algorithm_id': algorithm_id,
                      'run_values': run_values,
                      'status': 'Running'}
    task_queue.put(new_task)
    app.logger.info(f'RUNNER new_task: {new_task}')
    app.logger.info(f'RUNNER task_queue.qsize: {task_queue.qsize()}')
    app.logger.info(f'RUNNER task_id: {task_id}')
    return task_id
def task_worker(task_queue, task_results_queue, worker_active_flag):
    """Worker loop: pop tasks, run them and record result/status.

    Runs forever in a daemon thread; it only consumes tasks while
    ``worker_active_flag`` is set, which allows pausing all workers
    without stopping the threads.
    """
    app.logger.info(f'RUNNER task_worker started')
    while True:
        # Poll once per second instead of blocking on task_queue.get() so the
        # active-flag is re-checked regularly.
        time.sleep(1)
        if worker_active_flag.is_set() and not task_queue.empty():
            # NOTE(review): empty() + get() is not atomic -- with several
            # workers a get() here may block after another worker drains the
            # queue; consider get(timeout=...). TODO confirm.
            pop_task = task_queue.get()
            priority, task_id, algorithm_id, run_values = pop_task
            app.logger.info(f'RUNNER pop_task: {pop_task}')
            app.logger.info(f'RUNNER task_queue.qsize: {task_queue.qsize()}')
            try:
                result = task_runner(task_id, algorithm_id, run_values)
            except Exception as exception:
                # Record the full traceback in the task log and mark it failed
                error_message = traceback.format_exc()
                log(task_id, error_message)
                tasks[task_id]['status'] = 'Failed'
            else:
                task_results_queue.put((task_id, result))
                tasks[task_id]['result'] = result
                tasks[task_id]['status'] = 'Done'
                log(task_id, f'Result: {result}')
            app.logger.info(f'task_results_queue.qsize: {task_results_queue.qsize()}')
            app.logger.info(f'len(tasks): {len(tasks)}')
    # app.logger.info(f'RUNNER task_worker_threads: {task_worker_threads}')
def task_runner(task_id, algorithm_id, run_values_multidict):
    """Execute a single task and return its result.

    Dispatches to the registered runner function. In the quantum run modes
    the returned circuit is executed on the Aer simulator or on the least
    busy IBMQ device, and the measurement counts are returned.
    """
    run_values = dict(run_values_multidict)
    run_values['task_id'] = task_id
    runner_function = runner_functions[algorithm_id]
    run_mode = run_values.get('run_mode')
    app.logger.info(f'RUNNER run_mode: {run_mode}')
    app.logger.info(f'RUNNER run_values: {run_values}')
    app.logger.info(f'RUNNER runner_function: {runner_function}')
    if run_mode == 'classic':
        # Classical algorithms compute their result directly
        return runner_function(run_values)
    else:
        # Quantum algorithms return a circuit that still has to be executed
        circuit = runner_function(run_values)
        qubit_count = circuit.num_qubits
        if run_mode == 'simulator':
            backend = Aer.get_backend('qasm_simulator')
        elif run_mode == 'quantum_device':
            qiskit_token = app.config.get('QISKIT_TOKEN')
            # NOTE(review): save_account is called on every run and persists
            # the token to disk each time -- probably only needed once; confirm.
            IBMQ.save_account(qiskit_token)
            if not IBMQ.active_account():
                IBMQ.load_account()
            provider = IBMQ.get_provider()
            log(task_id, f'RUNNER provider: {provider}')
            log(task_id, f'RUNNER provider.backends(): {provider.backends()}')
            backend = get_least_busy_backend(provider, qubit_count)
        # NOTE(review): for any other run_mode, `backend` is never assigned
        # and execute() below raises NameError -- confirm run_mode is
        # validated upstream.
        log(task_id, f'RUNNER backend: {backend}')
        job = execute(circuit, backend=backend, shots=1024)
        job_monitor(job, interval=0.5)
        result = job.result()
        counts = result.get_counts()
        log(task_id, f'RUNNER counts:')
        [log(task_id, f'{state}: {count}') for state, count in sorted(counts.items())]
        return {'Counts:': counts}
def get_least_busy_backend(provider, qubit_count):
    """Return the least busy operational IBMQ hardware backend that has at
    least *qubit_count* qubits.

    Simulators are excluded. Restores the ``return`` line that was garbled
    in the source; also replaces the PEP 8 E731 lambda assignment with a
    local ``def`` and drops the redundant ``== True`` comparison.
    """
    def backend_filter(backend):
        # Only operational hardware devices large enough for the circuit
        configuration = backend.configuration()
        return (not configuration.simulator
                and configuration.n_qubits >= qubit_count
                and backend.status().operational)

    return least_busy(provider.backends(filters=backend_filter))
import threading
import traceback
from queue import PriorityQueue
from collections import defaultdict
from qiskit import IBMQ, Aer, execute
from qiskit.providers.ibmq import least_busy
from qiskit.tools.monitor import backend_overview, job_monitor
from core import app
def log(task_id, message):
app.logger.info(f'{message}')
logs[task_id].append(message)
from core.algorithms.egcd import egcd
from core.algorithms.bernvaz import bernvaz
from core.algorithms.grover import grover
from core.algorithms.grover_sudoku import grover_sudoku
TASK_WORKERS_COUNT = 2
runner_functions = {'egcd': egcd,
'bernvaz': bernvaz,
'grover': grover,
'grover_sudoku': grover_sudoku,
}
task_id = 0
task_queue = PriorityQueue()
task_results_queue = PriorityQueue()
tasks = {}
logs = defaultdict(list)
worker_active_flag = threading.Event()
worker_active_flag.set()
task_worker_threads = []
def start_task_worker_threads():
for i in range(TASK_WORKERS_COUNT):
task_worker_thread = threading.Thread(target=task_worker,
args=(task_queue, task_results_queue, worker_active_flag),
daemon=True)
task_worker_thread.start()
task_worker_threads.append(task_worker_thread)
app.logger.info(f'RUNNER task_worker_threads: {task_worker_threads}')
def run_algorithm(algorithm_id, run_values):
priority = 1
global task_id
task_id += 1
new_task = (priority, task_id, algorithm_id, run_values)
tasks[task_id] = {'algorithm_id': algorithm_id,
'run_values': run_values,
'status': 'Running'}
task_queue.put(new_task)
app.logger.info(f'RUNNER new_task: {new_task}')
app.logger.info(f'RUNNER task_queue.qsize: {task_queue.qsize()}')
app.logger.info(f'RUNNER task_id: {task_id}')
return task_id
def task_worker(task_queue, task_results_queue, worker_active_flag):
app.logger.info(f'RUNNER task_worker started')
while True:
time.sleep(1)
if worker_active_flag.is_set() and not task_queue.empty():
pop_task = task_queue.get()
priority, task_id, algorithm_id, run_values = pop_task
app.logger.info(f'RUNNER pop_task: {pop_task}')
app.logger.info(f'RUNNER task_queue.qsize: {task_queue.qsize()}')
try:
result = task_runner(task_id, algorithm_id, run_values)
except Exception as exception:
error_message = traceback.format_exc()
log(task_id, error_message)
tasks[task_id]['status'] = 'Failed'
else:
task_results_queue.put((task_id, result))
tasks[task_id]['result'] = result
tasks[task_id]['status'] = 'Done'
log(task_id, f'Result: {result}')
app.logger.info(f'task_results_queue.qsize: {task_results_queue.qsize()}')
app.logger.info(f'len(tasks): {len(tasks)}')
def task_runner(task_id, algorithm_id, run_values_multidict):
run_values = dict(run_values_multidict)
run_values['task_id'] = task_id
runner_function = runner_functions[algorithm_id]
run_mode = run_values.get('run_mode')
app.logger.info(f'RUNNER run_mode: {run_mode}')
app.logger.info(f'RUNNER run_values: {run_values}')
app.logger.info(f'RUNNER runner_function: {runner_function}')
if run_mode == 'classic':
return runner_function(run_values)
else:
circuit = runner_function(run_values)
qubit_count = circuit.num_qubits
if run_mode == 'simulator':
backend = Aer.get_backend('qasm_simulator')
elif run_mode == 'quantum_device':
qiskit_token = app.config.get('QISKIT_TOKEN')
IBMQ.save_account(qiskit_token)
if not IBMQ.active_account():
IBMQ.load_account()
provider = IBMQ.get_provider()
log(task_id, f'RUNNER provider: {provider}')
log(task_id, f'RUNNER provider.backends(): {provider.backends()}')
backend = get_least_busy_backend(provider, qubit_count)
log(task_id, f'RUNNER backend: {backend}')
job = execute(circuit, backend=backend, shots=1024)
job_monitor(job, interval=0.5)
result = job.result()
counts = result.get_counts()
log(task_id, f'RUNNER counts:')
[log(task_id, f'{state}: {count}') for state, count in sorted(counts.items())]
return {'Counts:': counts}
def get_least_busy_backend(provider, qubit_count):
backend_filter = lambda backend: (not backend.configuration().simulator
and backend.configuration().n_qubits >= qubit_count
and backend.status().operational==True)
least_busy_backend = least_busy(provider.backends(filters=backend_filter))
return least_busy_backend | true | true |
f728f8e6f71945d2a6718b12de9140b2ecfe37b3 | 1,294 | py | Python | test/test_v1_apply_modifier_list_request.py | shaminmeerankutty/connect-python-sdk | 524c8fe344bc3c0340833984970a07d519c4f5be | [
"Apache-2.0"
] | 53 | 2016-08-06T17:12:16.000Z | 2020-08-02T19:43:58.000Z | test/test_v1_apply_modifier_list_request.py | shaminmeerankutty/connect-python-sdk | 524c8fe344bc3c0340833984970a07d519c4f5be | [
"Apache-2.0"
] | 32 | 2016-08-19T16:32:30.000Z | 2020-01-14T18:01:37.000Z | test/test_v1_apply_modifier_list_request.py | shaminmeerankutty/connect-python-sdk | 524c8fe344bc3c0340833984970a07d519c4f5be | [
"Apache-2.0"
] | 45 | 2016-09-05T11:58:09.000Z | 2020-11-15T16:26:41.000Z | # coding: utf-8
"""
Copyright 2017 Square, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import squareconnect
from squareconnect.rest import ApiException
from squareconnect.models.v1_apply_modifier_list_request import V1ApplyModifierListRequest
class TestV1ApplyModifierListRequest(unittest.TestCase):
    """Unit test stubs for the V1ApplyModifierListRequest model."""

    def setUp(self):
        # No fixtures needed for this generated stub
        pass

    def tearDown(self):
        pass

    def testV1ApplyModifierListRequest(self):
        """
        Smoke test: constructing V1ApplyModifierListRequest must not raise.
        """
        model = squareconnect.models.v1_apply_modifier_list_request.V1ApplyModifierListRequest()
if __name__ == '__main__':
unittest.main()
| 26.408163 | 96 | 0.741113 |
from __future__ import absolute_import
import os
import sys
import unittest
import squareconnect
from squareconnect.rest import ApiException
from squareconnect.models.v1_apply_modifier_list_request import V1ApplyModifierListRequest
class TestV1ApplyModifierListRequest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testV1ApplyModifierListRequest(self):
model = squareconnect.models.v1_apply_modifier_list_request.V1ApplyModifierListRequest()
if __name__ == '__main__':
unittest.main()
| true | true |
f728f995aa93d0b83b79a6116102e1dddf8f6ac2 | 503 | py | Python | src/run.py | jessrenteria/flowbot | 82d0c0022ab7c5f72a605cb36456b1ab52bdd2e4 | [
"Apache-2.0"
] | null | null | null | src/run.py | jessrenteria/flowbot | 82d0c0022ab7c5f72a605cb36456b1ab52bdd2e4 | [
"Apache-2.0"
] | null | null | null | src/run.py | jessrenteria/flowbot | 82d0c0022ab7c5f72a605cb36456b1ab52bdd2e4 | [
"Apache-2.0"
] | null | null | null | import argparse
from utils.config import get_config
from model.flowbot import Flowbot
def main():
    """Entry point: parse CLI flags and either train the bot or chat with it."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--train', action='store_true')
    arg_parser.add_argument('--bot', default='lstm_attention')
    cli_args = arg_parser.parse_args()

    # Without --train we run in interactive (testing) mode
    is_interactive = not cli_args.train
    bot = Flowbot(get_config(cli_args.bot), is_interactive)

    if is_interactive:
        bot.interact()
    else:
        bot.train()
if __name__=='__main__':
main()
| 21.869565 | 58 | 0.677932 | import argparse
from utils.config import get_config
from model.flowbot import Flowbot
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--train', action='store_true')
parser.add_argument('--bot', default='lstm_attention')
args = parser.parse_args()
testing = not args.train
config = get_config(args.bot)
flowbot = Flowbot(config, testing)
if testing:
flowbot.interact()
else:
flowbot.train()
if __name__=='__main__':
main()
| true | true |
f728fa580fac0b74f2b800beae5b806ad9d33649 | 8,708 | py | Python | spdx/parsers/jsonyamlxmlbuilders.py | quaresmajose/tools-python | 53c917a1a2491a373efa23e4ef8570b5e863fabc | [
"Apache-2.0"
] | 74 | 2015-12-25T09:43:18.000Z | 2022-03-30T00:23:30.000Z | spdx/parsers/jsonyamlxmlbuilders.py | quaresmajose/tools-python | 53c917a1a2491a373efa23e4ef8570b5e863fabc | [
"Apache-2.0"
] | 184 | 2016-11-23T15:57:16.000Z | 2022-03-15T05:25:59.000Z | spdx/parsers/jsonyamlxmlbuilders.py | quaresmajose/tools-python | 53c917a1a2491a373efa23e4ef8570b5e863fabc | [
"Apache-2.0"
] | 98 | 2015-12-13T12:20:34.000Z | 2022-03-18T15:28:35.000Z | # Copyright (c) Xavier Figueroa
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from spdx.parsers import rdfbuilders
from spdx.parsers import tagvaluebuilders
from spdx.parsers import validations
from spdx.parsers.builderexceptions import SPDXValueError
from spdx.parsers.builderexceptions import CardinalityError
from spdx.parsers.builderexceptions import OrderError
class CreationInfoBuilder(rdfbuilders.CreationInfoBuilder):
    """Creation-info builder; reuses the RDF builder behaviour unchanged."""

    def __init__(self):
        super(CreationInfoBuilder, self).__init__()
class ExternalDocumentRefsBuilder(rdfbuilders.ExternalDocumentRefBuilder):
    """External-document-ref builder; reuses the RDF builder behaviour."""

    def __init__(self):
        super(ExternalDocumentRefsBuilder, self).__init__()
class EntityBuilder(rdfbuilders.EntityBuilder):
    """Entity (tool/person/organization) builder; reuses the RDF behaviour."""

    def __init__(self):
        super(EntityBuilder, self).__init__()
class SnippetBuilder(rdfbuilders.SnippetBuilder):
    """Snippet builder; reuses the RDF builder behaviour unchanged."""

    def __init__(self):
        super(SnippetBuilder, self).__init__()
class ReviewBuilder(rdfbuilders.ReviewBuilder):
    """Review builder; reuses the RDF builder behaviour unchanged."""

    def __init__(self):
        super(ReviewBuilder, self).__init__()
class PackageBuilder(rdfbuilders.PackageBuilder):
    """Package builder; reuses the RDF builder behaviour unchanged."""

    def __init__(self):
        super(PackageBuilder, self).__init__()
class DocBuilder(tagvaluebuilders.DocBuilder):
    """Document builder for the JSON/YAML/XML parsers."""

    def __init__(self):
        super(DocBuilder, self).__init__()

    def set_doc_spdx_id(self, doc, doc_spdx_id_line):
        """
        Set the document SPDX Identifier.
        Raise SPDXValueError if malformed value, CardinalityError
        if already defined.
        """
        if not self.doc_spdx_id_set:
            if (
                doc_spdx_id_line == "SPDXRef-DOCUMENT"
                or validations.validate_doc_spdx_id(doc_spdx_id_line)
            ):
                doc.spdx_id = doc_spdx_id_line
                self.doc_spdx_id_set = True
                return True
            else:
                raise SPDXValueError("Document::SPDXID")
        else:
            raise CardinalityError("Document::SPDXID")

    def set_doc_comment(self, doc, comment):
        """
        Set document comment.
        Raise CardinalityError if comment already set.
        """
        if not self.doc_comment_set:
            self.doc_comment_set = True
            doc.comment = comment
        else:
            raise CardinalityError("Document::Comment")

    def set_doc_namespace(self, doc, namespace):
        """
        Set the document namespace.
        Raise SPDXValueError if malformed value.
        Raise CardinalityError if already defined.
        """
        if not self.doc_namespace_set:
            self.doc_namespace_set = True
            if validations.validate_doc_namespace(namespace):
                doc.namespace = namespace
                return True
            else:
                raise SPDXValueError("Document::Namespace")
        else:
            # Bug fix: previously raised CardinalityError("Document::Comment"),
            # misreporting a duplicate namespace as a duplicate comment.
            raise CardinalityError("Document::Namespace")
class LicenseBuilder(tagvaluebuilders.LicenseBuilder):
    """Builder for extracted (non-listed) license information."""

    def __init__(self):
        super(LicenseBuilder, self).__init__()

    def set_lic_name(self, doc, name):
        """
        Set license name.
        Raise SPDXValueError if name is not str or utils.NoAssert
        Raise CardinalityError if it is already set
        Raise OrderError if no license id defined.
        """
        if self.has_extr_lic(doc):
            if not self.extr_lic_name_set:
                self.extr_lic_name_set = True
                if validations.validate_extr_lic_name(name, True):
                    self.extr_lic(doc).full_name = name
                    return True
                else:
                    raise SPDXValueError("ExtractedLicense::Name")
            else:
                raise CardinalityError("ExtractedLicense::Name")
        else:
            raise OrderError("ExtractedLicense::Name")

    def set_lic_text(self, doc, text):
        """
        Set license text (docstring previously said "name" by copy-paste).
        Raise CardinalityError if it is already set.
        Raise OrderError if no license id defined.
        """
        if self.has_extr_lic(doc):
            if not self.extr_text_set:
                self.extr_text_set = True
                self.extr_lic(doc).text = text
                return True
            else:
                raise CardinalityError("ExtractedLicense::text")
        else:
            raise OrderError("ExtractedLicense::text")

    def set_lic_comment(self, doc, comment):
        """
        Set license comment.
        Raise CardinalityError if it is already set.
        Raise OrderError if no license ID defined.
        """
        if self.has_extr_lic(doc):
            if not self.extr_lic_comment_set:
                self.extr_lic_comment_set = True
                self.extr_lic(doc).comment = comment
                return True
            else:
                raise CardinalityError("ExtractedLicense::comment")
        else:
            raise OrderError("ExtractedLicense::comment")
class FileBuilder(rdfbuilders.FileBuilder):
    """File builder; adds notice handling and maps JSON fileType names."""

    def __init__(self):
        super(FileBuilder, self).__init__()

    def set_file_notice(self, doc, text):
        """
        Set file notice.
        Raise OrderError if no package or file defined.
        """
        # NOTE(review): unlike the other setters, this sets file_notice_set
        # but never raises CardinalityError, so a second notice silently
        # overwrites the first -- confirm this is intended.
        if self.has_package(doc) and self.has_file(doc):
            self.file_notice_set = True
            self.file(doc).notice = text
            return True
        else:
            raise OrderError("File::Notice")

    def set_file_type(self, doc, type_value):
        """
        Wrap rdfbuilders.FileBuilder.set_file_type to match the different
        fileType representations.
        """
        # NOTE(review): an unknown type_value maps to None here and is passed
        # through to the parent implementation -- verify it rejects None.
        type_dict = {
            "fileType_source": "SOURCE",
            "fileType_binary": "BINARY",
            "fileType_archive": "ARCHIVE",
            "fileType_other": "OTHER",
        }
        return super(FileBuilder, self).set_file_type(doc, type_dict.get(type_value))
class AnnotationBuilder(tagvaluebuilders.AnnotationBuilder):
    """Annotation builder reusing the tag/value behaviour."""

    def __init__(self):
        super(AnnotationBuilder, self).__init__()

    def add_annotation_comment(self, doc, comment):
        """
        Set the comment of the most recently added annotation.
        Raise OrderError if no annotator was defined before.
        Raise CardinalityError if the comment is already set.
        """
        if not doc.annotations:
            raise OrderError("AnnotationComment")
        if self.annotation_comment_set:
            raise CardinalityError("AnnotationComment")
        self.annotation_comment_set = True
        doc.annotations[-1].comment = comment
        return True
class RelationshipBuilder(tagvaluebuilders.RelationshipBuilder):
    """Relationship builder reusing the tag/value behaviour."""

    def __init__(self):
        super(RelationshipBuilder, self).__init__()

    def add_relationship_comment(self, doc, comment):
        """
        Set the comment of the most recently added relationship.
        Raise OrderError if no relationship was defined before.
        Raise CardinalityError if the comment is already set.
        """
        if not doc.relationships:
            raise OrderError("RelationshipComment")
        if self.relationship_comment_set:
            raise CardinalityError("RelationshipComment")
        self.relationship_comment_set = True
        doc.relationships[-1].comment = comment
        return True
class Builder(
    DocBuilder,
    CreationInfoBuilder,
    ExternalDocumentRefsBuilder,
    EntityBuilder,
    SnippetBuilder,
    ReviewBuilder,
    LicenseBuilder,
    FileBuilder,
    PackageBuilder,
    AnnotationBuilder,
    RelationshipBuilder,
):
    """
    SPDX document builder for the JSON/YAML/XML parsers, combining all
    partial builders via multiple inheritance.
    """

    def __init__(self):
        super(Builder, self).__init__()
        # FIXME: this state does not make sense
        self.reset()

    def reset(self):
        """
        Reset builder's state for building new documents.
        Must be called between usage with different documents.
        """
        # FIXME: this state does not make sense
        self.reset_creation_info()
        self.reset_document()
        self.reset_package()
        self.reset_file_stat()
        self.reset_reviews()
        self.reset_annotations()
        self.reset_relationship()
        self.reset_extr_lics()
| 32.371747 | 85 | 0.629306 |
from spdx.parsers import rdfbuilders
from spdx.parsers import tagvaluebuilders
from spdx.parsers import validations
from spdx.parsers.builderexceptions import SPDXValueError
from spdx.parsers.builderexceptions import CardinalityError
from spdx.parsers.builderexceptions import OrderError
class CreationInfoBuilder(rdfbuilders.CreationInfoBuilder):
def __init__(self):
super(CreationInfoBuilder, self).__init__()
class ExternalDocumentRefsBuilder(rdfbuilders.ExternalDocumentRefBuilder):
def __init__(self):
super(ExternalDocumentRefsBuilder, self).__init__()
class EntityBuilder(rdfbuilders.EntityBuilder):
def __init__(self):
super(EntityBuilder, self).__init__()
class SnippetBuilder(rdfbuilders.SnippetBuilder):
def __init__(self):
super(SnippetBuilder, self).__init__()
class ReviewBuilder(rdfbuilders.ReviewBuilder):
def __init__(self):
super(ReviewBuilder, self).__init__()
class PackageBuilder(rdfbuilders.PackageBuilder):
def __init__(self):
super(PackageBuilder, self).__init__()
class DocBuilder(tagvaluebuilders.DocBuilder):
def __init__(self):
super(DocBuilder, self).__init__()
def set_doc_spdx_id(self, doc, doc_spdx_id_line):
if not self.doc_spdx_id_set:
if (
doc_spdx_id_line == "SPDXRef-DOCUMENT"
or validations.validate_doc_spdx_id(doc_spdx_id_line)
):
doc.spdx_id = doc_spdx_id_line
self.doc_spdx_id_set = True
return True
else:
raise SPDXValueError("Document::SPDXID")
else:
raise CardinalityError("Document::SPDXID")
def set_doc_comment(self, doc, comment):
if not self.doc_comment_set:
self.doc_comment_set = True
doc.comment = comment
else:
raise CardinalityError("Document::Comment")
def set_doc_namespace(self, doc, namespace):
if not self.doc_namespace_set:
self.doc_namespace_set = True
if validations.validate_doc_namespace(namespace):
doc.namespace = namespace
return True
else:
raise SPDXValueError("Document::Namespace")
else:
raise CardinalityError("Document::Comment")
class LicenseBuilder(tagvaluebuilders.LicenseBuilder):
    """Builds fields of the extracted license currently under construction."""

    def __init__(self):
        super(LicenseBuilder, self).__init__()

    def set_lic_name(self, doc, name):
        """Set the full name of the current extracted license.

        Raises OrderError if no extracted license exists yet,
        CardinalityError if the name was already set, and SPDXValueError
        if the name fails validation.
        """
        if not self.has_extr_lic(doc):
            raise OrderError("ExtractedLicense::Name")
        if self.extr_lic_name_set:
            raise CardinalityError("ExtractedLicense::Name")
        self.extr_lic_name_set = True
        if not validations.validate_extr_lic_name(name, True):
            raise SPDXValueError("ExtractedLicense::Name")
        self.extr_lic(doc).full_name = name
        return True

    def set_lic_text(self, doc, text):
        """Set the text of the current extracted license."""
        if not self.has_extr_lic(doc):
            raise OrderError("ExtractedLicense::text")
        if self.extr_text_set:
            raise CardinalityError("ExtractedLicense::text")
        self.extr_text_set = True
        self.extr_lic(doc).text = text
        return True

    def set_lic_comment(self, doc, comment):
        """Set the comment of the current extracted license."""
        if not self.has_extr_lic(doc):
            raise OrderError("ExtractedLicense::comment")
        if self.extr_lic_comment_set:
            raise CardinalityError("ExtractedLicense::comment")
        self.extr_lic_comment_set = True
        self.extr_lic(doc).comment = comment
        return True
class FileBuilder(rdfbuilders.FileBuilder):
    """Builds file-level fields, translating RDF file-type tokens to
    tag/value tokens before delegating to the RDF builder."""

    # RDF fileType fragment -> tag/value file type token.
    _FILE_TYPE_MAP = {
        "fileType_source": "SOURCE",
        "fileType_binary": "BINARY",
        "fileType_archive": "ARCHIVE",
        "fileType_other": "OTHER",
    }

    def __init__(self):
        super(FileBuilder, self).__init__()

    def set_file_notice(self, doc, text):
        """Set the notice text of the current file.

        Raises OrderError when no package/file is under construction.
        """
        if not (self.has_package(doc) and self.has_file(doc)):
            raise OrderError("File::Notice")
        self.file_notice_set = True
        self.file(doc).notice = text
        return True

    def set_file_type(self, doc, type_value):
        """Translate an RDF file-type token and delegate to the base setter."""
        # Unknown tokens map to None, matching dict.get's default.
        translated = self._FILE_TYPE_MAP.get(type_value)
        return super(FileBuilder, self).set_file_type(doc, translated)
class AnnotationBuilder(tagvaluebuilders.AnnotationBuilder):
    """Builds annotation fields on the most recently added annotation."""

    def __init__(self):
        super(AnnotationBuilder, self).__init__()

    def add_annotation_comment(self, doc, comment):
        """Attach a comment to the latest annotation.

        Raises OrderError when no annotation exists yet and
        CardinalityError when the comment was already set.
        """
        if not doc.annotations:
            raise OrderError("AnnotationComment")
        if self.annotation_comment_set:
            raise CardinalityError("AnnotationComment")
        self.annotation_comment_set = True
        doc.annotations[-1].comment = comment
        return True
class RelationshipBuilder(tagvaluebuilders.RelationshipBuilder):
    """Builds relationship fields on the most recently added relationship."""

    def __init__(self):
        super(RelationshipBuilder, self).__init__()

    def add_relationship_comment(self, doc, comment):
        """Attach a comment to the latest relationship.

        Raises OrderError when no relationship exists yet and
        CardinalityError when the comment was already set.
        """
        if not doc.relationships:
            raise OrderError("RelationshipComment")
        if self.relationship_comment_set:
            raise CardinalityError("RelationshipComment")
        self.relationship_comment_set = True
        doc.relationships[-1].comment = comment
        return True
class Builder(
    DocBuilder,
    CreationInfoBuilder,
    ExternalDocumentRefsBuilder,
    EntityBuilder,
    SnippetBuilder,
    ReviewBuilder,
    LicenseBuilder,
    FileBuilder,
    PackageBuilder,
    AnnotationBuilder,
    RelationshipBuilder,
):
    """Aggregate document builder.

    Combines every section builder through cooperative multiple
    inheritance; the listed base order defines the MRO, so it should not
    be reordered casually.
    """

    def __init__(self):
        super(Builder, self).__init__()
        # Start from a clean slate so a fresh Builder can parse immediately.
        self.reset()

    def reset(self):
        """Reset all per-document parsing state held by the base builders."""
        self.reset_creation_info()
        self.reset_document()
        self.reset_package()
        self.reset_file_stat()
        self.reset_reviews()
        self.reset_annotations()
        self.reset_relationship()
        self.reset_extr_lics()
| true | true |
f728faab3d96a9b5edcb73fb2f1bcacba109faf4 | 6,750 | py | Python | maskrcnn/data/build.py | kSahatova/MULAN-XAI | 7f2ede7cc8ad4e772a3cfe7d52f0a710d5c89d5a | [
"MIT"
] | 6 | 2020-08-31T03:15:02.000Z | 2021-12-14T02:33:51.000Z | maskrcnn/data/build.py | kSahatova/MULAN-XAI | 7f2ede7cc8ad4e772a3cfe7d52f0a710d5c89d5a | [
"MIT"
] | 1 | 2021-12-15T06:43:59.000Z | 2021-12-15T06:45:50.000Z | maskrcnn/data/build.py | kSahatova/MULAN-XAI | 7f2ede7cc8ad4e772a3cfe7d52f0a710d5c89d5a | [
"MIT"
] | 5 | 2019-11-26T14:19:36.000Z | 2021-11-22T13:49:01.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import bisect
import copy
import logging
import torch.utils.data
from maskrcnn.utils.comm import get_world_size
from maskrcnn.utils.imports import import_file
from . import datasets as D
from . import samplers
from .collate_batch import BatchCollator, Batch3dceCollator
from .transforms import build_transforms
from maskrcnn.config import cfg
def build_dataset(dataset_list, transforms, dataset_catalog, is_train=True):
    """Instantiate datasets from catalog entries.

    Arguments:
        dataset_list (list[str]): names of the datasets, e.g.
            coco_2014_train, coco_2014_val, etc.
        transforms (callable): transforms applied to each (image, target).
        dataset_catalog (DatasetCatalog): describes how to construct each
            dataset (factory name plus constructor arguments).
        is_train (bool): whether to set the datasets up for training.

    Returns:
        list of datasets for training (a single, possibly concatenated,
        dataset wrapped in a list), or a list of per-dataset objects for
        testing.
    """
    if not isinstance(dataset_list, (list, tuple)):
        raise RuntimeError(
            "dataset_list should be a list of strings, got {}".format(dataset_list)
        )

    built = []
    for dataset_name in dataset_list:
        info = dataset_catalog.get(dataset_name)
        factory = getattr(D, info["factory"])
        args = info["args"]
        # COCO drops images without annotations during training only.
        if info["factory"] == "COCODataset":
            args["remove_images_without_annotations"] = is_train
        if info["factory"] == "PascalVOCDataset":
            args["use_difficult"] = not is_train
        args["transforms"] = transforms
        built.append(factory(**args))

    # Testing: evaluate each dataset separately.
    if not is_train:
        return built

    # Training: merge everything into one dataset, returned in a list.
    merged = built[0] if len(built) == 1 else D.ConcatDataset(built)
    return [merged]
def make_data_sampler(dataset, shuffle, distributed):
    """Pick the index sampler: distributed, random, or sequential."""
    if distributed:
        return samplers.DistributedSampler(dataset, shuffle=shuffle)
    sampler_cls = (
        torch.utils.data.sampler.RandomSampler
        if shuffle
        else torch.utils.data.sampler.SequentialSampler
    )
    return sampler_cls(dataset)
def _quantize(x, bins):
bins = copy.copy(bins)
bins = sorted(bins)
quantized = list(map(lambda y: bisect.bisect_right(bins, y), x))
return quantized
def _compute_aspect_ratios(dataset):
aspect_ratios = []
for i in range(len(dataset)):
img_info = dataset.get_img_info(i)
aspect_ratio = float(img_info["height"]) / float(img_info["width"])
aspect_ratios.append(aspect_ratio)
return aspect_ratios
def make_batch_data_sampler(
    dataset, sampler, aspect_grouping, images_per_batch, num_iters=None, start_iter=0
):
    """Wrap ``sampler`` in a batch sampler.

    When ``aspect_grouping`` is non-empty, each image is bucketed by its
    aspect ratio (via ``_compute_aspect_ratios`` / ``_quantize``) and
    batches are drawn from a single bucket. When ``num_iters`` is given,
    the batch sampler is further wrapped to yield exactly that many
    iterations, starting at ``start_iter``.
    """
    if aspect_grouping:
        if not isinstance(aspect_grouping, (list, tuple)):
            aspect_grouping = [aspect_grouping]
        aspect_ratios = _compute_aspect_ratios(dataset)
        # Bucket id per image: which aspect-ratio bin it falls into.
        group_ids = _quantize(aspect_ratios, aspect_grouping)
        batch_sampler = samplers.GroupedBatchSampler(
            sampler, group_ids, images_per_batch, drop_uneven=False
        )
    else:
        batch_sampler = torch.utils.data.sampler.BatchSampler(
            sampler, images_per_batch, drop_last=cfg.DATALOADER.DROP_LAST_BATCH,
        )
    if num_iters is not None:
        batch_sampler = samplers.IterationBasedBatchSampler(
            batch_sampler, num_iters, start_iter
        )
    return batch_sampler
def make_datasets(split):
    """Build the datasets configured for a split ('train', 'val', 'test').

    The split name is upper-cased and looked up as an attribute of
    ``cfg.DATASETS`` (e.g. ``cfg.DATASETS.TRAIN``).
    """
    is_train = split == 'train'
    paths_catalog = import_file(
        "maskrcnn.config.paths_catalog", cfg.PATHS_CATALOG, True
    )
    DatasetCatalog = paths_catalog.DatasetCatalog
    # Attribute lookup instead of the original eval('cfg.DATASETS.' + ...):
    # same result, without executing a dynamically built string.
    dataset_list = getattr(cfg.DATASETS, split.upper())
    transforms = build_transforms(is_train)
    datasets = build_dataset(dataset_list, transforms, DatasetCatalog, is_train)
    return datasets
def make_data_loader(datasets, is_train=True, is_distributed=False):
    """Build a torch DataLoader per dataset.

    Returns a single loader for training (the datasets were merged into
    one by ``build_dataset``) and a list of loaders for testing.
    """
    num_gpus = get_world_size()
    if is_train:
        images_per_batch = cfg.SOLVER.IMS_PER_BATCH
        # BUG FIX: the original split the message across two string
        # statements, so "of GPUs ..." was a dead-code expression and the
        # assert message was truncated. Joined into one message.
        assert images_per_batch % num_gpus == 0, (
            "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number "
            "of GPUs ({}) used.".format(images_per_batch, num_gpus)
        )
        images_per_gpu = images_per_batch // num_gpus
        shuffle = True
    else:
        images_per_batch = cfg.TEST.IMS_PER_BATCH
        assert images_per_batch % num_gpus == 0, (
            "TEST.IMS_PER_BATCH ({}) must be divisible by the number "
            "of GPUs ({}) used.".format(images_per_batch, num_gpus)
        )
        images_per_gpu = images_per_batch // num_gpus
        shuffle = cfg.TEST.SHUFFLE

    # Group images with similar aspect ratio; a single bin boundary at 1
    # separates portrait from landscape images.
    aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []

    data_loaders = []
    for dataset in datasets:
        sampler = make_data_sampler(dataset, shuffle, is_distributed)
        batch_sampler = make_batch_data_sampler(
            dataset, sampler, aspect_grouping, images_per_gpu
        )
        collator = Batch3dceCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)
        num_workers = cfg.DATALOADER.NUM_WORKERS
        data_loader = torch.utils.data.DataLoader(
            dataset,
            num_workers=num_workers,
            batch_sampler=batch_sampler,
            collate_fn=collator,
        )
        data_loaders.append(data_loader)
    if is_train:
        # During training a single (possibly concatenated) loader is returned.
        assert len(data_loaders) == 1
        return data_loaders[0]
    return data_loaders
| 37.709497 | 145 | 0.676593 |
import bisect
import copy
import logging
import torch.utils.data
from maskrcnn.utils.comm import get_world_size
from maskrcnn.utils.imports import import_file
from . import datasets as D
from . import samplers
from .collate_batch import BatchCollator, Batch3dceCollator
from .transforms import build_transforms
from maskrcnn.config import cfg
def build_dataset(dataset_list, transforms, dataset_catalog, is_train=True):
if not isinstance(dataset_list, (list, tuple)):
raise RuntimeError(
"dataset_list should be a list of strings, got {}".format(dataset_list)
)
datasets = []
for dataset_name in dataset_list:
data = dataset_catalog.get(dataset_name)
factory = getattr(D, data["factory"])
args = data["args"]
if data["factory"] == "COCODataset":
args["remove_images_without_annotations"] = is_train
if data["factory"] == "PascalVOCDataset":
args["use_difficult"] = not is_train
args["transforms"] = transforms
dataset = factory(**args)
datasets.append(dataset)
if not is_train:
return datasets
dataset = datasets[0]
if len(datasets) > 1:
dataset = D.ConcatDataset(datasets)
return [dataset]
def make_data_sampler(dataset, shuffle, distributed):
if distributed:
return samplers.DistributedSampler(dataset, shuffle=shuffle)
if shuffle:
sampler = torch.utils.data.sampler.RandomSampler(dataset)
else:
sampler = torch.utils.data.sampler.SequentialSampler(dataset)
return sampler
def _quantize(x, bins):
bins = copy.copy(bins)
bins = sorted(bins)
quantized = list(map(lambda y: bisect.bisect_right(bins, y), x))
return quantized
def _compute_aspect_ratios(dataset):
aspect_ratios = []
for i in range(len(dataset)):
img_info = dataset.get_img_info(i)
aspect_ratio = float(img_info["height"]) / float(img_info["width"])
aspect_ratios.append(aspect_ratio)
return aspect_ratios
def make_batch_data_sampler(
dataset, sampler, aspect_grouping, images_per_batch, num_iters=None, start_iter=0
):
if aspect_grouping:
if not isinstance(aspect_grouping, (list, tuple)):
aspect_grouping = [aspect_grouping]
aspect_ratios = _compute_aspect_ratios(dataset)
group_ids = _quantize(aspect_ratios, aspect_grouping)
batch_sampler = samplers.GroupedBatchSampler(
sampler, group_ids, images_per_batch, drop_uneven=False
)
else:
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, images_per_batch, drop_last=cfg.DATALOADER.DROP_LAST_BATCH,
)
if num_iters is not None:
batch_sampler = samplers.IterationBasedBatchSampler(
batch_sampler, num_iters, start_iter
)
return batch_sampler
def make_datasets(split):
is_train = split == 'train'
paths_catalog = import_file(
"maskrcnn.config.paths_catalog", cfg.PATHS_CATALOG, True
)
DatasetCatalog = paths_catalog.DatasetCatalog
dataset_list = eval('cfg.DATASETS.' + split.upper())
transforms = build_transforms(is_train)
datasets = build_dataset(dataset_list, transforms, DatasetCatalog, is_train)
return datasets
def make_data_loader(datasets, is_train=True, is_distributed=False):
num_gpus = get_world_size()
if is_train:
images_per_batch = cfg.SOLVER.IMS_PER_BATCH
assert (
images_per_batch % num_gpus == 0
), "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number "
"of GPUs ({}) used.".format(images_per_batch, num_gpus)
images_per_gpu = images_per_batch // num_gpus
shuffle = True
else:
images_per_batch = cfg.TEST.IMS_PER_BATCH
assert (
images_per_batch % num_gpus == 0
), "TEST.IMS_PER_BATCH ({}) must be divisible by the number "
"of GPUs ({}) used.".format(images_per_batch, num_gpus)
images_per_gpu = images_per_batch // num_gpus
shuffle = cfg.TEST.SHUFFLE
aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []
data_loaders = []
for dataset in datasets:
sampler = make_data_sampler(dataset, shuffle, is_distributed)
batch_sampler = make_batch_data_sampler(
dataset, sampler, aspect_grouping, images_per_gpu
)
collator = Batch3dceCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)
num_workers = cfg.DATALOADER.NUM_WORKERS
data_loader = torch.utils.data.DataLoader(
dataset,
num_workers=num_workers,
batch_sampler=batch_sampler,
collate_fn=collator,
)
data_loaders.append(data_loader)
if is_train:
assert len(data_loaders) == 1
return data_loaders[0]
return data_loaders
| true | true |
f728fb56317a5077e89063574c3601f0f6fd25e2 | 590 | py | Python | connexion/openapi_spec_validator/loaders.py | peachfinance/connexion | c2dd9534868db1b68188bec62b001e4dfbb7d372 | [
"Apache-2.0"
] | null | null | null | connexion/openapi_spec_validator/loaders.py | peachfinance/connexion | c2dd9534868db1b68188bec62b001e4dfbb7d372 | [
"Apache-2.0"
] | null | null | null | connexion/openapi_spec_validator/loaders.py | peachfinance/connexion | c2dd9534868db1b68188bec62b001e4dfbb7d372 | [
"Apache-2.0"
] | null | null | null | from yaml.composer import Composer
from yaml.parser import Parser
from yaml.reader import Reader
from yaml.resolver import Resolver
from yaml.scanner import Scanner
from ..openapi_spec_validator.constructors import ExtendedSafeConstructor
class ExtendedSafeLoader(
    Reader, Scanner, Parser, Composer, ExtendedSafeConstructor, Resolver):
    """PyYAML safe loader wired to ``ExtendedSafeConstructor``.

    Composed from the same mixins as ``yaml.SafeLoader``, substituting the
    extended constructor so additional tags can be handled safely.
    """

    def __init__(self, stream):
        # Standard PyYAML loader pattern: each mixin carries its own state
        # and must be initialized explicitly, in this order.
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        ExtendedSafeConstructor.__init__(self)
        Resolver.__init__(self)
| 29.5 | 78 | 0.754237 | from yaml.composer import Composer
from yaml.parser import Parser
from yaml.reader import Reader
from yaml.resolver import Resolver
from yaml.scanner import Scanner
from ..openapi_spec_validator.constructors import ExtendedSafeConstructor
class ExtendedSafeLoader(
Reader, Scanner, Parser, Composer, ExtendedSafeConstructor, Resolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
ExtendedSafeConstructor.__init__(self)
Resolver.__init__(self)
| true | true |
f728fbb5034433c6c19528a025677582c5ec9d8b | 9,816 | py | Python | pydavis/db_manager.py | GSEL9/web-scraping | 4ff8e7539ea182a481b8044284691015136010a5 | [
"MIT"
] | 1 | 2018-06-17T18:18:48.000Z | 2018-06-17T18:18:48.000Z | pydavis/db_manager.py | GSEL9/web-scraping | 4ff8e7539ea182a481b8044284691015136010a5 | [
"MIT"
] | 1 | 2018-06-12T10:17:35.000Z | 2018-06-12T12:51:43.000Z | pydavis/db_manager.py | GSEL9/web-scraping | 4ff8e7539ea182a481b8044284691015136010a5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# db_manager.py
#
# The module is part of pydavis.
#
"""
Interacting with MySQL databases through the DatabaseManager.
"""
__author__ = 'Severin E. R. Langberg'
__email__ = 'Langberg91@gmail.no'
__status__ = 'Operational'
import pymysql
from pydavis import utils
from datetime import datetime
class DatabaseManager:
    """Handles interaction with a MySQL database.

    Args:
        user (str): MySQL username credential. Necessary for connecting.
        password (str): MySQL password credential. Necessary for connecting.
        host (str): Host where the server is running. Defaults to localhost.
        port (int): Port number. Defaults to 3306.
    """

    # Default maximum character count for VARCHAR columns in created tables.
    _CHARS_LIMIT = 20

    @classmethod
    def update_limit_varchars(cls, new_limit):
        """Set the maximum number of characters assigned to string
        attributes in created database tables.

        Args:
            new_limit (int): the new VARCHAR length limit.
        """
        utils._check_parameter('new_limit', int, new_limit)
        cls._CHARS_LIMIT = new_limit

    def __init__(self, user, password, host='localhost', port=3306):
        self.user = str(user)
        self.password = str(password)
        self.host = str(host)
        self.port = int(port)

        # Populated by connect() / use_database() during the session.
        self._con = None
        self._cur = None
        self._query = None
        self._current_db = None

    @property
    def connection(self):
        """Returns the database connection (None when disconnected)."""
        return self._con

    @property
    def current_db(self):
        """Returns the name of the working database."""
        return self._current_db

    @property
    def query(self):
        """Returns the latest MySQL query ready to be executed."""
        return self._query

    @property
    def results(self):
        """Returns the result rows from the last executed query."""
        return self._cur.fetchall()

    @property
    def limit_varchars(self):
        """Returns the current VARCHAR length limit."""
        return self._CHARS_LIMIT

    def connect(self):
        """Connects to a MySQL server; no-op if already connected."""
        if self.connection:
            print('Already connected to `{}`'.format(self.host))
            return

        try:
            con = pymysql.connect(host=self.host,
                                  port=self.port,
                                  user=self.user,
                                  passwd=self.password)
            print('Connecting to: `{}`'.format(self.host))
            self._con = con
            self._cur = self._con.cursor()
        except Exception as err:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # pass through; the original cause is chained for debugging.
            raise utils.DatabaseConnectionError('Unable to connect to: `{}`'
                                               ''.format(self.host)) from err
        return self

    def _check_connection(self):
        # Raises unless a MySQL server connection is open.
        if self.connection:
            return
        else:
            raise utils.DatabaseConnectionError('Disconnected from {}'
                                                ''.format(self.host))

    def _check_database(self):
        # Raises unless a working database has been selected.
        if self.current_db:
            return
        else:
            raise utils.MissingDatabaseError('Must specify working database')

    def execute(self):
        """Execute the pending MySQL command and commit changes."""
        self._check_connection()

        try:
            self._cur.execute(self._query)
        except Exception as err:
            raise utils.DatabaseExecutionError('Unable to execute query:\n'
                                               '`{}`'.format(self._query)) from err

        # Commit changes to database.
        try:
            self._con.commit()
        except Exception as err:
            # BUG FIX: the original formatted `self._db_name`, an attribute
            # that never exists, so the error path itself crashed with an
            # AttributeError and masked the real commit failure.
            raise utils.DatabaseCommitError('Unable to commit changes: `{}`'
                                            ''.format(self._current_db)) from err
        return self

    def create_database(self, database):
        """Prepare a CREATE DATABASE query and select it as working DB.

        Args:
            database (str): name of the database to create.
        """
        utils._check_parameter('database', str, database)

        self._current_db = database
        self._query = 'CREATE DATABASE IF NOT EXISTS {};'.format(database)
        return self

    def use_database(self, database):
        """Prepare a USE query selecting an existing database.

        Args:
            database (str): name of the new working database.
        """
        utils._check_parameter('database', str, database)

        self._current_db = database
        self._query = 'USE {}'.format(database)
        return self

    def drop_database(self, database):
        """Prepare a DROP DATABASE query.

        Args:
            database (str): name of the database to delete.
        """
        utils._check_parameter('database', str, database)

        # The working database is gone after the drop executes.
        self._current_db = None
        self._query = 'DROP DATABASE IF EXISTS {};'.format(database)
        return self

    def create_table(self, table_name, table_columns):
        """Prepare a CREATE TABLE query for the working database.

        Args:
            table_name (str): name of the new table.
            table_columns (dict): column labels mapped to Python data types;
                the types are translated to MySQL types.
        """
        self._check_database()
        utils._check_parameter('table_name', str, table_name)
        utils._check_parameter('table_columns', dict, table_columns)

        col_labels, col_dtypes = list(zip(*table_columns.items()))
        mysql_dtypes = self.convert_dtypes(col_dtypes)

        _columns = ''
        for label, dtype in zip(col_labels[:-1], mysql_dtypes[:-1]):
            _columns += ' {} {},'.format(label, dtype)
        _columns += ' {} {}'.format(col_labels[-1], mysql_dtypes[-1])

        self._query = """CREATE TABLE IF NOT EXISTS {} ({});
                      """.format(table_name, _columns)
        return self

    def convert_dtypes(self, data_types):
        """Convert Python data types to MySQL data types.

        Args:
            data_types (iterable): Python types to translate.

        Returns:
            list: the corresponding MySQL data types.
        """
        mysql_dtypes = []
        for data_type in data_types:
            if data_type is datetime:
                mysql_dtypes.append('DATETIME')
            elif data_type is float:
                mysql_dtypes.append('FLOAT')
            elif data_type is int:
                mysql_dtypes.append('INT')
            elif data_type is str:
                mysql_dtypes.append('VARCHAR({})'.format(self._CHARS_LIMIT))
            else:
                raise TypeError('Unable to recognize {} as data type'
                                ''.format(data_type))
        return mysql_dtypes

    def drop_table(self, table_name):
        """Prepare a DROP TABLE query for the working database.

        Args:
            table_name (str): name of the table.
        """
        self._check_database()
        utils._check_parameter('table_name', str, table_name)

        self._query = 'DROP TABLE IF EXISTS {};'.format(table_name)
        return self

    def describe_table(self, table_name):
        """Prepare a DESCRIBE query for the working database.

        Args:
            table_name (str): name of the table.
        """
        self._check_database()
        utils._check_parameter('table_name', str, table_name)

        self._query = 'DESCRIBE {};'.format(table_name)
        return self

    def insert_values(self, table_name, table_values):
        """Prepare an INSERT query for the working database.

        Args:
            table_name (str): name of the table.
            table_values (dict): column labels mapped to values.
        """
        self._check_database()
        utils._check_parameter('table_name', str, table_name)
        utils._check_parameter('table_values', dict, table_values)

        labels, values = list(zip(*table_values.items()))

        # NOTE(review): values are interpolated directly into the SQL string;
        # with untrusted input this is an injection risk — parameterized
        # queries (cursor.execute(sql, params)) would be safer.
        _columns, _values = '', ''
        for label, value in zip(labels[:-1], values[:-1]):
            _columns += "{}, ".format(str(label))
            _values += "'{}', ".format(str(value))
        _columns += "{}".format(str(labels[-1]))
        _values += "'{}'".format(str(values[-1]))

        self._query = """INSERT INTO {} ({}) VALUES ({});
                      """.format(table_name, _columns, _values)
        return self

    def add_constraints(self, table_name, constraints):
        raise NotImplementedError('Method currently not implemented.')

    def terminate_connection(self):
        """Shuts down the connection to the MySQL server."""
        self._con.close()
        self._cur.close()

        print('Shutting down connection to: `{}`'.format(self.host))

        # Reset connection state so connect() can be called again.
        self._con = None
        self._cur = None
        return self
| 27.266667 | 79 | 0.580685 |
__author__ = 'Severin E. R. Langberg'
__email__ = 'Langberg91@gmail.no'
__status__ = 'Operational'
import pymysql
from pydavis import utils
from datetime import datetime
class DatabaseManager:
_CHARS_LIMIT = 20
@classmethod
def update_limit_varchars(cls, new_limit):
utils._check_parameter('new_limit', int, new_limit)
cls._CHARS_LIMIT = new_limit
def __init__(self, user, password, host='localhost', port=3306):
self.user = str(user)
self.password = str(password)
self.host = str(host)
self.port = int(port)
self._con = None
self._cur = None
self._query = None
self._current_db = None
@property
def connection(self):
return self._con
@property
def current_db(self):
return self._current_db
@property
def query(self):
return self._query
@property
def results(self):
return self._cur.fetchall()
@property
def limit_varchars(self):
return self._CHARS_LIMIT
def connect(self):
if self.connection:
print('Already connected to `{}`'.format(self.host))
return
try:
con = pymysql.connect(host=self.host,
port=self.port,
user=self.user,
passwd=self.password)
print('Connecting to: `{}`'.format(self.host))
self._con = con
self._cur = self._con.cursor()
except:
raise utils.DatabaseConnectionError('Unable to connect to: `{}`'
''.format(self.host))
return self
def _check_connection(self):
if self.connection:
return
else:
raise utils.DatabaseConnectionError('Disconnected from {}'
''.format(self.host))
def _check_database(self):
if self.current_db:
return
else:
raise utils.MissingDatabaseError('Must specify working database')
def execute(self):
self._check_connection()
try:
self._cur.execute(self._query)
except:
raise utils.DatabaseExecutionError('Unable to execute query:\n'
'`{}`'.format(self._query))
try:
self._con.commit()
except:
raise utils.DatabaseCommitError('Unable to commit changes: `{}`'
''.format(self._db_name))
return self
def create_database(self, database):
utils._check_parameter('database', str, database)
self._current_db = database
self._query = 'CREATE DATABASE IF NOT EXISTS {};'.format(database)
return self
def use_database(self, database):
utils._check_parameter('database', str, database)
self._current_db = database
self._query = 'USE {}'.format(database)
return self
def drop_database(self, database):
utils._check_parameter('database', str, database)
self._current_db = None
self._query = 'DROP DATABASE IF EXISTS {};'.format(database)
return self
def create_table(self, table_name, table_columns):
self._check_database()
utils._check_parameter('table_name', str, table_name)
utils._check_parameter('table_columns', dict, table_columns)
col_labels, col_dtypes = list(zip(*table_columns.items()))
mysql_dtypes = self.convert_dtypes(col_dtypes)
_columns = ''
for label, dtype in zip(col_labels[:-1], mysql_dtypes[:-1]):
_columns += ' {} {},'.format(label, dtype)
_columns += ' {} {}'.format(col_labels[-1], mysql_dtypes[-1])
self._query = """CREATE TABLE IF NOT EXISTS {} ({});
""".format(table_name, _columns)
return self
def convert_dtypes(self, data_types):
mysql_dtypes = []
for data_type in data_types:
if data_type is datetime:
mysql_dtypes.append('DATETIME')
elif data_type is float:
mysql_dtypes.append('FLOAT')
elif data_type is int:
mysql_dtypes.append('INT')
elif data_type is str:
mysql_dtypes.append('VARCHAR({})'.format(self._CHARS_LIMIT))
else:
raise TypeError('Unable to recognize {} as data type'
''.format(data_type))
return mysql_dtypes
def drop_table(self, table_name):
self._check_database()
utils._check_parameter('table_name', str, table_name)
self._query = 'DROP TABLE IF EXISTS {};'.format(table_name)
return self
def describe_table(self, table_name):
self._check_database()
utils._check_parameter('table_name', str, table_name)
self._query = 'DESCRIBE {};'.format(table_name)
return self
def insert_values(self, table_name, table_values):
self._check_database()
utils._check_parameter('table_name', str, table_name)
utils._check_parameter('table_values', dict, table_values)
labels, values = list(zip(*table_values.items()))
_columns, _values = '', ''
for label, value in zip(labels[:-1], values[:-1]):
_columns += "{}, ".format(str(label))
_values += "'{}', ".format(str(value))
_columns += "{}".format(str(labels[-1]))
_values += "'{}'".format(str(values[-1]))
self._query = """INSERT INTO {} ({}) VALUES ({});
""".format(table_name, _columns, _values)
return self
def add_constraints(self, table_name, constraints):
raise NotImplementedError('Method currently not implemented.')
def terminate_connection(self):
self._con.close()
self._cur.close()
print('Shutting down connection to: `{}`'.format(self.host))
self._con = None
self._cur = None
return self
| true | true |
f728fbd6e877db07c02c891160f467f15e63c3f5 | 393 | py | Python | stockze/example_app/utils/robinhood_login.py | vendari12/django-ai-algotrade | f20d2691f08ec75f148cd6409b886b1dd6edac78 | [
"Apache-1.1"
] | 10 | 2021-06-25T09:54:11.000Z | 2022-03-24T19:18:13.000Z | stockze/example_app/utils/robinhood_login.py | vendari12/django-ai-algotrade | f20d2691f08ec75f148cd6409b886b1dd6edac78 | [
"Apache-1.1"
] | 12 | 2021-06-24T22:45:11.000Z | 2022-03-31T17:17:41.000Z | stockze/example_app/utils/robinhood_login.py | vendari12/django-ai-algotrade | f20d2691f08ec75f148cd6409b886b1dd6edac78 | [
"Apache-1.1"
] | 2 | 2021-08-13T21:30:06.000Z | 2022-03-24T06:05:16.000Z | import time
import robin_stocks.robinhood as rh
from pyotp import TOTP as otp
from environ import Env
env = Env()
def robinhood_login():
    """Log in to Robinhood using credentials and a TOTP seed from the environment."""
    mfa_code = otp(env('RH_DEVICE_TOKEN')).now()
    rh.authentication.login(
        username=env('RH_USERNAME'),
        password=env('RH_PASSWORD'),
        mfa_code=mfa_code,
    )
    print('logged in to robinhood.')
    # Pause after login, duration taken from the environment.
    time.sleep(float(env('RH_SLEEP')))
| 20.684211 | 44 | 0.669211 | import time
import robin_stocks.robinhood as rh
from pyotp import TOTP as otp
from environ import Env
env = Env()
def robinhood_login():
totp = otp(env('RH_DEVICE_TOKEN')).now()
rh.authentication.login(
username=env('RH_USERNAME'),
password=env('RH_PASSWORD'),
mfa_code=totp
)
print('logged in to robinhood.')
time.sleep(float(env('RH_SLEEP')))
| true | true |
f728fc01673c1c4f062e079427d221749f3040d6 | 11,111 | py | Python | src/attacks/attack.py | Kronemeyer/project-athena | 0e79cba1c4d30146326ce7bd311f69f2ee845e80 | [
"MIT"
] | 2 | 2020-10-01T08:27:13.000Z | 2020-10-01T20:23:04.000Z | src/attacks/attack.py | Kronemeyer/project-athena | 0e79cba1c4d30146326ce7bd311f69f2ee845e80 | [
"MIT"
] | 3 | 2020-09-29T13:57:24.000Z | 2020-10-01T20:26:03.000Z | src/attacks/attack.py | Kronemeyer/project-athena | 0e79cba1c4d30146326ce7bd311f69f2ee845e80 | [
"MIT"
] | 1 | 2020-12-04T07:56:32.000Z | 2020-12-04T07:56:32.000Z | """
Implement white-box attacks on top of IBM ART.
@author: Ying Meng (y(dot)meng201011(at)gmail(dot)com)
"""
import numpy as np
import torch
# from art.attacks.evasion.fast_gradient import FastGradientMethod
# from art.attacks.evasion.projected_gradient_descent import ProjectedGradientDescent
from art.attacks.evasion.carlini import CarliniL2Method, CarliniLInfMethod
from art.attacks.evasion.deepfool import DeepFool
from art.attacks.evasion.saliency_map import SaliencyMapMethod
from art.attacks.evasion.iterative_method import BasicIterativeMethod
from art.attacks.evasion.spatial_transformation import SpatialTransformation
from art.attacks.evasion.hop_skip_jump import HopSkipJump
from art.attacks.evasion.zoo import ZooAttack
from attacks.fast_gradient import FastGradientMethod
from attacks.pgd import ProjectedGradientDescent
from attacks.utils import WHITEBOX_ATTACK as ATTACK
def generate(model, data_loader, attack_args, device=None):
    """Generate adversarial examples with the configured white-box attack.

    :param model: an instance of art.classifiers.classifier; the targeted model.
    :param data_loader: a tuple of benign samples and corresponding true labels.
    :param attack_args: dictionary of adversarial configurations; the
        'attack' key selects the method.
    :param device: string; 'cuda' (for gpu) or 'cpu'. Autodetected when None.
    :return: the generated adversarial examples.
    :raises NotImplementedError: if EOT is requested for an attack other
        than FGSM or PGD.
    :raises ValueError: if the attack name is not supported.
    """
    attack = attack_args.get('attack').lower()
    eot = attack_args.get('eot')

    # EOT (Expectation Over Transformation) is wired only into FGSM and PGD.
    if eot and attack not in [ATTACK.FGSM.value, ATTACK.PGD.value]:
        raise NotImplementedError("`EOT` is not supported for {} attack yet.".format(attack))

    print(">>> Generating {}(EOT:{}) examples.".format(attack_args.get('description'),
                                                       "ON" if eot else "OFF"))

    if device is None:
        device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    images, labels = data_loader

    if attack == ATTACK.FGSM.value:
        return _fgsm(model, images, labels, attack_args)
    elif attack == ATTACK.CW.value:
        return _cw(model, images, labels, attack_args)
    elif attack == ATTACK.PGD.value:
        return _pgd(model, images, labels, attack_args)
    elif attack == ATTACK.BIM.value:
        return _bim(model, images, labels, attack_args)
    elif attack == ATTACK.JSMA.value:
        return _jsma(model, images, labels, attack_args)
    elif attack == ATTACK.DF.value:
        return _df(model, images, labels, attack_args)
    elif attack == ATTACK.MIM.value:
        return _mim(model, images, labels, attack_args)
    elif attack == ATTACK.OP.value:
        return _op(model, images, labels, attack_args)
    elif attack == ATTACK.HOP_SKIP_JUMP.value:
        # BUG FIX: the original used `raise _hop_skip_jump(...)`, which tried
        # to raise the attack's return value instead of returning the
        # adversarial examples to the caller.
        return _hop_skip_jump(model, images, labels, attack_args)
    elif attack == ATTACK.SPATIAL_TRANS.value:
        return _spatial(model, images, labels, attack_args)
    elif attack == ATTACK.ZOO.value:
        return _zoo(model, images, labels, attack_args)
    else:
        raise ValueError('{} is not supported.'.format(attack))
def _fgsm(model, data, labels, attack_args):
    """
    Fast Gradient Sign Method.
    Explaining and Harnessing Adversarial Examples
    by Ian J. Goodfellow, Jonathon Shlens, Christian Szegedy
    ``https://arxiv.org/abs/1412.6572``
    :param model: the targeted classifier.
    :param data: benign input samples.
    :param labels: true labels for ``data``.
    :param attack_args: dictionary with the adversarial configuration
        (``eps``, ``targeted``, ``num_random_init``, ``minimal``, ``eot``, ``distribution``).
    :return: the generated adversarial examples.
    """
    eps = attack_args.get('eps', 0.3)
    # The distribution is only meaningful when EOT is enabled.
    dist = attack_args.get('distribution', None) if attack_args.get("eot") else None
    fgsm = FastGradientMethod(model,
                              eps=eps,
                              eps_step=eps,
                              targeted=attack_args.get('targeted', False),
                              num_random_init=attack_args.get('num_random_init', 0),
                              minimal=attack_args.get('minimal', False),
                              distribution=dist)
    return fgsm.generate(data, labels)
def _cw(model, data, labels, attack_args):
    """
    Carlini & Wanger attack.
    Towards Evaluating the Robustness of Neural Networks
    by Nicholas Carlini, David Wagner
    ``https://arxiv.org/abs/1608.04644``
    :param model: the targeted classifier.
    :param data: benign input samples.
    :param labels: true labels for ``data``.
    :param attack_args: dictionary with the adversarial configuration.
    :return: the generated adversarial examples.
    :raises ValueError: if the configured norm is neither ``l2`` nor ``linf``.
    """
    norm = attack_args.get('norm').lower()
    # Hyper-parameters shared by both norm variants (defaults kept as before).
    shared = dict(classifier=model,
                  confidence=attack_args.get('confidence', 0.0),
                  targeted=attack_args.get('targeted', False),
                  learning_rate=attack_args.get('lr'),
                  max_iter=attack_args.get('max_iter', 10),
                  max_halving=attack_args.get('max_halving', 5),
                  max_doubling=attack_args.get('max_doubling', 5))

    if norm == 'l2':
        print('>>> Generating CW_l2 examples.')
        attacker = CarliniL2Method(binary_search_steps=attack_args.get('binary_search_steps', 10),
                                   initial_const=attack_args.get('init_const', 0.01),
                                   **shared)
    elif norm == 'linf':
        print('>>> Generating CW_linf examples.')
        attacker = CarliniLInfMethod(eps=attack_args.get('eps', 0.3), **shared)
    else:
        raise ValueError('Support `l2` and `linf` norms. But found {}'.format(norm))

    return attacker.generate(data, labels)
def _pgd(model, data, labels, attack_args):
    """
    Projected Gradient Descent.
    Towards deep learning models resistant to adversarial attacks
    by Aleksander Madry, Aleksandar Makelov, Ludwig Schmidt, Dimitris Tsipras, and Adrian Vladu.
    ``https://arxiv.org/abs/1706.06083``
    :param model: the targeted classifier.
    :param data: benign input samples.
    :param labels: true labels for ``data``.
    :param attack_args: dictionary with the adversarial configuration.
    :return: the generated adversarial examples.
    """
    eps = attack_args.get('eps', 0.3)
    # The distribution is only meaningful when EOT is enabled.
    dist = attack_args.get('distribution', None) if attack_args.get("eot") else None
    pgd = ProjectedGradientDescent(classifier=model,
                                   norm=_get_norm_value(attack_args.get('norm', 'linf')),
                                   eps=eps,
                                   eps_step=attack_args.get('eps_step', eps / 10.),
                                   max_iter=attack_args.get('max_iter', 10),
                                   targeted=attack_args.get('targeted', False),
                                   num_random_init=attack_args.get('num_random_init', 0),
                                   random_eps=attack_args.get('random_eps', False),
                                   distribution=dist)
    return pgd.generate(data, labels)
def _bim(model, data, labels, attack_args):
    """
    Basic Iterative Method.
    ADVERSARIAL EXAMPLES IN THE PHYSICAL WORLD
    Alexey Kurakin, Ian J. Goodfellow, Samy Bengio
    ``https://arxiv.org/pdf/1607.02533.pdf``
    :param model: the targeted classifier.
    :param data: benign input samples.
    :param labels: true labels for ``data``.
    :param attack_args: dictionary with the adversarial configuration.
    :return: the generated adversarial examples.
    """
    eps = attack_args.get('eps', 0.3)
    bim = BasicIterativeMethod(classifier=model,
                               eps=eps,
                               eps_step=attack_args.get('eps_step', eps / 10.),
                               max_iter=attack_args.get('max_iter', 100),
                               targeted=attack_args.get('targeted', False))
    return bim.generate(data, labels)
def _jsma(model, data, labels, attack_args):
    """Generate examples with the Jacobian-based Saliency Map Attack (JSMA)."""
    jsma = SaliencyMapMethod(classifier=model,
                             theta=attack_args.get('theta', 0.15),
                             gamma=attack_args.get('gamma', 0.5),
                             batch_size=attack_args.get('batch_size', 1))
    return jsma.generate(data, labels)
def _df(model, data, labels, attack_args):
    """Generate examples with the DeepFool attack."""
    deepfool = DeepFool(classifier=model,
                        max_iter=attack_args.get('max_iter', 100),
                        epsilon=attack_args.get('eps', 0.01),
                        nb_grads=attack_args.get('nb_grads', 10))
    return deepfool.generate(data, labels)
def _mim(model, data, labels, attack_args):
raise NotImplementedError
def _op(model, data, labels, attack_args):
raise NotImplementedError
def _spatial(model, data, labels, attack_args):
    """Generate examples with the spatial transformation attack (translations and rotations)."""
    spatial = SpatialTransformation(
        classifier=model,
        max_translation=attack_args.get('max_translation', 0.2),
        num_translations=attack_args.get('num_translations', 1),
        max_rotation=attack_args.get('max_rotation', 10),
        num_rotations=attack_args.get('num_rotations', 1))
    return spatial.generate(data, labels)
def _hop_skip_jump(model, data, labels, attack_args):
    """Generate examples with the HopSkipJump decision-based attack."""
    hsj = HopSkipJump(classifier=model,
                      targeted=attack_args.get('targeted', False),
                      norm=_get_norm_value(attack_args.get('norm', 'l2')),
                      max_iter=attack_args.get('max_iter', 50),
                      max_eval=attack_args.get('max_eval', 10000),
                      init_eval=attack_args.get('init_eval', 100),
                      init_size=attack_args.get('init_size', 100))
    return hsj.generate(data, labels)
def _zoo(model, data, labels, attack_args):
    """Generate examples with the Zeroth-Order Optimization (ZOO) black-box attack."""
    # Collect all hyper-parameters up front; defaults are unchanged.
    params = {
        'classifier': model,
        'confidence': attack_args.get('confidence', 0.0),
        'targeted': attack_args.get('targeted', False),
        'learning_rate': attack_args.get('learning_rate', 0.01),
        'max_iter': attack_args.get('max_iter', 10),
        'binary_search_steps': attack_args.get('binary_search_steps', 1),
        'initial_const': attack_args.get('init_const', 1e-3),
        'abort_early': attack_args.get('abort_early', True),
        'use_resize': attack_args.get('use_resize', True),
        'use_importance': attack_args.get('use_importance', True),
        'nb_parallel': attack_args.get('nb_parallel', 128),
        'variable_h': attack_args.get('variable_h', 1e-4),
    }
    return ZooAttack(**params).generate(data, labels)
def _get_norm_value(norm):
"""
Convert a string norm to a numeric value.
:param norm:
:return:
"""
norm = norm.lower()
if norm == 'linf':
value = np.inf
elif norm == 'l2':
value = 2
else:
raise ValueError('Support `l2` and `linf` norms. But found {}.'.format(norm))
return value
| 37.792517 | 116 | 0.676177 |
import numpy as np
import torch
from art.attacks.evasion.carlini import CarliniL2Method, CarliniLInfMethod
from art.attacks.evasion.deepfool import DeepFool
from art.attacks.evasion.saliency_map import SaliencyMapMethod
from art.attacks.evasion.iterative_method import BasicIterativeMethod
from art.attacks.evasion.spatial_transformation import SpatialTransformation
from art.attacks.evasion.hop_skip_jump import HopSkipJump
from art.attacks.evasion.zoo import ZooAttack
from attacks.fast_gradient import FastGradientMethod
from attacks.pgd import ProjectedGradientDescent
from attacks.utils import WHITEBOX_ATTACK as ATTACK
def generate(model, data_loader, attack_args, device=None):
attack = attack_args.get('attack').lower()
eot = attack_args.get('eot')
if eot and attack not in [ATTACK.FGSM.value, ATTACK.PGD.value]:
raise NotImplementedError("`EOT` is not supported for {} attack yet.".format(attack))
print(">>> Generating {}(EOT:{}) examples.".format(attack_args.get('description'),
"ON" if eot else "OFF"))
if device is None:
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
images, labels = data_loader
if attack == ATTACK.FGSM.value:
return _fgsm(model, images, labels, attack_args)
elif attack == ATTACK.CW.value:
return _cw(model, images, labels, attack_args)
elif attack == ATTACK.PGD.value:
return _pgd(model, images, labels, attack_args)
elif attack == ATTACK.BIM.value:
return _bim(model, images, labels, attack_args)
elif attack == ATTACK.JSMA.value:
return _jsma(model, images, labels, attack_args)
elif attack == ATTACK.DF.value:
return _df(model, images, labels, attack_args)
elif attack == ATTACK.MIM.value:
return _mim(model, images, labels, attack_args)
elif attack == ATTACK.OP.value:
return _op(model, images, labels, attack_args)
elif attack == ATTACK.HOP_SKIP_JUMP.value:
raise _hop_skip_jump(model, images, labels, attack_args)
elif attack == ATTACK.SPATIAL_TRANS.value:
return _spatial(model, images, labels, attack_args)
elif attack == ATTACK.ZOO.value:
return _zoo(model, images, labels, attack_args)
else:
raise ValueError('{} is not supported.'.format(attack))
def _fgsm(model, data, labels, attack_args):
eps = attack_args.get('eps', 0.3)
targeted = attack_args.get('targeted', False)
num_random_init = attack_args.get('num_random_init', 0)
minimal = attack_args.get('minimal', False)
if attack_args.get("eot"):
distribution = attack_args.get('distribution', None)
else:
distribution = None
attacker = FastGradientMethod(model, eps=eps, eps_step=eps, targeted=targeted,
num_random_init=num_random_init, minimal=minimal,
distribution=distribution)
return attacker.generate(data, labels)
def _cw(model, data, labels, attack_args):
norm = attack_args.get('norm').lower()
lr = attack_args.get('lr')
max_iter = attack_args.get('max_iter', 10)
confidence = attack_args.get('confidence', 0.0)
targeted = attack_args.get('targeted', False)
init_const = attack_args.get('init_const', 0.01)
max_halving = attack_args.get('max_halving', 5)
max_doubling = attack_args.get('max_doubling', 5)
if norm == 'l2':
print('>>> Generating CW_l2 examples.')
binary_search_steps = attack_args.get('binary_search_steps', 10)
attacker = CarliniL2Method(classifier=model, confidence=confidence, targeted=targeted, learning_rate=lr,
binary_search_steps=binary_search_steps, max_iter=max_iter,
initial_const=init_const, max_halving=max_halving,
max_doubling=max_doubling)
elif norm == 'linf':
print('>>> Generating CW_linf examples.')
eps = attack_args.get('eps', 0.3)
attacker = CarliniLInfMethod(classifier=model, confidence=confidence, targeted=targeted, learning_rate=lr,
max_iter=max_iter, max_halving=max_halving, max_doubling=max_doubling, eps=eps)
else:
raise ValueError('Support `l2` and `linf` norms. But found {}'.format(norm))
return attacker.generate(data, labels)
def _pgd(model, data, labels, attack_args):
eps = attack_args.get('eps', 0.3)
eps_step = attack_args.get('eps_step', eps/10.)
max_iter = attack_args.get('max_iter', 10)
norm = _get_norm_value(attack_args.get('norm', 'linf'))
targeted = attack_args.get('targeted', False)
num_random_init = attack_args.get('num_random_init', 0)
random_eps = attack_args.get('random_eps', False)
if attack_args.get("eot"):
distribution = attack_args.get('distribution', None)
else:
distribution = None
attacker = ProjectedGradientDescent(classifier=model, norm=norm, eps=eps, eps_step=eps_step,
max_iter=max_iter, targeted=targeted,
num_random_init=num_random_init, random_eps=random_eps,
distribution=distribution)
return attacker.generate(data, labels)
def _bim(model, data, labels, attack_args):
eps = attack_args.get('eps', 0.3)
eps_step = attack_args.get('eps_step', eps/10.)
max_iter = attack_args.get('max_iter', 100)
targeted = attack_args.get('targeted', False)
attacker = BasicIterativeMethod(classifier=model, eps=eps, eps_step=eps_step,
max_iter=max_iter, targeted=targeted)
return attacker.generate(data, labels)
def _jsma(model, data, labels, attack_args):
theta = attack_args.get('theta', 0.15)
gamma = attack_args.get('gamma', 0.5)
batch_size = attack_args.get('batch_size', 1)
attacker = SaliencyMapMethod(classifier=model, theta=theta, gamma=gamma, batch_size=batch_size)
return attacker.generate(data, labels)
def _df(model, data, labels, attack_args):
max_iter = attack_args.get('max_iter', 100)
eps = attack_args.get('eps', 0.01)
nb_grads = attack_args.get('nb_grads', 10)
attacker = DeepFool(classifier=model, max_iter=max_iter, epsilon=eps, nb_grads=nb_grads)
return attacker.generate(data, labels)
def _mim(model, data, labels, attack_args):
raise NotImplementedError
def _op(model, data, labels, attack_args):
raise NotImplementedError
def _spatial(model, data, labels, attack_args):
max_translation = attack_args.get('max_translation', 0.2)
num_translations = attack_args.get('num_translations', 1)
max_rotation = attack_args.get('max_rotation', 10)
num_rotations = attack_args.get('num_rotations', 1)
attacker = SpatialTransformation(classifier=model,
max_translation=max_translation, num_translations=num_translations,
max_rotation=max_rotation, num_rotations=num_rotations)
return attacker.generate(data, labels)
def _hop_skip_jump(model, data, labels, attack_args):
norm = _get_norm_value(attack_args.get('norm', 'l2'))
max_iter = attack_args.get('max_iter', 50)
max_eval = attack_args.get('max_eval', 10000)
init_eval = attack_args.get('init_eval', 100)
init_size = attack_args.get('init_size', 100)
targeted = attack_args.get('targeted', False)
attacker = HopSkipJump(classifier=model, targeted=targeted, norm=norm,
max_iter=max_iter, max_eval=max_eval,
init_eval=init_eval, init_size=init_size)
return attacker.generate(data, labels)
def _zoo(model, data, labels, attack_args):
lr = attack_args.get('learning_rate', 0.01)
max_iter = attack_args.get('max_iter', 10)
binary_search_steps = attack_args.get('binary_search_steps', 1)
confidence = attack_args.get('confidence', 0.0)
targeted = attack_args.get('targeted', False)
init_const = attack_args.get('init_const', 1e-3)
abort_early = attack_args.get('abort_early', True)
use_resize = attack_args.get('use_resize', True)
use_importance = attack_args.get('use_importance', True)
nb_parallel = attack_args.get('nb_parallel', 128)
variable_h = attack_args.get('variable_h', 1e-4)
attacker = ZooAttack(classifier=model, confidence=confidence, targeted=targeted,
learning_rate=lr, max_iter=max_iter, binary_search_steps=binary_search_steps,
initial_const=init_const, abort_early=abort_early, use_resize=use_resize,
use_importance=use_importance, nb_parallel=nb_parallel, variable_h=variable_h)
return attacker.generate(data, labels)
def _get_norm_value(norm):
norm = norm.lower()
if norm == 'linf':
value = np.inf
elif norm == 'l2':
value = 2
else:
raise ValueError('Support `l2` and `linf` norms. But found {}.'.format(norm))
return value
| true | true |
f728fcac6ccca36a457c58b9703f275e34dc84ed | 2,607 | py | Python | polus-cell-nuclei-segmentation/src/dsb2018_topcoders/selim/losses.py | nishaq503/polus-plugins-dl | 511689e82eb29a84761538144277d1be1af7aa44 | [
"MIT"
] | null | null | null | polus-cell-nuclei-segmentation/src/dsb2018_topcoders/selim/losses.py | nishaq503/polus-plugins-dl | 511689e82eb29a84761538144277d1be1af7aa44 | [
"MIT"
] | 1 | 2021-09-09T23:22:16.000Z | 2021-09-09T23:22:16.000Z | polus-cell-nuclei-segmentation/src/dsb2018_topcoders/selim/losses.py | nishaq503/polus-plugins-dl | 511689e82eb29a84761538144277d1be1af7aa44 | [
"MIT"
] | 4 | 2021-06-22T13:54:52.000Z | 2022-01-26T19:23:39.000Z | import keras.backend as K
from keras.losses import categorical_crossentropy
def hard_dice_coef(y_true, y_pred, smooth=1e-3):
    """Hard (rounded) Dice coefficient on channel 0, scaled to a percentage."""
    truth = K.flatten(K.round(y_true[..., 0]))
    pred = K.flatten(K.round(y_pred[..., 0]))
    overlap = K.sum(truth * pred)
    return 100. * (2. * overlap + smooth) / (K.sum(truth) + K.sum(pred) + smooth)
def hard_dice_coef_ch1(y_true, y_pred, smooth=1e-3):
    """Hard (rounded) Dice coefficient on channel 1, scaled to a percentage."""
    truth = K.flatten(K.round(y_true[..., 1]))
    pred = K.flatten(K.round(y_pred[..., 1]))
    overlap = K.sum(truth * pred)
    return 100. * (2. * overlap + smooth) / (K.sum(truth) + K.sum(pred) + smooth)
def dice_coef(y_true, y_pred, smooth=1e-3):
    """Soft Dice coefficient over all channels (smoothed to avoid division by zero)."""
    truth = K.flatten(y_true)
    pred = K.flatten(y_pred)
    overlap = K.sum(truth * pred)
    return K.mean((2. * overlap + smooth) / (K.sum(truth) + K.sum(pred) + smooth))
def dice_coef_loss(y_true, y_pred):
    """Dice loss: one minus the soft Dice coefficient."""
    return 1 - dice_coef(y_true, y_pred)
def dice_coef_loss_bce(y_true, y_pred, dice=0.5, bce=0.5):
    """Weighted sum of binary cross-entropy (weight ``bce``) and Dice loss (weight ``dice``)."""
    bce_term = binary_crossentropy(y_true, y_pred) * bce
    dice_term = dice_coef_loss(y_true, y_pred) * dice
    return bce_term + dice_term
def binary_crossentropy(y, p):
    """Mean binary cross-entropy between targets ``y`` and predictions ``p``."""
    per_element = K.binary_crossentropy(y, p)
    return K.mean(per_element)
def double_head_loss(y_true, y_pred):
    """Sum of BCE+Dice losses over the mask head (channel 0) and the contour head (channel 1)."""
    head_losses = [dice_coef_loss_bce(y_true[..., ch], y_pred[..., ch]) for ch in (0, 1)]
    return head_losses[0] + head_losses[1]
def mask_contour_mask_loss(y_true, y_pred):
    """Weighted BCE+Dice losses: mask (ch 0) + 2 * contour (ch 1) + full mask (ch 2)."""
    mask_term = dice_coef_loss_bce(y_true[..., 0], y_pred[..., 0])
    contour_term = dice_coef_loss_bce(y_true[..., 1], y_pred[..., 1])
    full_mask_term = dice_coef_loss_bce(y_true[..., 2], y_pred[..., 2])
    return mask_term + 2 * contour_term + full_mask_term
def softmax_dice_loss(y_true, y_pred):
    """0.6 * categorical cross-entropy + 0.2 * Dice loss on each of channels 0 and 1."""
    ce_term = categorical_crossentropy(y_true, y_pred) * 0.6
    dice_ch0 = dice_coef_loss(y_true[..., 0], y_pred[..., 0]) * 0.2
    dice_ch1 = dice_coef_loss(y_true[..., 1], y_pred[..., 1]) * 0.2
    return ce_term + dice_ch0 + dice_ch1
def make_loss(loss_name):
    """Resolve a loss name into a Keras-compatible loss function.

    :param loss_name: one of 'bce_dice', 'bce', 'categorical_dice',
        'double_head_loss', 'mask_contour_mask_loss'.
    :return: a callable taking ``(y_true, y_pred)``.
    :raises ValueError: if ``loss_name`` is unknown.
    """
    if loss_name == 'bce_dice':
        def loss(y, p):
            return dice_coef_loss_bce(y, p, dice=0.5, bce=0.5)

        return loss
    elif loss_name == 'bce':
        def loss(y, p):
            return dice_coef_loss_bce(y, p, dice=0, bce=1)

        return loss
    elif loss_name == 'categorical_dice':
        return softmax_dice_loss
    elif loss_name == 'double_head_loss':
        return double_head_loss
    elif loss_name == 'mask_contour_mask_loss':
        return mask_contour_mask_loss
    else:
        # BUG FIX: the exception object was created but never raised, so an
        # unknown loss name silently returned None instead of failing fast.
        raise ValueError("Unknown loss.")
| 35.22973 | 168 | 0.642501 | import keras.backend as K
from keras.losses import categorical_crossentropy
def hard_dice_coef(y_true, y_pred, smooth=1e-3):
y_true_f = K.flatten(K.round(y_true[..., 0]))
y_pred_f = K.flatten(K.round(y_pred[..., 0]))
intersection = K.sum(y_true_f * y_pred_f)
return 100. * (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
def hard_dice_coef_ch1(y_true, y_pred, smooth=1e-3):
y_true_f = K.flatten(K.round(y_true[..., 1]))
y_pred_f = K.flatten(K.round(y_pred[..., 1]))
intersection = K.sum(y_true_f * y_pred_f)
return 100. * (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
def dice_coef(y_true, y_pred, smooth=1e-3):
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
return K.mean((2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth))
def dice_coef_loss(y_true, y_pred):
return 1 - dice_coef(y_true, y_pred)
def dice_coef_loss_bce(y_true, y_pred, dice=0.5, bce=0.5):
return binary_crossentropy(y_true, y_pred) * bce + dice_coef_loss(y_true, y_pred) * dice
def binary_crossentropy(y, p):
return K.mean(K.binary_crossentropy(y, p))
def double_head_loss(y_true, y_pred):
mask_loss = dice_coef_loss_bce(y_true[..., 0], y_pred[..., 0])
contour_loss = dice_coef_loss_bce(y_true[..., 1], y_pred[..., 1])
return mask_loss + contour_loss
def mask_contour_mask_loss(y_true, y_pred):
mask_loss = dice_coef_loss_bce(y_true[..., 0], y_pred[..., 0])
contour_loss = dice_coef_loss_bce(y_true[..., 1], y_pred[..., 1])
full_mask = dice_coef_loss_bce(y_true[..., 2], y_pred[..., 2])
return mask_loss + 2 * contour_loss + full_mask
def softmax_dice_loss(y_true, y_pred):
return categorical_crossentropy(y_true, y_pred) * 0.6 + dice_coef_loss(y_true[..., 0], y_pred[..., 0]) * 0.2 + dice_coef_loss(y_true[..., 1], y_pred[..., 1]) * 0.2
def make_loss(loss_name):
if loss_name == 'bce_dice':
def loss(y, p):
return dice_coef_loss_bce(y, p, dice=0.5, bce=0.5)
return loss
elif loss_name == 'bce':
def loss(y, p):
return dice_coef_loss_bce(y, p, dice=0, bce=1)
return loss
elif loss_name == 'categorical_dice':
return softmax_dice_loss
elif loss_name == 'double_head_loss':
return double_head_loss
elif loss_name == 'mask_contour_mask_loss':
return mask_contour_mask_loss
else:
ValueError("Unknown loss.")
| true | true |
f728fd4af10f998ca802829dd4c857af4852e38b | 3,108 | py | Python | tests/components/samsungtv/test_init.py | mikan-megane/core | 837220cce40890e296920d33a623adbc11bd15a6 | [
"Apache-2.0"
] | 5 | 2018-10-23T14:15:05.000Z | 2021-11-26T06:38:44.000Z | tests/components/samsungtv/test_init.py | mikan-megane/core | 837220cce40890e296920d33a623adbc11bd15a6 | [
"Apache-2.0"
] | 79 | 2020-07-23T07:13:37.000Z | 2022-03-22T06:02:37.000Z | tests/components/samsungtv/test_init.py | mikan-megane/core | 837220cce40890e296920d33a623adbc11bd15a6 | [
"Apache-2.0"
] | 2 | 2019-06-11T12:13:14.000Z | 2020-12-24T23:17:53.000Z | """Tests for the Samsung TV Integration."""
from unittest.mock import Mock, call, patch
from homeassistant.components.media_player.const import DOMAIN, SUPPORT_TURN_ON
from homeassistant.components.samsungtv.const import (
CONF_ON_ACTION,
DOMAIN as SAMSUNGTV_DOMAIN,
METHOD_WEBSOCKET,
)
from homeassistant.components.samsungtv.media_player import SUPPORT_SAMSUNGTV
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_HOST,
CONF_METHOD,
CONF_NAME,
SERVICE_VOLUME_UP,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
# Entity id of the media player created from the mocked configuration below.
ENTITY_ID = f"{DOMAIN}.fake_name"

# Minimal YAML-style configuration for one Samsung TV using the websocket method.
MOCK_CONFIG = {
    SAMSUNGTV_DOMAIN: [
        {
            CONF_HOST: "fake_host",
            CONF_NAME: "fake_name",
            CONF_ON_ACTION: [{"delay": "00:00:01"}],
            CONF_METHOD: METHOD_WEBSOCKET,
        }
    ]
}

# Expected keyword arguments of the (mocked) Remote constructor call.
REMOTE_CALL = {
    "name": "HomeAssistant",
    "description": "HomeAssistant",
    "id": "ha.component.samsung",
    "host": MOCK_CONFIG[SAMSUNGTV_DOMAIN][0][CONF_HOST],
    "method": "legacy",
    "port": None,
    "timeout": 1,
}
async def test_setup(hass: HomeAssistant, remote: Mock):
    """Test Samsung TV integration is setup."""
    # The original code patched ``bridge.Remote`` twice: an outer ``with`` whose
    # ``as remote`` binding was immediately shadowed by an identical nested
    # ``with``. A single patch of each target is equivalent and clearer.
    with patch("homeassistant.components.samsungtv.bridge.Remote") as remote, patch(
        "homeassistant.components.samsungtv.config_flow.socket.gethostbyname",
        return_value="fake_host",
    ):
        await async_setup_component(hass, SAMSUNGTV_DOMAIN, MOCK_CONFIG)
        await hass.async_block_till_done()

        state = hass.states.get(ENTITY_ID)

        # test name and turn_on
        assert state
        assert state.name == "fake_name"
        assert (
            state.attributes[ATTR_SUPPORTED_FEATURES]
            == SUPPORT_SAMSUNGTV | SUPPORT_TURN_ON
        )

        # test host and port
        assert await hass.services.async_call(
            DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True
        )
        assert remote.call_args == call(REMOTE_CALL)
async def test_setup_duplicate_config(hass: HomeAssistant, remote: Mock, caplog):
    """Test duplicate setup of platform."""
    # Same host configured twice in a single setup call.
    duplicated_config = {
        SAMSUNGTV_DOMAIN: [
            MOCK_CONFIG[SAMSUNGTV_DOMAIN][0],
            MOCK_CONFIG[SAMSUNGTV_DOMAIN][0],
        ]
    }
    await async_setup_component(hass, SAMSUNGTV_DOMAIN, duplicated_config)
    await hass.async_block_till_done()

    # No entity may be created and the duplication must be reported in the log.
    assert hass.states.get(ENTITY_ID) is None
    assert len(hass.states.async_all()) == 0
    assert "duplicate host entries found" in caplog.text
async def test_setup_duplicate_entries(hass: HomeAssistant, remote: Mock, caplog):
    """Test duplicate setup of platform."""
    # First setup creates exactly one media player entity.
    await async_setup_component(hass, SAMSUNGTV_DOMAIN, MOCK_CONFIG)
    await hass.async_block_till_done()
    assert hass.states.get(ENTITY_ID) is not None
    assert len(hass.states.async_all()) == 1

    # A second, identical setup must not add another entity.
    await async_setup_component(hass, SAMSUNGTV_DOMAIN, MOCK_CONFIG)
    assert len(hass.states.async_all()) == 1
from homeassistant.components.media_player.const import DOMAIN, SUPPORT_TURN_ON
from homeassistant.components.samsungtv.const import (
CONF_ON_ACTION,
DOMAIN as SAMSUNGTV_DOMAIN,
METHOD_WEBSOCKET,
)
from homeassistant.components.samsungtv.media_player import SUPPORT_SAMSUNGTV
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_HOST,
CONF_METHOD,
CONF_NAME,
SERVICE_VOLUME_UP,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
ENTITY_ID = f"{DOMAIN}.fake_name"
MOCK_CONFIG = {
SAMSUNGTV_DOMAIN: [
{
CONF_HOST: "fake_host",
CONF_NAME: "fake_name",
CONF_ON_ACTION: [{"delay": "00:00:01"}],
CONF_METHOD: METHOD_WEBSOCKET,
}
]
}
REMOTE_CALL = {
"name": "HomeAssistant",
"description": "HomeAssistant",
"id": "ha.component.samsung",
"host": MOCK_CONFIG[SAMSUNGTV_DOMAIN][0][CONF_HOST],
"method": "legacy",
"port": None,
"timeout": 1,
}
async def test_setup(hass: HomeAssistant, remote: Mock):
with patch("homeassistant.components.samsungtv.bridge.Remote") as remote, patch(
"homeassistant.components.samsungtv.config_flow.socket.gethostbyname",
return_value="fake_host",
):
with patch("homeassistant.components.samsungtv.bridge.Remote") as remote:
await async_setup_component(hass, SAMSUNGTV_DOMAIN, MOCK_CONFIG)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state
assert state.name == "fake_name"
assert (
state.attributes[ATTR_SUPPORTED_FEATURES]
== SUPPORT_SAMSUNGTV | SUPPORT_TURN_ON
)
assert await hass.services.async_call(
DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True
)
assert remote.call_args == call(REMOTE_CALL)
async def test_setup_duplicate_config(hass: HomeAssistant, remote: Mock, caplog):
DUPLICATE = {
SAMSUNGTV_DOMAIN: [
MOCK_CONFIG[SAMSUNGTV_DOMAIN][0],
MOCK_CONFIG[SAMSUNGTV_DOMAIN][0],
]
}
await async_setup_component(hass, SAMSUNGTV_DOMAIN, DUPLICATE)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_ID) is None
assert len(hass.states.async_all()) == 0
assert "duplicate host entries found" in caplog.text
async def test_setup_duplicate_entries(hass: HomeAssistant, remote: Mock, caplog):
await async_setup_component(hass, SAMSUNGTV_DOMAIN, MOCK_CONFIG)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_ID)
assert len(hass.states.async_all()) == 1
await async_setup_component(hass, SAMSUNGTV_DOMAIN, MOCK_CONFIG)
assert len(hass.states.async_all()) == 1
| true | true |
f728fdc393576c7d300b95276d9f3b1aeee7cd65 | 16,567 | py | Python | toolium/driver_wrapper.py | Telefonica/toolium | 3921cf94164ae1a2cd27d94197f0b145f2498541 | [
"Apache-2.0"
] | 94 | 2016-02-15T11:32:36.000Z | 2022-02-14T12:31:42.000Z | toolium/driver_wrapper.py | Telefonica/toolium | 3921cf94164ae1a2cd27d94197f0b145f2498541 | [
"Apache-2.0"
] | 225 | 2016-03-18T16:14:21.000Z | 2022-03-30T10:21:26.000Z | toolium/driver_wrapper.py | Telefonica/toolium | 3921cf94164ae1a2cd27d94197f0b145f2498541 | [
"Apache-2.0"
] | 65 | 2016-05-12T13:23:56.000Z | 2022-02-16T08:33:18.000Z | # -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging.config
import os
import screeninfo
from toolium.config_driver import ConfigDriver
from toolium.config_parser import ExtendedConfigParser
from toolium.driver_wrappers_pool import DriverWrappersPool
from toolium.utils.driver_utils import Utils
from toolium.utils.path_utils import get_valid_filename
class DriverWrapper(object):
"""Wrapper with the webdriver and the configuration needed to execute tests
:type driver: selenium.webdriver.remote.webdriver.WebDriver or appium.webdriver.webdriver.WebDriver
:type config: toolium.config_parser.ExtendedConfigParser or configparser.ConfigParser
:type utils: toolium.utils.driver_utils.Utils
:type app_strings: dict
:type session_id: str
:type remote_node: str
:type remote_node_video_enabled: bool
:type logger: logging.Logger
:type config_properties_filenames: str
:type config_log_filename: str
:type output_log_filename: str
:type visual_baseline_directory: str
:type baseline_name: str
"""
driver = None #: webdriver instance
config = ExtendedConfigParser() #: driver configuration
utils = None #: test utils instance
app_strings = None #: mobile application strings
session_id = None #: remote webdriver session id
server_type = None #: remote server type
remote_node = None #: remote grid node
remote_node_video_enabled = False #: True if the remote grid node has the video recorder enabled
logger = None #: logger instance
# Configuration and output files
config_properties_filenames = None #: configuration filenames separated by commas
config_log_filename = None #: configuration log file
output_log_filename = None #: output log file
visual_baseline_directory = None #: folder with the baseline images
baseline_name = None #: baseline name
    def __init__(self):
        """Create a new driver wrapper.

        If the pool already contains a wrapper, the new instance inherits the
        configuration and file locations of the default wrapper; it then creates
        its own utils instance and registers itself in the wrappers pool.
        """
        if not DriverWrappersPool.is_empty():
            # Copy config object and other properties from default driver
            default_wrapper = DriverWrappersPool.get_default_wrapper()
            # deepcopy so later per-wrapper changes do not leak into the default config
            self.config = default_wrapper.config.deepcopy()
            self.logger = default_wrapper.logger
            self.config_properties_filenames = default_wrapper.config_properties_filenames
            self.config_log_filename = default_wrapper.config_log_filename
            self.output_log_filename = default_wrapper.output_log_filename
            self.visual_baseline_directory = default_wrapper.visual_baseline_directory
            self.baseline_name = default_wrapper.baseline_name

        # Create utils instance and add wrapper to the pool
        self.utils = Utils(self)
        DriverWrappersPool.add_wrapper(self)
def configure_logger(self, tc_config_log_filename=None, tc_output_log_filename=None):
"""Configure selenium instance logger
:param tc_config_log_filename: test case specific logging config file
:param tc_output_log_filename: test case specific output logger file
"""
# Get config logger filename
config_log_filename = DriverWrappersPool.get_configured_value('Config_log_filename', tc_config_log_filename,
'logging.conf')
config_log_filename = os.path.join(DriverWrappersPool.config_directory, config_log_filename)
# Configure logger only if logging filename has changed
if self.config_log_filename != config_log_filename:
# Get output logger filename
output_log_filename = DriverWrappersPool.get_configured_value('Output_log_filename', tc_output_log_filename,
'toolium.log')
output_log_filename = os.path.join(DriverWrappersPool.output_directory, output_log_filename)
output_log_filename = output_log_filename.replace('\\', '\\\\')
try:
logging.config.fileConfig(config_log_filename, {'logfilename': output_log_filename}, False)
except Exception as exc:
print("[WARN] Error reading logging config file '{}': {}".format(config_log_filename, exc))
self.config_log_filename = config_log_filename
self.output_log_filename = output_log_filename
self.logger = logging.getLogger(__name__)
def configure_properties(self, tc_config_prop_filenames=None, behave_properties=None):
"""Configure selenium instance properties
:param tc_config_prop_filenames: test case specific properties filenames
:param behave_properties: dict with behave user data properties
"""
prop_filenames = DriverWrappersPool.get_configured_value('Config_prop_filenames', tc_config_prop_filenames,
'properties.cfg;local-properties.cfg')
prop_filenames = [os.path.join(DriverWrappersPool.config_directory, filename) for filename in
prop_filenames.split(';')]
prop_filenames = ';'.join(prop_filenames)
# Configure config only if properties filename has changed
if self.config_properties_filenames != prop_filenames:
# Initialize the config object
self.config = ExtendedConfigParser.get_config_from_file(prop_filenames)
self.config_properties_filenames = prop_filenames
# Override properties with system properties
self.config.update_properties(os.environ)
# Override properties with behave userdata properties
if behave_properties:
self.config.update_properties(behave_properties)
# Modify config properties before driver creation
self.finalize_properties_configuration()
    def finalize_properties_configuration(self):
        """Hook executed after properties are loaded and before the driver is created.

        Subclasses can override this method to modify ``self.config`` programmatically.
        """
        # Override method if config properties (self.config object) need custom modifications before driver creation
        pass
def configure_visual_baseline(self):
"""Configure baseline directory"""
# Get baseline name and translate config variables
baseline_name = self.config.get_optional('VisualTests', 'baseline_name', '{Driver_type}')
baseline_name = self.config.translate_config_variables(baseline_name)
# Configure baseline directory if baseline name has changed
if self.baseline_name != baseline_name:
self.baseline_name = baseline_name
self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
get_valid_filename(baseline_name))
def update_visual_baseline(self):
"""Configure baseline directory after driver is created"""
# Update baseline with real platformVersion value
if '{PlatformVersion}' in self.baseline_name:
try:
platform_version = self.driver.desired_capabilities['platformVersion']
except KeyError:
platform_version = None
self.baseline_name = self.baseline_name.replace('{PlatformVersion}', str(platform_version))
self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
self.baseline_name)
# Update baseline with real version value
if '{Version}' in self.baseline_name:
try:
splitted_version = self.driver.desired_capabilities['version'].split('.')
version = '.'.join(splitted_version[:2])
except KeyError:
version = None
self.baseline_name = self.baseline_name.replace('{Version}', str(version))
self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
self.baseline_name)
# Update baseline with remote node value
if '{RemoteNode}' in self.baseline_name:
self.baseline_name = self.baseline_name.replace('{RemoteNode}', str(self.remote_node))
self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
self.baseline_name)
def configure(self, tc_config_files, is_selenium_test=True, behave_properties=None):
"""Configure initial selenium instance using logging and properties files for Selenium or Appium tests
:param tc_config_files: test case specific config files
:param is_selenium_test: true if test is a selenium or appium test case
:param behave_properties: dict with behave user data properties
"""
# Configure config and output directories
DriverWrappersPool.configure_common_directories(tc_config_files)
# Configure logger
self.configure_logger(tc_config_files.config_log_filename, tc_config_files.output_log_filename)
# Initialize the config object
self.configure_properties(tc_config_files.config_properties_filenames, behave_properties)
# Configure visual directories
if is_selenium_test:
driver_info = self.config.get('Driver', 'type')
DriverWrappersPool.configure_visual_directories(driver_info)
self.configure_visual_baseline()
    def connect(self, maximize=True):
        """Set up the selenium driver and connect to the server

        :param maximize: True if the driver should be maximized
        :returns: selenium driver, or None when the driver type is empty, 'api' or 'no_driver'
        """
        # Driverless test types do not need a selenium/appium session
        if not self.config.get('Driver', 'type') or self.config.get('Driver', 'type') in ['api', 'no_driver']:
            return None
        self.driver = ConfigDriver(self.config, self.utils).create_driver()
        # Save session id and remote node to download video after the test execution
        self.session_id = self.driver.session_id
        self.server_type, self.remote_node = self.utils.get_remote_node()
        if self.server_type == 'grid':
            self.remote_node_video_enabled = self.utils.is_remote_video_enabled(self.remote_node)
        else:
            # ggr and selenoid servers always record video; other servers never do
            self.remote_node_video_enabled = True if self.server_type in ['ggr', 'selenoid'] else False
        # Save app_strings in mobile tests (native apps only, when enabled in config)
        if self.is_mobile_test() and not self.is_web_test() and self.config.getboolean_optional('Driver',
                                                                                                'appium_app_strings'):
            self.app_strings = self.driver.app_strings()
        if self.is_maximizable():
            # Bounds and screen
            bounds_x, bounds_y = self.get_config_window_bounds()
            self.driver.set_window_position(bounds_x, bounds_y)
            self.logger.debug('Window bounds: %s x %s', bounds_x, bounds_y)
            # Maximize browser
            if maximize:
                # Set window size or maximize
                window_width = self.config.get_optional('Driver', 'window_width')
                window_height = self.config.get_optional('Driver', 'window_height')
                if window_width and window_height:
                    self.driver.set_window_size(window_width, window_height)
                else:
                    self.driver.maximize_window()
        # Log window size
        window_size = self.utils.get_window_size()
        self.logger.debug('Window size: %s x %s', window_size['width'], window_size['height'])
        # Update baseline
        self.update_visual_baseline()
        # Discard previous logcat logs
        self.utils.discard_logcat_logs()
        # Set implicitly wait timeout
        self.utils.set_implicitly_wait()
        return self.driver
def get_config_window_bounds(self):
"""Reads bounds from config and, if monitor is specified, modify the values to match with the specified monitor
:return: coords X and Y where set the browser window.
"""
bounds_x = int(self.config.get_optional('Driver', 'bounds_x') or 0)
bounds_y = int(self.config.get_optional('Driver', 'bounds_y') or 0)
monitor_index = int(self.config.get_optional('Driver', 'monitor') or -1)
if monitor_index > -1:
try:
monitor = screeninfo.get_monitors()[monitor_index]
bounds_x += monitor.x
bounds_y += monitor.y
except NotImplementedError:
self.logger.warning('Current environment doesn\'t support get_monitors')
return bounds_x, bounds_y
def is_android_test(self):
"""Check if actual test must be executed in an Android mobile
:returns: True if test must be executed in an Android mobile
"""
return self.utils.get_driver_name() == 'android'
def is_ios_test(self):
"""Check if actual test must be executed in an iOS mobile
:returns: True if test must be executed in an iOS mobile
"""
return self.utils.get_driver_name() in ('ios', 'iphone')
def is_mobile_test(self):
"""Check if actual test must be executed in a mobile
:returns: True if test must be executed in a mobile
"""
return self.is_android_test() or self.is_ios_test()
def is_web_test(self):
"""Check if actual test must be executed in a browser
:returns: True if test must be executed in a browser
"""
appium_browser_name = self.config.get_optional('AppiumCapabilities', 'browserName')
return not self.is_mobile_test() or appium_browser_name not in (None, '')
def is_android_web_test(self):
"""Check if actual test must be executed in a browser of an Android mobile
:returns: True if test must be executed in a browser of an Android mobile
"""
return self.is_android_test() and self.is_web_test()
def is_ios_web_test(self):
"""Check if actual test must be executed in a browser of an iOS mobile
:returns: True if test must be executed in a browser of an iOS mobile
"""
return self.is_ios_test() and self.is_web_test()
def is_maximizable(self):
"""Check if the browser is maximizable
:returns: True if the browser is maximizable
"""
return not self.is_mobile_test()
def should_reuse_driver(self, scope, test_passed, context=None):
"""Check if the driver should be reused
:param scope: execution scope (function, module, class or session)
:param test_passed: True if the test has passed
:param context: behave context
:returns: True if the driver should be reused
"""
reuse_driver = self.config.getboolean_optional('Driver', 'reuse_driver')
reuse_driver_session = self.config.getboolean_optional('Driver', 'reuse_driver_session')
restart_driver_after_failure = (self.config.getboolean_optional('Driver', 'restart_driver_after_failure') or
self.config.getboolean_optional('Driver', 'restart_driver_fail'))
if context and scope == 'function':
reuse_driver = reuse_driver or (hasattr(context, 'reuse_driver_from_tags')
and context.reuse_driver_from_tags)
return (((reuse_driver and scope == 'function') or (reuse_driver_session and scope != 'session'))
and (test_passed or not restart_driver_after_failure))
def get_driver_platform(self):
"""
Get driver platform where tests are running
:return: platform name
"""
platform = ''
if 'platform' in self.driver.desired_capabilities:
platform = self.driver.desired_capabilities['platform']
elif 'platformName' in self.driver.desired_capabilities:
platform = self.driver.desired_capabilities['platformName']
return platform
| 46.536517 | 120 | 0.669584 |
import logging.config
import os
import screeninfo
from toolium.config_driver import ConfigDriver
from toolium.config_parser import ExtendedConfigParser
from toolium.driver_wrappers_pool import DriverWrappersPool
from toolium.utils.driver_utils import Utils
from toolium.utils.path_utils import get_valid_filename
class DriverWrapper(object):
    """Wrapper of the selenium or appium driver object.

    Holds the driver instance together with its configuration, logger and
    visual-testing baseline information, and registers itself in
    DriverWrappersPool.
    """
    # Driver instance and its configuration
    driver = None
    config = ExtendedConfigParser()
    utils = None
    app_strings = None
    # Remote session information (saved to download the execution video)
    session_id = None
    server_type = None
    remote_node = None
    remote_node_video_enabled = False
    # Logger and configuration filenames
    logger = None
    config_properties_filenames = None
    config_log_filename = None
    output_log_filename = None
    # Visual testing baseline information
    visual_baseline_directory = None
    baseline_name = None
    def __init__(self):
        """Copy the configuration from the default wrapper (when the pool is
        not empty) and register this wrapper in DriverWrappersPool."""
        if not DriverWrappersPool.is_empty():
            default_wrapper = DriverWrappersPool.get_default_wrapper()
            self.config = default_wrapper.config.deepcopy()
            self.logger = default_wrapper.logger
            self.config_properties_filenames = default_wrapper.config_properties_filenames
            self.config_log_filename = default_wrapper.config_log_filename
            self.output_log_filename = default_wrapper.output_log_filename
            self.visual_baseline_directory = default_wrapper.visual_baseline_directory
            self.baseline_name = default_wrapper.baseline_name
        self.utils = Utils(self)
        DriverWrappersPool.add_wrapper(self)
    def configure_logger(self, tc_config_log_filename=None, tc_output_log_filename=None):
        """Configure logger instance from the logging config file.

        :param tc_config_log_filename: test case specific logging config filename
        :param tc_output_log_filename: test case specific output log filename
        """
        config_log_filename = DriverWrappersPool.get_configured_value('Config_log_filename', tc_config_log_filename,
                                                                      'logging.conf')
        config_log_filename = os.path.join(DriverWrappersPool.config_directory, config_log_filename)
        # Configure logger only if the logging config file has changed
        if self.config_log_filename != config_log_filename:
            output_log_filename = DriverWrappersPool.get_configured_value('Output_log_filename', tc_output_log_filename,
                                                                          'toolium.log')
            output_log_filename = os.path.join(DriverWrappersPool.output_directory, output_log_filename)
            # Escape backslashes so Windows paths survive fileConfig interpolation
            output_log_filename = output_log_filename.replace('\\', '\\\\')
            try:
                logging.config.fileConfig(config_log_filename, {'logfilename': output_log_filename}, False)
            except Exception as exc:
                print("[WARN] Error reading logging config file '{}': {}".format(config_log_filename, exc))
            self.config_log_filename = config_log_filename
            self.output_log_filename = output_log_filename
            self.logger = logging.getLogger(__name__)
    def configure_properties(self, tc_config_prop_filenames=None, behave_properties=None):
        """Configure selenium instance properties.

        :param tc_config_prop_filenames: test case specific properties filenames
        :param behave_properties: dict with behave user data properties
        """
        prop_filenames = DriverWrappersPool.get_configured_value('Config_prop_filenames', tc_config_prop_filenames,
                                                                 'properties.cfg;local-properties.cfg')
        prop_filenames = [os.path.join(DriverWrappersPool.config_directory, filename) for filename in
                          prop_filenames.split(';')]
        prop_filenames = ';'.join(prop_filenames)
        # Reload the config object only if the properties filenames have changed
        if self.config_properties_filenames != prop_filenames:
            self.config = ExtendedConfigParser.get_config_from_file(prop_filenames)
            self.config_properties_filenames = prop_filenames
        # Override properties with system and behave userdata properties
        self.config.update_properties(os.environ)
        if behave_properties:
            self.config.update_properties(behave_properties)
        self.finalize_properties_configuration()
    def finalize_properties_configuration(self):
        """Hook for subclasses: modify config properties before driver creation."""
        pass
    def configure_visual_baseline(self):
        """Configure the visual testing baseline directory from the config."""
        baseline_name = self.config.get_optional('VisualTests', 'baseline_name', '{Driver_type}')
        baseline_name = self.config.translate_config_variables(baseline_name)
        # Reconfigure the baseline directory only if the baseline name has changed
        if self.baseline_name != baseline_name:
            self.baseline_name = baseline_name
            self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
                                                          get_valid_filename(baseline_name))
    def update_visual_baseline(self):
        """Replace baseline name placeholders with values read from the driver
        session after the driver is created."""
        # Update baseline with real platformVersion value
        if '{PlatformVersion}' in self.baseline_name:
            try:
                platform_version = self.driver.desired_capabilities['platformVersion']
            except KeyError:
                platform_version = None
            self.baseline_name = self.baseline_name.replace('{PlatformVersion}', str(platform_version))
            self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
                                                          self.baseline_name)
        # Update baseline with real version value (major.minor only)
        if '{Version}' in self.baseline_name:
            try:
                splitted_version = self.driver.desired_capabilities['version'].split('.')
                version = '.'.join(splitted_version[:2])
            except KeyError:
                version = None
            self.baseline_name = self.baseline_name.replace('{Version}', str(version))
            self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
                                                          self.baseline_name)
        # Update baseline with remote node value
        if '{RemoteNode}' in self.baseline_name:
            self.baseline_name = self.baseline_name.replace('{RemoteNode}', str(self.remote_node))
            self.visual_baseline_directory = os.path.join(DriverWrappersPool.visual_baseline_directory,
                                                          self.baseline_name)
    def configure(self, tc_config_files, is_selenium_test=True, behave_properties=None):
        """Configure logging and properties for Selenium or Appium tests.

        :param tc_config_files: test case specific config files
        :param is_selenium_test: true if test is a selenium or appium test case
        :param behave_properties: dict with behave user data properties
        """
        DriverWrappersPool.configure_common_directories(tc_config_files)
        self.configure_logger(tc_config_files.config_log_filename, tc_config_files.output_log_filename)
        self.configure_properties(tc_config_files.config_properties_filenames, behave_properties)
        # Visual testing directories are only needed for selenium/appium tests
        if is_selenium_test:
            driver_info = self.config.get('Driver', 'type')
            DriverWrappersPool.configure_visual_directories(driver_info)
            self.configure_visual_baseline()
    def connect(self, maximize=True):
        """Set up the selenium driver and connect to the server.

        :param maximize: True if the driver should be maximized
        :returns: selenium driver, or None for 'api'/'no_driver' types
        """
        if not self.config.get('Driver', 'type') or self.config.get('Driver', 'type') in ['api', 'no_driver']:
            return None
        self.driver = ConfigDriver(self.config, self.utils).create_driver()
        # Save session id and remote node to download video after the test execution
        self.session_id = self.driver.session_id
        self.server_type, self.remote_node = self.utils.get_remote_node()
        if self.server_type == 'grid':
            self.remote_node_video_enabled = self.utils.is_remote_video_enabled(self.remote_node)
        else:
            self.remote_node_video_enabled = True if self.server_type in ['ggr', 'selenoid'] else False
        # Save app_strings in mobile tests (native apps only, when enabled)
        if self.is_mobile_test() and not self.is_web_test() and self.config.getboolean_optional('Driver',
                                                                                                'appium_app_strings'):
            self.app_strings = self.driver.app_strings()
        if self.is_maximizable():
            # Position the window, then set its size or maximize it
            bounds_x, bounds_y = self.get_config_window_bounds()
            self.driver.set_window_position(bounds_x, bounds_y)
            self.logger.debug('Window bounds: %s x %s', bounds_x, bounds_y)
            if maximize:
                window_width = self.config.get_optional('Driver', 'window_width')
                window_height = self.config.get_optional('Driver', 'window_height')
                if window_width and window_height:
                    self.driver.set_window_size(window_width, window_height)
                else:
                    self.driver.maximize_window()
        window_size = self.utils.get_window_size()
        self.logger.debug('Window size: %s x %s', window_size['width'], window_size['height'])
        self.update_visual_baseline()
        self.utils.discard_logcat_logs()
        self.utils.set_implicitly_wait()
        return self.driver
    def get_config_window_bounds(self):
        """Read window bounds from config, shifted by the configured monitor.

        :return: coords X and Y where to set the browser window
        """
        bounds_x = int(self.config.get_optional('Driver', 'bounds_x') or 0)
        bounds_y = int(self.config.get_optional('Driver', 'bounds_y') or 0)
        monitor_index = int(self.config.get_optional('Driver', 'monitor') or -1)
        if monitor_index > -1:
            try:
                monitor = screeninfo.get_monitors()[monitor_index]
                bounds_x += monitor.x
                bounds_y += monitor.y
            except NotImplementedError:
                self.logger.warning('Current environment doesn\'t support get_monitors')
        return bounds_x, bounds_y
    def is_android_test(self):
        """:returns: True if test must be executed in an Android mobile"""
        return self.utils.get_driver_name() == 'android'
    def is_ios_test(self):
        """:returns: True if test must be executed in an iOS mobile"""
        return self.utils.get_driver_name() in ('ios', 'iphone')
    def is_mobile_test(self):
        """:returns: True if test must be executed in a mobile"""
        return self.is_android_test() or self.is_ios_test()
    def is_web_test(self):
        """:returns: True if test must be executed in a browser"""
        appium_browser_name = self.config.get_optional('AppiumCapabilities', 'browserName')
        return not self.is_mobile_test() or appium_browser_name not in (None, '')
    def is_android_web_test(self):
        """:returns: True if test must be executed in a browser of an Android mobile"""
        return self.is_android_test() and self.is_web_test()
    def is_ios_web_test(self):
        """:returns: True if test must be executed in a browser of an iOS mobile"""
        return self.is_ios_test() and self.is_web_test()
    def is_maximizable(self):
        """:returns: True if the browser is maximizable"""
        return not self.is_mobile_test()
    def should_reuse_driver(self, scope, test_passed, context=None):
        """Check if the driver should be reused.

        :param scope: execution scope (function, module, class or session)
        :param test_passed: True if the test has passed
        :param context: behave context
        :returns: True if the driver should be reused
        """
        reuse_driver = self.config.getboolean_optional('Driver', 'reuse_driver')
        reuse_driver_session = self.config.getboolean_optional('Driver', 'reuse_driver_session')
        # 'restart_driver_fail' is the older name of 'restart_driver_after_failure'
        restart_driver_after_failure = (self.config.getboolean_optional('Driver', 'restart_driver_after_failure') or
                                        self.config.getboolean_optional('Driver', 'restart_driver_fail'))
        if context and scope == 'function':
            # Behave tags may request driver reuse for this scenario
            reuse_driver = reuse_driver or (hasattr(context, 'reuse_driver_from_tags')
                                            and context.reuse_driver_from_tags)
        return (((reuse_driver and scope == 'function') or (reuse_driver_session and scope != 'session'))
                and (test_passed or not restart_driver_after_failure))
    def get_driver_platform(self):
        """
        Get driver platform where tests are running.

        :return: platform name, or '' when no platform capability is present
        """
        platform = ''
        # 'platform' is the legacy capability name, 'platformName' the W3C one
        if 'platform' in self.driver.desired_capabilities:
            platform = self.driver.desired_capabilities['platform']
        elif 'platformName' in self.driver.desired_capabilities:
            platform = self.driver.desired_capabilities['platformName']
        return platform
| true | true |
f72900502ab88493c523bb3da0c4a24be5629b00 | 6,081 | py | Python | sdk/python/pulumi_azure_nextgen/compute/latest/ssh_public_key.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/compute/latest/ssh_public_key.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/compute/latest/ssh_public_key.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['SshPublicKey']
class SshPublicKey(pulumi.CustomResource):
    """Specifies information about the SSH public key (generated pulumi resource)."""
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 public_key: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 ssh_public_key_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Specifies information about the SSH public key.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] location: Resource location
        :param pulumi.Input[str] public_key: SSH public key used to authenticate to a virtual machine through ssh. If this property is not initially provided when the resource is created, the publicKey property will be populated when generateKeyPair is called. If the public key is provided upon resource creation, the provided public key needs to be at least 2048-bit and in ssh-rsa format.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] ssh_public_key_name: The name of the SSH public key.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
        """
        # Backwards-compatibility: accept the deprecated __name__/__opts__ arguments
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs and build the
            # property bag sent to the pulumi engine
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            if location is None:
                raise TypeError("Missing required property 'location'")
            __props__['location'] = location
            __props__['public_key'] = public_key
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if ssh_public_key_name is None:
                raise TypeError("Missing required property 'ssh_public_key_name'")
            __props__['ssh_public_key_name'] = ssh_public_key_name
            __props__['tags'] = tags
            # Output-only properties start unresolved
            __props__['name'] = None
            __props__['type'] = None
        # Aliases keep existing stacks tracking this resource across API versions
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute/v20191201:SshPublicKey"), pulumi.Alias(type_="azure-nextgen:compute/v20200601:SshPublicKey")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(SshPublicKey, __self__).__init__(
            'azure-nextgen:compute/latest:SshPublicKey',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'SshPublicKey':
        """
        Get an existing SshPublicKey resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Properties are resolved by the engine from the existing resource's state
        __props__ = dict()
        return SshPublicKey(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Resource location
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> pulumi.Output[Optional[str]]:
        """
        SSH public key used to authenticate to a virtual machine through ssh. If this property is not initially provided when the resource is created, the publicKey property will be populated when generateKeyPair is called. If the public key is provided upon resource creation, the provided public key needs to be at least 2048-bit and in ssh-rsa format.
        """
        return pulumi.get(self, "public_key")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type
        """
        return pulumi.get(self, "type")
    def translate_output_property(self, prop):
        """Map an engine (camelCase) property name to its Python (snake_case) name."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        """Map a Python (snake_case) property name to its engine (camelCase) name."""
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 43.748201 | 391 | 0.650222 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['SshPublicKey']
class SshPublicKey(pulumi.CustomResource):
    """Pulumi resource representing an Azure SSH public key (generated code)."""
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 public_key: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 ssh_public_key_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """Create an SshPublicKey resource.

        :param resource_name: name of the resource
        :param opts: options for the resource
        :param location: resource location (required)
        :param public_key: SSH public key used to authenticate to a virtual machine
        :param resource_group_name: name of the resource group (required)
        :param ssh_public_key_name: name of the SSH public key (required)
        :param tags: resource tags
        """
        # Backwards-compatibility: accept the deprecated __name__/__opts__ arguments
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs and build the
            # property bag sent to the pulumi engine
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            if location is None:
                raise TypeError("Missing required property 'location'")
            __props__['location'] = location
            __props__['public_key'] = public_key
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if ssh_public_key_name is None:
                raise TypeError("Missing required property 'ssh_public_key_name'")
            __props__['ssh_public_key_name'] = ssh_public_key_name
            __props__['tags'] = tags
            # Output-only properties start unresolved
            __props__['name'] = None
            __props__['type'] = None
        # Aliases keep existing stacks tracking this resource across API versions
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute/v20191201:SshPublicKey"), pulumi.Alias(type_="azure-nextgen:compute/v20200601:SshPublicKey")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(SshPublicKey, __self__).__init__(
            'azure-nextgen:compute/latest:SshPublicKey',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'SshPublicKey':
        """Get an existing SshPublicKey resource by name and provider id.

        Output properties are resolved by the engine from the existing
        resource's state, so the property bag starts empty.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        return SshPublicKey(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """Resource location."""
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """Resource name."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> pulumi.Output[Optional[str]]:
        """SSH public key used to authenticate to a virtual machine through ssh."""
        return pulumi.get(self, "public_key")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """Resource tags."""
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """Resource type."""
        return pulumi.get(self, "type")
    def translate_output_property(self, prop):
        """Map an engine (camelCase) property name to its Python (snake_case) name."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        """Map a Python (snake_case) property name to its engine (camelCase) name."""
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
f729008890b8db3dc09657c2381c73eda7e9edb9 | 20,265 | py | Python | ns-allinone-3.35/bake/bake/ModuleEnvironment.py | usi-systems/cc | 487aa9e322b2b01b6af3a92e38545c119e4980a3 | [
"Apache-2.0"
] | null | null | null | ns-allinone-3.35/bake/bake/ModuleEnvironment.py | usi-systems/cc | 487aa9e322b2b01b6af3a92e38545c119e4980a3 | [
"Apache-2.0"
] | null | null | null | ns-allinone-3.35/bake/bake/ModuleEnvironment.py | usi-systems/cc | 487aa9e322b2b01b6af3a92e38545c119e4980a3 | [
"Apache-2.0"
] | 1 | 2019-10-23T15:15:27.000Z | 2019-10-23T15:15:27.000Z | ###############################################################################
# Copyright (c) 2013 INRIA
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Authors: Daniel Camara <daniel.camara@inria.fr>
# Mathieu Lacage <mathieu.lacage@sophia.inria.fr>
###############################################################################
'''
ModuleEnvironment.py
 This file stores the ModuleEnvironment class, responsible for the interaction
 between Bake, the execution of third-party software, and the operating
 system.
'''
import os
import subprocess
import sys
import platform
from bake.Exceptions import TaskError
from bake.Utils import ColorTool
class ModuleEnvironment:
''' Main class to interact with the host system to execute the external
tools.
'''
_stopOnError = False
_libpaths = set([])
_binpaths = set([])
_pkgpaths = set([])
_variables = set([])
(HIGHER, LOWER, EQUAL) = range(0,3)
def __init__(self, logger, installdir, sourcedir, objdir, debug=False):
''' Internal variables initialization.'''
self._logger = logger
self._installdir = installdir
self._sourcedir = sourcedir
self._objdir = objdir
self._module_name = None
self._module_dir = None
self._module_supports_objdir = None
# self._libpaths = set([])
# self._binpaths = set([])
# self._pkgpaths = set([])
# self._variables = set([])
self._debug = debug
self._sudoEnabled = False
def _module_directory(self):
''' Returns the name of the directory of the on use module.'''
if not self._module_dir :
return self._module_name
return self._module_dir
@property
def installdir(self):
''' Returns the name of the set installation directory.'''
return self._installdir
@property
def debug(self):
''' Returns if this execution was set to show the debug messages or not.'''
return self._debug
@property
def srcdir(self):
''' Returns the directory where Bake stores the source of the present
module.
'''
try:
return os.path.join(self._sourcedir, self._module_directory())
except AttributeError as e:
raise TaskError('Missing configuration: sourcedir= %s, '
'module_directory= %s, Error: %s'
% (self._sourcedir,self._module_directory(), e))
@property
def srcrepo(self):
''' The root of the source repository, where all the sources for all
the modules will be stored.
'''
return self._sourcedir
@property
def objdir(self):
''' Returns the directory where Bake stores the object code of the
present module.
'''
if not self._module_supports_objdir:
obj = self.srcdir
else:
try:
obj = os.path.join(self.srcdir, self._objdir)
except AttributeError as e:
raise TaskError('Missing configuration: sourcedir= %s, '
'objdir= %s, Error: %s'
% (self._sourcedir, self._module_directory(), e))
return obj
@property
def sudoEnabled(self):
''' Returns the setting of the --sudo option'''
return self._sudoEnabled
@property
def stopOnErrorEnabled(self):
''' Returns the setting of the --stop_on_error option'''
return ModuleEnvironment._stopOnError
def _pkgconfig_var(self):
''' Returns the PKG_CONFIG_PATH configured environment variable.'''
return 'PKG_CONFIG_PATH'
def _pkgconfig_path(self):
''' Returns the PKG_CONFIG_PATH configured path. '''
return os.path.join(self._lib_path(), 'pkgconfig')
def _lib_var(self):
''' Returns the value of the system configured library path.'''
lib_var = {'Linux' : 'LD_LIBRARY_PATH',
'FreeBSD' : 'LD_LIBRARY_PATH',
'Darwin' : 'DYLD_LIBRARY_PATH',
'Windows' : 'PATH'}
if not platform.system() in lib_var:
sys.stderr('Error: Unsupported platform. Send email to '
'bake_support@inria.fr (%s)' % platform.system())
sys.exit(1)
return lib_var[platform.system()]
def _lib_path(self):
''' Returns the value of the library path for the in-use module.'''
return os.path.join(self._installdir, 'lib')
def _bin_var(self):
return 'PATH'
def _bin_path(self):
''' Returns the value of the binary path for the in-use module.'''
return os.path.join(self._installdir, 'bin')
def _py_var(self):
return 'PYTHONPATH'
def _py_path(self):
''' Returns the value of the python path for the in-use module.'''
return os.path.join(self._installdir, 'lib',
'python'+platform.python_version_tuple()[0]+
'.'+platform.python_version_tuple()[1],
'site-packages')
def _append_path(self, d, name, value, sep):
''' Append the variable to the system in use configuration. '''
if not name in d:
d[name] = value
else:
d[name] = d[name] + sep + value
def start_source(self, name, dir):
''' Sets the environment to be used by the given source module.'''
assert self._module_supports_objdir is None
self._module_name = name
self._module_dir = dir
self._logger.set_current_module(name)
# ensure source directory exists
if not os.path.isdir(self._sourcedir):
os.makedirs(self._sourcedir)
def end_source(self):
''' Cleans the environment regarding the informations of the last used
source module.
'''
self._module_name = None
self._module_dir = None
self._logger.clear_current_module()
def start_build(self, name, dir, supports_objdir):
''' Sets the environment to be used by the given build module.'''
# assert self._module_supports_objdir is None
self._module_name = name
self._module_dir = dir
self._module_supports_objdir = supports_objdir
self._logger.set_current_module(name)
if not os.path.isdir(self.installdir):
os.makedirs(self.installdir)
if not os.path.isdir(self.objdir):
os.makedirs(self.objdir)
def end_build(self):
''' Cleans the environment regarding the informations of the last used
build module.
'''
self._module_name = None
self._module_dir = None
self._module_supports_objdir = None
self._logger.clear_current_module()
def exist_file(self, file):
''' Finds if the file exists in the path.'''
return os.path.exists(file)
def path_list(self):
''' Return path that will be searched for executables '''
pythonpath=[]
if os.environ.get('PYTHONPATH'):
pythonpath=os.environ.get('PYTHONPATH').split(os.pathsep)
return os.environ.get('PATH').split(os.pathsep) + [self._bin_path()] + pythonpath
def _program_location(self, program):
''' Finds where the executable is located in the user's path.'''
# function to verify if the program exists on the given path
# and if it is executable
def is_exe(path):
return os.path.exists(path) and os.access(path, os.X_OK)
path, name = os.path.split(program)
# if the path for the executable was passed as part of its name
if path:
if is_exe(program):
return program
else:
# for all the directories in the path search for the executable
for path in self.path_list():
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
toFindIn=None
# search for libs with that name on the library path
index=program.find(".so") + program.find(".a")
if index>0 :
toFindIn=['/usr/lib','/usr/lib64','/usr/lib32','/usr/local/lib',
'/lib','/opt/local/lib','/opt/local/Library', '/usr/local/opt']
for libpath in self._libpaths:
toFindIn.append(libpath)
stdLibs = []
try:
libPath = os.environ[self._lib_var()]
if libPath:
stdLibs=libPath.split(os.pathsep)
except:
pass
tofindIn=toFindIn+stdLibs+[self._lib_path()]
elif program.endswith(".h"):
toFindIn=['/usr/include', '/usr/local/include', '/usr/lib','/opt/local/include', '/usr/local/opt']
if toFindIn :
for eachdir in toFindIn:
if sys.platform == "darwin":
# enable symlink walking for MacOS only (bug 2975)
for dirname, dirnames, filenames in os.walk(eachdir, True, None, True):
for filename in filenames:
if filename==name:
return os.path.join(dirname, filename)
else:
for dirname, dirnames, filenames in os.walk(eachdir):
for filename in filenames:
if filename==name:
return os.path.join(dirname, filename)
return None
def _check_version(self, found, required, match_type):
''' Checks the version of the required executable.'''
smallerSize=min(len(found),len(required))
if match_type == self.HIGHER:
for i in range(0,smallerSize):
if not found[i]:
return False
if int(found[i]) < int(required[i]):
return False
elif int(found[i]) > int(required[i]):
return True
return True
elif match_type == self.LOWER:
for i in range(0,smallerSize):
if not found[i]:
return True
if int(found[i]) > int(required[i]):
return False
elif int(found[i]) < int(required[i]):
return True
if len(found) >= len(required):
return False
return True
elif match_type == self.EQUAL:
if len(found) != len(required):
return False
for i in range(0,smallerSize):
if int(found[i]) != int(required[i]):
return False
return True
else:
assert False
def add_libpaths(self, libpaths):
''' Adds the list of paths to the in-use library path environment
variable.
'''
for element in libpaths :
self._libpaths.add(self.replace_variables(element))
def add_binpaths(self, libpaths):
''' Adds the list of paths to the in-use binary path environment
variable.
'''
for element in libpaths :
self._binpaths.add(self.replace_variables(element))
def add_pkgpaths(self, libpaths):
''' Adds the list of paths to the in-use package path environment
variable.
'''
for element in libpaths :
self._pkgpaths.add(self.replace_variables(element))
def add_variables(self, libpaths):
''' Adds/replace the list of variables to the in-use set of environment
variables.
'''
for element in libpaths :
self._variables.add(self.replace_variables(element))
def create_environment_file(self, fileName):
''' Creates the set environment file to help users to call the Bake
built modules.
'''
script = "#!/bin/bash \n#### \n# Environment setting script. Automatically generated by Bake\n####\n\n"
script = script + "if [ \"${BASH_SOURCE:-}\" == \"${0}\" ]; then \n" + \
" echo \"> Call with . bakeSetEnv.sh or source bakeSetEnv.sh\" \n" + \
" exit 1 \n" + \
"fi \n\n"
self._binpaths.add(self._bin_path())
if os.path.isdir(self._lib_path()):
self._libpaths.add(self._lib_path())
if os.path.isdir(self._lib_path()+'64'):
self._libpaths.add(self._lib_path()+'64')
if len(self._libpaths) > 0:
script = script + self.add_onPath("LD_LIBRARY_PATH", self._libpaths) + "\n"
if len(self._binpaths) > 0:
script = script + self.add_onPath("PATH", self._binpaths) + "\n"
if len(self._pkgpaths) > 0:
script = script + self.add_onPath("PKG_CONFIG_PATH", self._pkgpaths) + "\n"
from distutils.sysconfig import get_python_lib
localLibPath=''
libDir=get_python_lib()
if libDir:
begin=libDir.lower().index('python')
localLibPath=os.path.join(self._lib_path(),libDir[begin:])
script = script + self.add_onPath("PYTHONPATH", [sys.path[0],self._lib_path(),localLibPath]) + "\n"
for element in self._variables:
script = script + " export " + element + "\n"
fout = open(fileName, "w")
fout.write(script)
fout.close()
os.chmod(fileName, 0o755)
return script
def add_onPath (self, variableName, vectorPath):
''' Format the variable to be added on the system.
'''
returnString = " export " + variableName + "=\"${" + variableName + ":+${" + variableName + "}:}"
for element in vectorPath:
returnString = returnString + element + ":"
# Strip extra ':'
returnString = returnString[:-1]
returnString = returnString + "\""
return returnString
def replace_variables(self, string):
''' Replace the variables on the string, if they exist, by their
system real values.
'''
import re
tmp = string
tmp = re.sub('\$INSTALLDIR', self.installdir, tmp)
tmp = re.sub('\$OBJDIR', self.objdir, tmp)
tmp = re.sub('\$SRCDIR', self.srcdir, tmp)
return tmp
def check_program(self, program, version_arg = None,
version_regexp = None, version_required = None,
match_type=HIGHER):
'''Checks if the program, with the desired version, exists in the
system.
'''
if self._program_location(program) is None:
return False
if version_arg is None and version_regexp is None and version_required is None:
return True
else:
# This assert as it was avoided the checking of the version of the
# executable assert not (version_arg is None or version_regexp is
# None or version_required is None)
assert not (version_arg is None and version_regexp is None and version_required is None)
popen = subprocess.Popen([self._program_location(program),
version_arg],
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT)
(out, err) = popen.communicate('')
import re
reg = re.compile(version_regexp)
for line in out.splitlines():
m = reg.search(line)
if not m is None:
found = m.groups()
return self._check_version(found, version_required, match_type)
def append_to_path(self, env_vars):
"""Sets the library and binary paths."""
for libpath in self._libpaths:
self._append_path(env_vars, self._lib_var(), libpath, os.pathsep)
if self.debug:
print(" -> " + self._lib_var() + " " + libpath + " ")
self._append_path(env_vars, self._lib_var(), self._lib_path(), os.pathsep)
for libpath in self._binpaths:
self._append_path(env_vars, self._bin_var(), libpath, os.pathsep)
if self.debug:
print(" -> " + self._bin_var() + " " + libpath + " ")
self._append_path(env_vars, self._bin_var(), self._bin_path(), os.pathsep)
for libpath in self._pkgpaths:
self._append_path(env_vars, self._pkgconfig_var(), libpath, os.pathsep)
if self.debug:
print(" -> " + self._pkgconfig_var() + " " + libpath + " ")
self._append_path(env_vars, self._pkgconfig_var(), self._pkgconfig_path(), os.pathsep)
self._append_path(env_vars, self._py_var(), self._py_path(), os.pathsep)
self._append_path(env_vars, self._py_var(), os.path.join(self._installdir, 'lib'), os.pathsep)
return env_vars
def run(self, args, directory = None, env = dict(), interactive = False):
'''Executes a system program adding the libraries and over the correct
directories.
'''
if not interactive:
env_string = ''
if len(env) != 0:
env_string = ' '.join([a + '=' + b for a,b in env.items()])
try:
args_string = ' '.join(args)
except TypeError as e:
raise TaskError('Wrong argument type: %s, expected string,'
' error: %s' % (str(args), e))
self._logger.commands.write(env_string + ' ' + args_string +
' dir=' + str(directory) + '\n')
stdin = None
stdout = self._logger.stdout
stderr = self._logger.stderr
else:
stdin = sys.stdin
stdout = sys.stdout
stderr = sys.stderr
tmp = dict(list(os.environ.items()) + list(env.items()))
# sets the library and binary paths
tmp = self.append_to_path(tmp)
# Calls the third party executable with the whole context
try:
popen = subprocess.Popen(args,
stdin = stdin,
stdout = stdout,
stderr = stderr,
cwd = directory,
env = tmp)
except Exception as e:
raise TaskError('could not execute: %s %s. \nUnexpected error: %s'
% (str(directory), str(args), str(e)))
# Waits for the full execution of the third party software
retcode = popen.wait()
if retcode != 0:
raise TaskError('Subprocess failed with error %d: %s' % (retcode, str(args)))
| 37.320442 | 116 | 0.540785 | Libs+[self._lib_path()]
elif program.endswith(".h"):
toFindIn=['/usr/include', '/usr/local/include', '/usr/lib','/opt/local/include', '/usr/local/opt']
if toFindIn :
for eachdir in toFindIn:
if sys.platform == "darwin":
for dirname, dirnames, filenames in os.walk(eachdir, True, None, True):
for filename in filenames:
if filename==name:
return os.path.join(dirname, filename)
else:
for dirname, dirnames, filenames in os.walk(eachdir):
for filename in filenames:
if filename==name:
return os.path.join(dirname, filename)
return None
def _check_version(self, found, required, match_type):
smallerSize=min(len(found),len(required))
if match_type == self.HIGHER:
for i in range(0,smallerSize):
if not found[i]:
return False
if int(found[i]) < int(required[i]):
return False
elif int(found[i]) > int(required[i]):
return True
return True
elif match_type == self.LOWER:
for i in range(0,smallerSize):
if not found[i]:
return True
if int(found[i]) > int(required[i]):
return False
elif int(found[i]) < int(required[i]):
return True
if len(found) >= len(required):
return False
return True
elif match_type == self.EQUAL:
if len(found) != len(required):
return False
for i in range(0,smallerSize):
if int(found[i]) != int(required[i]):
return False
return True
else:
assert False
def add_libpaths(self, libpaths):
for element in libpaths :
self._libpaths.add(self.replace_variables(element))
def add_binpaths(self, libpaths):
for element in libpaths :
self._binpaths.add(self.replace_variables(element))
def add_pkgpaths(self, libpaths):
for element in libpaths :
self._pkgpaths.add(self.replace_variables(element))
def add_variables(self, libpaths):
for element in libpaths :
self._variables.add(self.replace_variables(element))
def create_environment_file(self, fileName):
script = "#!/bin/bash \n#### \n# Environment setting script. Automatically generated by Bake\n####\n\n"
script = script + "if [ \"${BASH_SOURCE:-}\" == \"${0}\" ]; then \n" + \
" echo \"> Call with . bakeSetEnv.sh or source bakeSetEnv.sh\" \n" + \
" exit 1 \n" + \
"fi \n\n"
self._binpaths.add(self._bin_path())
if os.path.isdir(self._lib_path()):
self._libpaths.add(self._lib_path())
if os.path.isdir(self._lib_path()+'64'):
self._libpaths.add(self._lib_path()+'64')
if len(self._libpaths) > 0:
script = script + self.add_onPath("LD_LIBRARY_PATH", self._libpaths) + "\n"
if len(self._binpaths) > 0:
script = script + self.add_onPath("PATH", self._binpaths) + "\n"
if len(self._pkgpaths) > 0:
script = script + self.add_onPath("PKG_CONFIG_PATH", self._pkgpaths) + "\n"
from distutils.sysconfig import get_python_lib
localLibPath=''
libDir=get_python_lib()
if libDir:
begin=libDir.lower().index('python')
localLibPath=os.path.join(self._lib_path(),libDir[begin:])
script = script + self.add_onPath("PYTHONPATH", [sys.path[0],self._lib_path(),localLibPath]) + "\n"
for element in self._variables:
script = script + " export " + element + "\n"
fout = open(fileName, "w")
fout.write(script)
fout.close()
os.chmod(fileName, 0o755)
return script
def add_onPath (self, variableName, vectorPath):
returnString = " export " + variableName + "=\"${" + variableName + ":+${" + variableName + "}:}"
for element in vectorPath:
returnString = returnString + element + ":"
# Strip extra ':'
returnString = returnString[:-1]
returnString = returnString + "\""
return returnString
def replace_variables(self, string):
import re
tmp = string
tmp = re.sub('\$INSTALLDIR', self.installdir, tmp)
tmp = re.sub('\$OBJDIR', self.objdir, tmp)
tmp = re.sub('\$SRCDIR', self.srcdir, tmp)
return tmp
def check_program(self, program, version_arg = None,
version_regexp = None, version_required = None,
match_type=HIGHER):
if self._program_location(program) is None:
return False
if version_arg is None and version_regexp is None and version_required is None:
return True
else:
assert not (version_arg is None and version_regexp is None and version_required is None)
popen = subprocess.Popen([self._program_location(program),
version_arg],
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT)
(out, err) = popen.communicate('')
import re
reg = re.compile(version_regexp)
for line in out.splitlines():
m = reg.search(line)
if not m is None:
found = m.groups()
return self._check_version(found, version_required, match_type)
def append_to_path(self, env_vars):
for libpath in self._libpaths:
self._append_path(env_vars, self._lib_var(), libpath, os.pathsep)
if self.debug:
print(" -> " + self._lib_var() + " " + libpath + " ")
self._append_path(env_vars, self._lib_var(), self._lib_path(), os.pathsep)
for libpath in self._binpaths:
self._append_path(env_vars, self._bin_var(), libpath, os.pathsep)
if self.debug:
print(" -> " + self._bin_var() + " " + libpath + " ")
self._append_path(env_vars, self._bin_var(), self._bin_path(), os.pathsep)
for libpath in self._pkgpaths:
self._append_path(env_vars, self._pkgconfig_var(), libpath, os.pathsep)
if self.debug:
print(" -> " + self._pkgconfig_var() + " " + libpath + " ")
self._append_path(env_vars, self._pkgconfig_var(), self._pkgconfig_path(), os.pathsep)
self._append_path(env_vars, self._py_var(), self._py_path(), os.pathsep)
self._append_path(env_vars, self._py_var(), os.path.join(self._installdir, 'lib'), os.pathsep)
return env_vars
def run(self, args, directory = None, env = dict(), interactive = False):
if not interactive:
env_string = ''
if len(env) != 0:
env_string = ' '.join([a + '=' + b for a,b in env.items()])
try:
args_string = ' '.join(args)
except TypeError as e:
raise TaskError('Wrong argument type: %s, expected string,'
' error: %s' % (str(args), e))
self._logger.commands.write(env_string + ' ' + args_string +
' dir=' + str(directory) + '\n')
stdin = None
stdout = self._logger.stdout
stderr = self._logger.stderr
else:
stdin = sys.stdin
stdout = sys.stdout
stderr = sys.stderr
tmp = dict(list(os.environ.items()) + list(env.items()))
tmp = self.append_to_path(tmp)
try:
popen = subprocess.Popen(args,
stdin = stdin,
stdout = stdout,
stderr = stderr,
cwd = directory,
env = tmp)
except Exception as e:
raise TaskError('could not execute: %s %s. \nUnexpected error: %s'
% (str(directory), str(args), str(e)))
retcode = popen.wait()
if retcode != 0:
raise TaskError('Subprocess failed with error %d: %s' % (retcode, str(args)))
| true | true |
f729009d866d7d8781b0cb23a905d6c759bb4150 | 1,307 | py | Python | runtests.py | solarmonkey/django-taggit-serializer | ed33bd44e0b5b903af9cb32c8b10236c7cd3a925 | [
"BSD-3-Clause"
] | 203 | 2015-02-23T19:29:46.000Z | 2022-02-23T09:27:59.000Z | runtests.py | adriangzz/dj-taggit-serializer | 88386daa9c981231de8be12cff032e7711dfdc4e | [
"BSD-3-Clause"
] | 50 | 2015-03-18T01:37:49.000Z | 2022-01-31T20:42:28.000Z | runtests.py | adriangzz/dj-taggit-serializer | 88386daa9c981231de8be12cff032e7711dfdc4e | [
"BSD-3-Clause"
] | 54 | 2015-03-08T17:23:54.000Z | 2022-03-19T11:34:25.000Z | import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="taggit_serializer.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"rest_framework",
"taggit",
'nose',
"django_nose",
"taggit_serializer",
"tests",
],
SITE_ID=1,
NOSE_ARGS=['-s'],
MIDDLEWARE_CLASSES=[],
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
import traceback
traceback.print_exc()
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=0)
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(failures)
if __name__ == '__main__':
run_tests(*sys.argv[1:]) | 21.783333 | 85 | 0.560061 | import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="taggit_serializer.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"rest_framework",
"taggit",
'nose',
"django_nose",
"taggit_serializer",
"tests",
],
SITE_ID=1,
NOSE_ARGS=['-s'],
MIDDLEWARE_CLASSES=[],
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
import traceback
traceback.print_exc()
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
test_runner = NoseTestSuiteRunner(verbosity=0)
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(failures)
if __name__ == '__main__':
run_tests(*sys.argv[1:]) | true | true |
f72901b0081120b8e5043dcc98793760f0fa0a8f | 862 | py | Python | translation.py | Mahmud8421/TGraph-Bot | c2228074206f49ec65b2c2ea7f146fd756be7427 | [
"MIT"
] | null | null | null | translation.py | Mahmud8421/TGraph-Bot | c2228074206f49ec65b2c2ea7f146fd756be7427 | [
"MIT"
] | null | null | null | translation.py | Mahmud8421/TGraph-Bot | c2228074206f49ec65b2c2ea7f146fd756be7427 | [
"MIT"
] | null | null | null | class Translation(object):
START_TEXT = """<b>Hai {}!!!</b>
<code>Im Simple Telegraph Uploader Bot😜</code>
<b>» You Must Join My Updates Channel for using me</b>
<i>Click Help Button For More Details</i>"""
HELP_TEXT = """<b>Hey {}!!!,Please Folow these Steps</b>
➥ <code>Send any Image, Gif or Video(Mp4 Only) below 5MB.</code>
➥ <code>Bot will send you the Telegra.ph link.</code>
<b>Available Commands</b>
/start - <b>Checking Bot Online</b>
/help - <b>How To Use Me</b>
/about - <b>More About Me</b>"""
ABOUT_TEXT = """**➥ My Name** : ```Telegraph Bot```
**➥ Creator** : <a href=https://t.me/D_ar_k_Angel>Dark Angel</a>
**➥ Credits** : ```Everyone in this journey```
**➥ Language** : ```Python3```
**➥ Library** : <a href=https://docs.pyrogram.org>Pyrogram asyncio 0.18.0</a>
**➥ Server** : ```Heroku```
**➥ Build Status** : ```V01.1 [BETA]```""" | 45.368421 | 77 | 0.61949 | class Translation(object):
START_TEXT = """<b>Hai {}!!!</b>
<code>Im Simple Telegraph Uploader Bot😜</code>
<b>» You Must Join My Updates Channel for using me</b>
<i>Click Help Button For More Details</i>"""
HELP_TEXT = """<b>Hey {}!!!,Please Folow these Steps</b>
➥ <code>Send any Image, Gif or Video(Mp4 Only) below 5MB.</code>
➥ <code>Bot will send you the Telegra.ph link.</code>
<b>Available Commands</b>
/start - <b>Checking Bot Online</b>
/help - <b>How To Use Me</b>
/about - <b>More About Me</b>"""
ABOUT_TEXT = """**➥ My Name** : ```Telegraph Bot```
**➥ Creator** : <a href=https://t.me/D_ar_k_Angel>Dark Angel</a>
**➥ Credits** : ```Everyone in this journey```
**➥ Language** : ```Python3```
**➥ Library** : <a href=https://docs.pyrogram.org>Pyrogram asyncio 0.18.0</a>
**➥ Server** : ```Heroku```
**➥ Build Status** : ```V01.1 [BETA]```""" | true | true |
f729022dfa20b18a3e8cd789e227b05f928ecf4c | 3,104 | py | Python | tests/test_toposort.py | LaudateCorpus1/libconda | 28d77a7506f76cbf537c1f0ddc947384cab15415 | [
"BSD-3-Clause"
] | 3 | 2018-01-05T20:27:44.000Z | 2018-10-22T22:58:39.000Z | tests/test_toposort.py | conda/libconda | 28d77a7506f76cbf537c1f0ddc947384cab15415 | [
"BSD-3-Clause"
] | 7 | 2016-07-22T15:21:20.000Z | 2019-04-30T11:41:38.000Z | tests/test_toposort.py | LaudateCorpus1/libconda | 28d77a7506f76cbf537c1f0ddc947384cab15415 | [
"BSD-3-Clause"
] | 9 | 2017-05-05T17:52:05.000Z | 2021-08-23T05:42:10.000Z | import unittest
from libconda.toposort import toposort, pop_key
class TopoSortTests(unittest.TestCase):
def test_pop_key(self):
key = pop_key({'a':{'b', 'c'}, 'b':{'c'}})
self.assertEqual(key, 'b')
key = pop_key({'a':{'b'}, 'b':{'c', 'a'}})
self.assertEqual(key, 'a')
key = pop_key({'a':{'b'}, 'b':{'a'}})
self.assertEqual(key, 'a')
def test_simple(self):
data = {'a':'bc', 'b':'c'}
results = toposort(data, safe=True)
self.assertEqual(results, ['c', 'b', 'a'])
results = toposort(data, safe=False)
self.assertEqual(results, ['c', 'b', 'a'])
def test_cycle(self):
data = {'a':'b', 'b':'a'}
with self.assertRaises(ValueError):
toposort(data, False)
results = toposort(data)
# Results do not have an guaranteed order
self.assertEqual(set(results), {'b', 'a'})
def test_cycle_best_effort(self):
data = {'a':'bc', 'b':'c', '1':'2', '2':'1'}
results = toposort(data)
self.assertEqual(results[:3], ['c', 'b', 'a'])
# Cycles come last
# Results do not have an guaranteed order
self.assertEqual(set(results[3:]), {'1', '2'})
def test_python_is_prioritized(self):
"""
This test checks a special invariant related to 'python' specifically.
Python is part of a cycle (pip <--> python), which can cause it to be
installed *after* packages that need python (possibly in
post-install.sh).
A special case in toposort() breaks the cycle, to ensure that python
isn't installed too late. Here, we verify that it works.
"""
# This is the actual dependency graph for python (as of the time of this writing, anyway)
data = {'python' : ['pip', 'openssl', 'readline', 'sqlite', 'tk', 'xz', 'zlib'],
'pip': ['python', 'setuptools', 'wheel'],
'setuptools' : ['python'],
'wheel' : ['python'],
'openssl' : [],
'readline' : [],
'sqlite' : [],
'tk' : [],
'xz' : [],
'zlib' : []}
# Here are some extra pure-python libs, just for good measure.
data.update({'psutil' : ['python'],
'greenlet' : ['python'],
'futures' : ['python'],
'six' : ['python']})
results = toposort(data)
# Python always comes before things that need it!
self.assertLess(results.index('python'), results.index('setuptools'))
self.assertLess(results.index('python'), results.index('wheel'))
self.assertLess(results.index('python'), results.index('pip'))
self.assertLess(results.index('python'), results.index('psutil'))
self.assertLess(results.index('python'), results.index('greenlet'))
self.assertLess(results.index('python'), results.index('futures'))
self.assertLess(results.index('python'), results.index('six'))
if __name__ == '__main__':
unittest.main()
| 36.952381 | 97 | 0.542526 | import unittest
from libconda.toposort import toposort, pop_key
class TopoSortTests(unittest.TestCase):
def test_pop_key(self):
key = pop_key({'a':{'b', 'c'}, 'b':{'c'}})
self.assertEqual(key, 'b')
key = pop_key({'a':{'b'}, 'b':{'c', 'a'}})
self.assertEqual(key, 'a')
key = pop_key({'a':{'b'}, 'b':{'a'}})
self.assertEqual(key, 'a')
def test_simple(self):
data = {'a':'bc', 'b':'c'}
results = toposort(data, safe=True)
self.assertEqual(results, ['c', 'b', 'a'])
results = toposort(data, safe=False)
self.assertEqual(results, ['c', 'b', 'a'])
def test_cycle(self):
data = {'a':'b', 'b':'a'}
with self.assertRaises(ValueError):
toposort(data, False)
results = toposort(data)
self.assertEqual(set(results), {'b', 'a'})
def test_cycle_best_effort(self):
data = {'a':'bc', 'b':'c', '1':'2', '2':'1'}
results = toposort(data)
self.assertEqual(results[:3], ['c', 'b', 'a'])
self.assertEqual(set(results[3:]), {'1', '2'})
def test_python_is_prioritized(self):
data = {'python' : ['pip', 'openssl', 'readline', 'sqlite', 'tk', 'xz', 'zlib'],
'pip': ['python', 'setuptools', 'wheel'],
'setuptools' : ['python'],
'wheel' : ['python'],
'openssl' : [],
'readline' : [],
'sqlite' : [],
'tk' : [],
'xz' : [],
'zlib' : []}
data.update({'psutil' : ['python'],
'greenlet' : ['python'],
'futures' : ['python'],
'six' : ['python']})
results = toposort(data)
self.assertLess(results.index('python'), results.index('setuptools'))
self.assertLess(results.index('python'), results.index('wheel'))
self.assertLess(results.index('python'), results.index('pip'))
self.assertLess(results.index('python'), results.index('psutil'))
self.assertLess(results.index('python'), results.index('greenlet'))
self.assertLess(results.index('python'), results.index('futures'))
self.assertLess(results.index('python'), results.index('six'))
if __name__ == '__main__':
unittest.main()
| true | true |
f729023ed17c72b9cb67e504b14c51bfc35a5b63 | 896 | py | Python | boardfarm/tests/netperf_rfc2544.py | mattsm/boardfarm | 100521fde1fb67536682cafecc2f91a6e2e8a6f8 | [
"BSD-3-Clause-Clear"
] | 40 | 2018-03-23T14:17:13.000Z | 2022-02-05T05:59:41.000Z | boardfarm/tests/netperf_rfc2544.py | mattsm/boardfarm | 100521fde1fb67536682cafecc2f91a6e2e8a6f8 | [
"BSD-3-Clause-Clear"
] | 1 | 2020-04-17T01:20:12.000Z | 2020-04-20T20:42:00.000Z | boardfarm/tests/netperf_rfc2544.py | mattsm/boardfarm | 100521fde1fb67536682cafecc2f91a6e2e8a6f8 | [
"BSD-3-Clause-Clear"
] | 9 | 2018-04-11T08:31:14.000Z | 2020-08-06T14:55:35.000Z | # Copyright (c) 2015
#
# All rights reserved.
#
# This file is distributed under the Clear BSD license.
# The full text can be found in LICENSE in the root directory.
from boardfarm.devices import prompt
from boardfarm.tests import rootfs_boot
class NetperfRFC2544(rootfs_boot.RootFSBootTest):
'''Single test to simulate RFC2544'''
def runTest(self):
board = self.dev.board
lan = self.dev.lan
for sz in ["74", "128", "256", "512", "1024", "1280", "1518"]:
print("running %s UDP test" % sz)
lan.sendline(
'netperf -H 192.168.0.1 -t UDP_STREAM -l 60 -- -m %s' % sz)
lan.expect_exact(
'netperf -H 192.168.0.1 -t UDP_STREAM -l 60 -- -m %s' % sz)
lan.expect('UDP UNIDIRECTIONAL')
lan.expect(prompt, timeout=90)
board.sendline()
board.expect(prompt)
| 32 | 75 | 0.592634 |
from boardfarm.devices import prompt
from boardfarm.tests import rootfs_boot
class NetperfRFC2544(rootfs_boot.RootFSBootTest):
def runTest(self):
board = self.dev.board
lan = self.dev.lan
for sz in ["74", "128", "256", "512", "1024", "1280", "1518"]:
print("running %s UDP test" % sz)
lan.sendline(
'netperf -H 192.168.0.1 -t UDP_STREAM -l 60 -- -m %s' % sz)
lan.expect_exact(
'netperf -H 192.168.0.1 -t UDP_STREAM -l 60 -- -m %s' % sz)
lan.expect('UDP UNIDIRECTIONAL')
lan.expect(prompt, timeout=90)
board.sendline()
board.expect(prompt)
| true | true |
f72903ba078395125701457a4993dee528185b05 | 9,825 | py | Python | neuralhydrology/datasetzoo/camelsus.py | visr/neuralhydrology | 77f6c9214945c8e857e3b9545afe8470da751cab | [
"BSD-3-Clause"
] | null | null | null | neuralhydrology/datasetzoo/camelsus.py | visr/neuralhydrology | 77f6c9214945c8e857e3b9545afe8470da751cab | [
"BSD-3-Clause"
] | null | null | null | neuralhydrology/datasetzoo/camelsus.py | visr/neuralhydrology | 77f6c9214945c8e857e3b9545afe8470da751cab | [
"BSD-3-Clause"
] | null | null | null | from pathlib import Path
from typing import Dict, List, Tuple, Union
import numpy as np
import pandas as pd
import xarray
from neuralhydrology.datasetzoo.basedataset import BaseDataset
from neuralhydrology.utils.config import Config
class CamelsUS(BaseDataset):
"""Data set class for the CAMELS US data set by [#]_ and [#]_.
Parameters
----------
cfg : Config
The run configuration.
is_train : bool
Defines if the dataset is used for training or evaluating. If True (training), means/stds for each feature
are computed and stored to the run directory. If one-hot encoding is used, the mapping for the one-hot encoding
is created and also stored to disk. If False, a `scaler` input is expected and similarly the `id_to_int` input
if one-hot encoding is used.
period : {'train', 'validation', 'test'}
Defines the period for which the data will be loaded
basin : str, optional
If passed, the data for only this basin will be loaded. Otherwise the basin(s) are read from the appropriate
basin file, corresponding to the `period`.
additional_features : List[Dict[str, pd.DataFrame]], optional
List of dictionaries, mapping from a basin id to a pandas DataFrame. This DataFrame will be added to the data
loaded from the dataset and all columns are available as 'dynamic_inputs', 'static_inputs' and
'target_variables'
id_to_int : Dict[str, int], optional
If the config argument 'use_basin_id_encoding' is True in the config and period is either 'validation' or
'test', this input is required. It is a dictionary, mapping from basin id to an integer (the one-hot encoding).
scaler : Dict[str, Union[pd.Series, xarray.DataArray]], optional
If period is either 'validation' or 'test', this input is required. It contains the means and standard
deviations for each feature and is stored to the run directory during training (train_data/train_data_scaler.p)
References
----------
.. [#] A. J. Newman, M. P. Clark, K. Sampson, A. Wood, L. E. Hay, A. Bock, R. J. Viger, D. Blodgett,
L. Brekke, J. R. Arnold, T. Hopson, and Q. Duan: Development of a large-sample watershed-scale
hydrometeorological dataset for the contiguous USA: dataset characteristics and assessment of regional
variability in hydrologic model performance. Hydrol. Earth Syst. Sci., 19, 209-223,
doi:10.5194/hess-19-209-2015, 2015
.. [#] Addor, N., Newman, A. J., Mizukami, N. and Clark, M. P.: The CAMELS data set: catchment attributes and
meteorology for large-sample studies, Hydrol. Earth Syst. Sci., 21, 5293-5313, doi:10.5194/hess-21-5293-2017,
2017.
"""
def __init__(self,
cfg: Config,
is_train: bool,
period: str,
basin: str = None,
additional_features: List[Dict[str, pd.DataFrame]] = [],
id_to_int: Dict[str, int] = {},
scaler: Dict[str, Union[pd.Series, xarray.DataArray]] = {}):
super(CamelsUS, self).__init__(cfg=cfg,
is_train=is_train,
period=period,
basin=basin,
additional_features=additional_features,
id_to_int=id_to_int,
scaler=scaler)
def _load_basin_data(self, basin: str) -> pd.DataFrame:
"""Load input and output data from text files."""
# get forcings
dfs = []
for forcing in self.cfg.forcings:
df, area = load_camels_us_forcings(self.cfg.data_dir, basin, forcing)
# rename columns
if len(self.cfg.forcings) > 1:
df = df.rename(columns={col: f"{col}_{forcing}" for col in df.columns})
dfs.append(df)
df = pd.concat(dfs, axis=1)
# add discharge
df['QObs(mm/d)'] = load_camels_us_discharge(self.cfg.data_dir, basin, area)
# replace invalid discharge values by NaNs
qobs_cols = [col for col in df.columns if "qobs" in col.lower()]
for col in qobs_cols:
df.loc[df[col] < 0, col] = np.nan
return df
def _load_attributes(self) -> pd.DataFrame:
if self.cfg.camels_attributes:
df = load_camels_us_attributes(self.cfg.data_dir, basins=self.basins)
# remove all attributes not defined in the config
drop_cols = [c for c in df.columns if c not in self.cfg.camels_attributes]
df = df.drop(drop_cols, axis=1)
return df
def load_camels_us_attributes(data_dir: Path, basins: List[str] = []) -> pd.DataFrame:
"""Load CAMELS US attributes from the dataset provided by [#]_
Parameters
----------
data_dir : Path
Path to the CAMELS US directory. This folder must contain a 'camels_attributes_v2.0' folder (the original
data set) containing the corresponding txt files for each attribute group.
basins : List[str], optional
If passed, return only attributes for the basins specified in this list. Otherwise, the attributes of all basins
are returned.
Returns
-------
pandas.DataFrame
Basin-indexed DataFrame, containing the attributes as columns.
References
----------
.. [#] Addor, N., Newman, A. J., Mizukami, N. and Clark, M. P.: The CAMELS data set: catchment attributes and
meteorology for large-sample studies, Hydrol. Earth Syst. Sci., 21, 5293-5313, doi:10.5194/hess-21-5293-2017,
2017.
"""
attributes_path = Path(data_dir) / 'camels_attributes_v2.0'
if not attributes_path.exists():
raise RuntimeError(f"Attribute folder not found at {attributes_path}")
txt_files = attributes_path.glob('camels_*.txt')
# Read-in attributes into one big dataframe
dfs = []
for txt_file in txt_files:
df_temp = pd.read_csv(txt_file, sep=';', header=0, dtype={'gauge_id': str})
df_temp = df_temp.set_index('gauge_id')
dfs.append(df_temp)
df = pd.concat(dfs, axis=1)
# convert huc column to double digit strings
df['huc'] = df['huc_02'].apply(lambda x: str(x).zfill(2))
df = df.drop('huc_02', axis=1)
if basins:
# drop rows of basins not contained in the passed list
drop_basins = [b for b in df.index if b not in basins]
df = df.drop(drop_basins, axis=0)
return df
def load_camels_us_forcings(data_dir: Path, basin: str, forcings: str) -> Tuple[pd.DataFrame, int]:
    """Load the forcing data for a basin of the CAMELS US data set.

    Parameters
    ----------
    data_dir : Path
        Path to the CAMELS US directory. This folder must contain a 'basin_mean_forcing' folder containing one
        subdirectory for each forcing. The forcing directories have to contain 18 subdirectories (for the 18 HUCS) as
        in the original CAMELS data set. In each HUC folder are the forcing files (.txt), starting with the 8-digit
        basin id.
    basin : str
        8-digit USGS identifier of the basin.
    forcings : str
        Can be e.g. 'daymet' or 'nldas', etc. Must match the folder names in the 'basin_mean_forcing' directory.

    Returns
    -------
    pd.DataFrame
        Time-indexed DataFrame, containing the forcing data.
    int
        Catchment area (m2), specified in the header of the forcing file.

    Raises
    ------
    OSError
        If the forcing directory does not exist.
    FileNotFoundError
        If no forcing file for ``basin`` exists below the forcing directory.
    """
    forcing_path = data_dir / 'basin_mean_forcing' / forcings
    if not forcing_path.is_dir():
        raise OSError(f"{forcing_path} does not exist")

    # The forcing file sits in one of the HUC subdirectories and starts with the 8-digit basin id.
    file_path = next((f for f in forcing_path.glob('**/*_forcing_leap.txt') if f.name[:8] == basin), None)
    if file_path is None:
        # Previously the message interpolated an empty list; report the searched directory instead.
        raise FileNotFoundError(f'No file for Basin {basin} at {forcing_path}')

    # Raw string avoids the invalid-escape-sequence warning for '\s+'.
    df = pd.read_csv(file_path, sep=r'\s+', header=3)
    df["date"] = pd.to_datetime(df.Year.map(str) + "/" + df.Mnth.map(str) + "/" + df.Day.map(str), format="%Y/%m/%d")
    df = df.set_index("date")

    # load area from header (third line of the forcing file)
    with open(file_path, 'r') as fp:
        content = fp.readlines()
    area = int(content[2])

    return df, area
def load_camels_us_discharge(data_dir: Path, basin: str, area: int) -> pd.Series:
    """Load the discharge data for a basin of the CAMELS US data set.

    Parameters
    ----------
    data_dir : Path
        Path to the CAMELS US directory. This folder must contain a 'usgs_streamflow' folder with 18
        subdirectories (for the 18 HUCS) as in the original CAMELS data set. In each HUC folder are the discharge
        files (.txt), starting with the 8-digit basin id.
    basin : str
        8-digit USGS identifier of the basin.
    area : int
        Catchment area (m2), used to normalize the discharge.

    Returns
    -------
    pd.Series
        Time-indexed pandas.Series of the discharge values (mm/day)

    Raises
    ------
    FileNotFoundError
        If no discharge file for ``basin`` exists below the discharge directory.
    """
    discharge_path = data_dir / 'usgs_streamflow'

    # The discharge file sits in one of the HUC subdirectories and starts with the 8-digit basin id.
    file_path = next((f for f in discharge_path.glob('**/*_streamflow_qc.txt') if f.name[:8] == basin), None)
    if file_path is None:
        # Previously the message interpolated an empty list; report the searched directory instead.
        raise FileNotFoundError(f'No file for Basin {basin} at {discharge_path}')

    col_names = ['basin', 'Year', 'Mnth', 'Day', 'QObs', 'flag']
    # Raw string avoids the invalid-escape-sequence warning for '\s+'.
    df = pd.read_csv(file_path, sep=r'\s+', header=None, names=col_names)
    df["date"] = pd.to_datetime(df.Year.map(str) + "/" + df.Mnth.map(str) + "/" + df.Day.map(str), format="%Y/%m/%d")
    df = df.set_index("date")

    # normalize discharge from cubic feet per second to mm per day
    # (28316846.592 mm^3 per ft^3; area is converted from m^2 to mm^2 via 10**6)
    df.QObs = 28316846.592 * df.QObs * 86400 / (area * 10**6)

    return df.QObs
| 41.631356 | 120 | 0.63084 | from pathlib import Path
from typing import Dict, List, Tuple, Union
import numpy as np
import pandas as pd
import xarray
from neuralhydrology.datasetzoo.basedataset import BaseDataset
from neuralhydrology.utils.config import Config
class CamelsUS(BaseDataset):
    """Dataset class for the CAMELS US data set, built on top of ``BaseDataset``."""

    def __init__(self,
                 cfg: Config,
                 is_train: bool,
                 period: str,
                 basin: str = None,
                 additional_features: List[Dict[str, pd.DataFrame]] = [],
                 id_to_int: Dict[str, int] = {},
                 scaler: Dict[str, Union[pd.Series, xarray.DataArray]] = {}):
        # All bookkeeping (loading, scaling, splitting) is delegated to the base class.
        super(CamelsUS, self).__init__(cfg=cfg,
                                       is_train=is_train,
                                       period=period,
                                       basin=basin,
                                       additional_features=additional_features,
                                       id_to_int=id_to_int,
                                       scaler=scaler)

    def _load_basin_data(self, basin: str) -> pd.DataFrame:
        """Load forcings and observed discharge for one basin into a single DataFrame."""
        frames = []
        use_suffix = len(self.cfg.forcings) > 1
        for forcing in self.cfg.forcings:
            forcing_df, area = load_camels_us_forcings(self.cfg.data_dir, basin, forcing)
            if use_suffix:
                # Disambiguate identically named columns when several forcing products are combined.
                forcing_df = forcing_df.rename(
                    columns={name: f"{name}_{forcing}" for name in forcing_df.columns})
            frames.append(forcing_df)
        data = pd.concat(frames, axis=1)
        # 'area' (taken from the last forcing file read) normalizes the discharge record.
        data['QObs(mm/d)'] = load_camels_us_discharge(self.cfg.data_dir, basin, area)
        # Negative discharge values stand in for missing observations.
        for column in [c for c in data.columns if "qobs" in c.lower()]:
            data.loc[data[column] < 0, column] = np.nan
        return data

    def _load_attributes(self) -> pd.DataFrame:
        """Return the configured subset of static catchment attributes (``None`` if none configured)."""
        if not self.cfg.camels_attributes:
            return None
        attributes = load_camels_us_attributes(self.cfg.data_dir, basins=self.basins)
        wanted = set(self.cfg.camels_attributes)
        surplus = [col for col in attributes.columns if col not in wanted]
        return attributes.drop(surplus, axis=1)
def load_camels_us_attributes(data_dir: Path, basins: List[str] = None) -> pd.DataFrame:
    """Load the static CAMELS US catchment attributes.

    Parameters
    ----------
    data_dir : Path
        CAMELS US root directory; must contain a 'camels_attributes_v2.0' folder with the attribute txt files.
    basins : List[str], optional
        If given, only the attributes of these basins are returned.

    Returns
    -------
    pd.DataFrame
        Basin-indexed DataFrame with one column per attribute.

    Raises
    ------
    RuntimeError
        If the attribute folder does not exist or contains no camels_*.txt files.
    """
    attributes_path = Path(data_dir) / 'camels_attributes_v2.0'
    if not attributes_path.exists():
        raise RuntimeError(f"Attribute folder not found at {attributes_path}")

    frames = []
    for txt_file in attributes_path.glob('camels_*.txt'):
        frame = pd.read_csv(txt_file, sep=';', header=0, dtype={'gauge_id': str})
        frames.append(frame.set_index('gauge_id'))
    # pd.concat([]) would raise an opaque ValueError; fail with a clear message instead.
    if not frames:
        raise RuntimeError(f"No camels_*.txt attribute files found in {attributes_path}")
    df = pd.concat(frames, axis=1)

    # Replace the numeric HUC column with zero-padded two-digit strings.
    df['huc'] = df['huc_02'].apply(lambda x: str(x).zfill(2))
    df = df.drop('huc_02', axis=1)

    if basins:
        # Keep only the requested basins (set for O(1) membership tests).
        wanted = set(basins)
        df = df.drop([b for b in df.index if b not in wanted], axis=0)
    return df
def load_camels_us_forcings(data_dir: Path, basin: str, forcings: str) -> Tuple[pd.DataFrame, int]:
    """Load one forcing product for a basin of the CAMELS US data set.

    Parameters
    ----------
    data_dir : Path
        CAMELS US root directory; must contain a 'basin_mean_forcing/<forcings>' directory tree.
    basin : str
        8-digit USGS identifier of the basin.
    forcings : str
        Name of the forcing product subdirectory, e.g. 'daymet' or 'nldas'.

    Returns
    -------
    pd.DataFrame
        Time-indexed DataFrame with the forcing data.
    int
        Catchment area (m2) read from the file header.

    Raises
    ------
    OSError
        If the forcing directory does not exist.
    FileNotFoundError
        If no forcing file for ``basin`` exists below the forcing directory.
    """
    forcing_path = data_dir / 'basin_mean_forcing' / forcings
    if not forcing_path.is_dir():
        raise OSError(f"{forcing_path} does not exist")

    # The forcing file sits in one of the HUC subdirectories and starts with the 8-digit basin id.
    file_path = next((f for f in forcing_path.glob('**/*_forcing_leap.txt') if f.name[:8] == basin), None)
    if file_path is None:
        # Previously the message interpolated an empty list; report the searched directory instead.
        raise FileNotFoundError(f'No file for Basin {basin} at {forcing_path}')

    # Raw string avoids the invalid-escape-sequence warning for '\s+'.
    df = pd.read_csv(file_path, sep=r'\s+', header=3)
    df["date"] = pd.to_datetime(df.Year.map(str) + "/" + df.Mnth.map(str) + "/" + df.Day.map(str), format="%Y/%m/%d")
    df = df.set_index("date")

    # Catchment area is stored on the third header line of the forcing file.
    with open(file_path, 'r') as fp:
        content = fp.readlines()
    area = int(content[2])
    return df, area
def load_camels_us_discharge(data_dir: Path, basin: str, area: int) -> pd.Series:
    """Load the observed discharge for a basin of the CAMELS US data set.

    Parameters
    ----------
    data_dir : Path
        CAMELS US root directory; must contain a 'usgs_streamflow' directory tree.
    basin : str
        8-digit USGS identifier of the basin.
    area : int
        Catchment area (m2), used to normalize the discharge.

    Returns
    -------
    pd.Series
        Time-indexed Series of discharge values (mm/day).

    Raises
    ------
    FileNotFoundError
        If no discharge file for ``basin`` exists below the discharge directory.
    """
    discharge_path = data_dir / 'usgs_streamflow'

    # The discharge file sits in one of the HUC subdirectories and starts with the 8-digit basin id.
    file_path = next((f for f in discharge_path.glob('**/*_streamflow_qc.txt') if f.name[:8] == basin), None)
    if file_path is None:
        # Previously the message interpolated an empty list; report the searched directory instead.
        raise FileNotFoundError(f'No file for Basin {basin} at {discharge_path}')

    col_names = ['basin', 'Year', 'Mnth', 'Day', 'QObs', 'flag']
    # Raw string avoids the invalid-escape-sequence warning for '\s+'.
    df = pd.read_csv(file_path, sep=r'\s+', header=None, names=col_names)
    df["date"] = pd.to_datetime(df.Year.map(str) + "/" + df.Mnth.map(str) + "/" + df.Day.map(str), format="%Y/%m/%d")
    df = df.set_index("date")

    # Normalize discharge from cubic feet per second to mm per day
    # (28316846.592 mm^3 per ft^3; area converted from m^2 to mm^2 via 10**6).
    df.QObs = 28316846.592 * df.QObs * 86400 / (area * 10**6)
    return df.QObs
| true | true |
f72903d52ab156ace0f6587404165e6f3dd3d608 | 754 | py | Python | app/crud/crud_actor.py | luovkle/FastAPI-Movie-Manager | 716eeb66a63a47488c4f3fa390321361fd94dbc6 | [
"MIT"
] | null | null | null | app/crud/crud_actor.py | luovkle/FastAPI-Movie-Manager | 716eeb66a63a47488c4f3fa390321361fd94dbc6 | [
"MIT"
] | null | null | null | app/crud/crud_actor.py | luovkle/FastAPI-Movie-Manager | 716eeb66a63a47488c4f3fa390321361fd94dbc6 | [
"MIT"
] | null | null | null | from typing import Optional
from sqlalchemy.orm import Session
from app.models.actor import Actor
from app.schemas.actor import ActorCreate, ActorInDB
from app.core.exceptions import exceptions
class CRUDActor:
def get_by_name(self, db: Session, name: str) -> Optional[Actor]:
return db.query(Actor).filter(Actor.name == name).first()
def create(self, db: Session, actor_create: ActorCreate) -> Actor:
if self.get_by_name(db, actor_create.name):
raise exceptions.NAME_NOT_AVAILABLE
actor_in_db = ActorInDB(**actor_create.dict())
actor_obj = Actor(**actor_in_db.dict())
db.add(actor_obj)
db.commit()
db.refresh(actor_obj)
return actor_obj
# Shared module-level CRUDActor instance for importers of this module.
crud_actor = CRUDActor()
| 29 | 70 | 0.696286 | from typing import Optional
from sqlalchemy.orm import Session
from app.models.actor import Actor
from app.schemas.actor import ActorCreate, ActorInDB
from app.core.exceptions import exceptions
class CRUDActor:
def get_by_name(self, db: Session, name: str) -> Optional[Actor]:
return db.query(Actor).filter(Actor.name == name).first()
def create(self, db: Session, actor_create: ActorCreate) -> Actor:
if self.get_by_name(db, actor_create.name):
raise exceptions.NAME_NOT_AVAILABLE
actor_in_db = ActorInDB(**actor_create.dict())
actor_obj = Actor(**actor_in_db.dict())
db.add(actor_obj)
db.commit()
db.refresh(actor_obj)
return actor_obj
# Shared module-level CRUDActor instance for importers of this module.
crud_actor = CRUDActor()
| true | true |
f72903f757a2aae5f78e0e7be21844fecc72d6ad | 153 | py | Python | src/poetry/console/exceptions.py | hadialqattan/poetry | 02444308831358c26595c0f36ea263a3e02cb5d4 | [
"MIT"
] | 2 | 2022-01-15T20:22:15.000Z | 2022-01-16T09:17:11.000Z | src/poetry/console/exceptions.py | hadialqattan/poetry | 02444308831358c26595c0f36ea263a3e02cb5d4 | [
"MIT"
] | 1 | 2022-02-22T05:52:32.000Z | 2022-02-22T05:52:32.000Z | src/poetry/console/exceptions.py | hadialqattan/poetry | 02444308831358c26595c0f36ea263a3e02cb5d4 | [
"MIT"
] | 1 | 2022-03-19T12:13:53.000Z | 2022-03-19T12:13:53.000Z | from __future__ import annotations
from cleo.exceptions import CleoSimpleException
class PoetrySimpleConsoleException(CleoSimpleException):
pass
| 17 | 56 | 0.849673 | from __future__ import annotations
from cleo.exceptions import CleoSimpleException
class PoetrySimpleConsoleException(CleoSimpleException):
pass
| true | true |
f72904a7ccce5bbd9512c0085594cb8f444db9ba | 17,865 | py | Python | python/tests/test_client.py | ground-context/grit | 3edd0a6c3ec2797fcd090690d6075db0c1965f5f | [
"Apache-2.0"
] | 1 | 2018-03-01T03:29:17.000Z | 2018-03-01T03:29:17.000Z | python/tests/test_client.py | ground-context/grit | 3edd0a6c3ec2797fcd090690d6075db0c1965f5f | [
"Apache-2.0"
] | 1 | 2018-03-22T16:57:55.000Z | 2018-03-22T16:57:55.000Z | python/tests/test_client.py | ground-context/grit | 3edd0a6c3ec2797fcd090690d6075db0c1965f5f | [
"Apache-2.0"
] | 2 | 2018-11-20T19:53:16.000Z | 2021-02-25T15:48:46.000Z | import unittest
import uuid
import ground.client as client
class TestClient(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = client.GroundClient()
def test_node(self):
"""
Tests most of the node access methods
"""
source_key = uuid.uuid4().hex
node = self.client.create_node(source_key, source_key)
# test created node is valid
self.assertTrue(
node is not None,
msg="create_node with source_key = {} returned None instead of a node"
.format(source_key)
)
self.assertTrue(
source_key == node.get_source_key(),
msg="node created with source key {} has a differing source key: {}"
.format(source_key, node.get_source_key())
)
retrieved_node = self.client.get_node(source_key)
# test retrieved node_version is valid
self.assertTrue(
retrieved_node is not None,
msg='valid call to get_node returned None'
)
self.assertTrue(
retrieved_node == node,
msg='valid call to get_node returned incorrect data'
)
node_version = self.client.create_node_version(node.get_id())
# test created node_version is valid
self.assertTrue(
node_version is not None,
msg='create_node_version with node_id={} returned None instead of a node version'
.format(node.get_id())
)
self.assertTrue(
node_version.get_node_id() == node.get_id(),
msg="created node_version's node_id does not match id of node"
)
retrieved_nv = self.client.get_node_version(node_version.get_id())
# test retrieved node_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_node_version returned None'
)
self.assertTrue(
retrieved_nv == node_version,
msg='valid call to get_node_version returned incorrect data'
)
latest = self.client.get_node_latest_versions(source_key)
# test latest_version matches created node_version
self.assertTrue(
latest == [node_version.get_id()],
msg="get_node_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_node_history(source_key)
# expected_history = {
# '0': [node_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_node_history did not match expected value"
# )
def test_graph(self):
"""
Tests most of the node access methods
"""
source_key = uuid.uuid4().hex
graph = self.client.create_graph(source_key, source_key)
# test created graph is valid
self.assertTrue(
graph is not None,
msg="create_graph with source_key = {} returned None instead of a graph"
.format(source_key)
)
self.assertTrue(
source_key == graph.get_source_key(),
msg="graph created with source key {} has a differing source key: {}"
.format(source_key, graph.get_source_key())
)
retrieved_graph = self.client.get_graph(source_key)
# test retrieved graph_version is valid
self.assertTrue(
retrieved_graph is not None,
msg='valid call to get_graph returned None'
)
self.assertTrue(
retrieved_graph == graph,
msg='valid call to get_graph returned incorrect data'
)
graph_version = self.client.create_graph_version(graph.get_id(), [])
# test created graph_version is valid
self.assertTrue(
graph_version is not None,
msg='create_graph_version with graph_id={} returned None instead of a graph version'
.format(graph.get_id())
)
self.assertTrue(
graph_version.get_graph_id() == graph.get_id(),
msg="created graph_version's graph_id does not match id of graph"
)
retrieved_gv = self.client.get_graph_version(graph_version.get_id())
# test retrieved graph_version is valid
self.assertTrue(
retrieved_gv is not None,
msg='valid call to get_graph_version returned None'
)
self.assertTrue(
retrieved_gv == graph_version,
msg='valid call to get_graph_version returned incorrect data'
)
latest = self.client.get_graph_latest_versions(source_key)
# test latest_version matches created graph_version
self.assertTrue(
latest == [graph_version.get_id()],
msg="get_graph_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_graph_history(source_key)
# expected_history = {
# '0': [graph_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_graph_history did not match expected value"
# )
def test_edge(self):
"""
Tests most of the node access methods
"""
# create nodes/nodeversions for edges
node1_source_key = uuid.uuid4().hex
node2_source_key = uuid.uuid4().hex
node1 = self.client.create_node(node1_source_key, node1_source_key)
node2 = self.client.create_node(node2_source_key, node2_source_key)
nv1 = self.client.create_node_version(node1.get_id())
nv2 = self.client.create_node_version(node2.get_id())
source_key = uuid.uuid4().hex
edge = self.client.create_edge(source_key, source_key, node1.get_id(), node2.get_id())
# test created edge is valid
self.assertTrue(
edge is not None,
msg="create_edge with source_key = {} returned None instead of a edge"
.format(source_key)
)
self.assertTrue(
source_key == edge.get_source_key(),
msg="edge created with source key {} has a differing source key: {}"
.format(source_key, edge.get_source_key())
)
retrieved_edge = self.client.get_edge(source_key)
# test retrieved edge_version is valid
self.assertTrue(
retrieved_edge is not None,
msg='valid call to get_edge returned None'
)
self.assertTrue(
retrieved_edge == edge,
msg='valid call to get_edge returned incorrect data'
)
edge_version = self.client.create_edge_version(edge.get_id(), nv1.get_id(), nv2.get_id())
# test created edge_version is valid
self.assertTrue(
edge_version is not None,
msg='create_edge_version with edge_id={} returned None instead of a edge version'
.format(edge.get_id())
)
self.assertTrue(
edge_version.get_edge_id() == edge.get_id(),
msg="created edge_version's edge_id does not match id of edge"
)
retrieved_nv = self.client.get_edge_version(edge_version.get_id())
# test retrieved edge_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_edge_version returned None'
)
self.assertTrue(
retrieved_nv == edge_version,
msg='valid call to get_edge_version returned incorrect data'
)
latest = self.client.get_edge_latest_versions(source_key)
# test latest_version matches created edge_version
self.assertTrue(
latest == [edge_version.get_id()],
msg="get_edge_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_edge_history(source_key)
# expected_history = {
# '0': [edge_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_edge_history did not match expected value"
# )
def test_structure(self):
"""
Tests most of the node access methods
"""
source_key = uuid.uuid4().hex
structure = self.client.create_structure(source_key, source_key)
# test created structure is valid
self.assertTrue(
structure is not None,
msg="create_structure with source_key = {} returned None instead of a structure"
.format(source_key)
)
self.assertTrue(
source_key == structure.get_source_key(),
msg="structure created with source key {} has a differing source key: {}"
.format(source_key, structure.get_source_key())
)
retrieved_structure = self.client.get_structure(source_key)
# test retrieved structure_version is valid
self.assertTrue(
retrieved_structure is not None,
msg='valid call to get_structure returned None'
)
self.assertTrue(
retrieved_structure == structure,
msg='valid call to get_structure returned incorrect data'
)
structure_version = self.client.create_structure_version(structure.get_id(), {})
# test created structure_version is valid
self.assertTrue(
structure_version is not None,
msg='create_structure_version with structure_id={} returned None instead of a structure version'
.format(structure.get_id())
)
self.assertTrue(
structure_version.get_structure_id() == structure.get_id(),
msg="created structure_version's structure_id does not match id of structure"
)
retrieved_nv = self.client.get_structure_version(structure_version.get_id())
# test retrieved structure_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_structure_version returned None'
)
self.assertTrue(
retrieved_nv == structure_version,
msg='valid call to get_structure_version returned incorrect data'
)
latest = self.client.get_structure_latest_versions(source_key)
# test latest_version matches created structure_version
self.assertTrue(
latest == [structure_version.get_id()],
msg="get_structure_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_structure_history(source_key)
# expected_history = {
# '0': [structure_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_structure_history did not match expected value"
# )
def test_lineage_edge(self):
"""
Tests most of the node access methods
"""
# create rich versions aka node versions
node1_source_key = uuid.uuid4().hex
node2_source_key = uuid.uuid4().hex
node1 = self.client.create_node(node1_source_key, node1_source_key)
node2 = self.client.create_node(node2_source_key, node2_source_key)
nv1 = self.client.create_node_version(node1.get_id())
nv2 = self.client.create_node_version(node2.get_id())
source_key = uuid.uuid4().hex
lineage_edge = self.client.create_lineage_edge(source_key, source_key)
# test created lineage_edge is valid
self.assertTrue(
lineage_edge is not None,
msg="create_lineage_edge with source_key = {} returned None instead of a lineage_edge"
.format(source_key)
)
self.assertTrue(
source_key == lineage_edge.get_source_key(),
msg="lineage_edge created with source key {} has a differing source key: {}"
.format(source_key, lineage_edge.get_source_key())
)
retrieved_lineage_edge = self.client.get_lineage_edge(source_key)
# test retrieved lineage_edge_version is valid
self.assertTrue(
retrieved_lineage_edge is not None,
msg='valid call to get_lineage_edge returned None'
)
self.assertTrue(
retrieved_lineage_edge == lineage_edge,
msg='valid call to get_lineage_edge returned incorrect data'
)
lineage_edge_version = self.client.create_lineage_edge_version(
lineage_edge.get_id(), nv1.get_id(), nv2.get_id()
)
# test created lineage_edge_version is valid
self.assertTrue(
lineage_edge_version is not None,
msg='create_lineage_edge_version with lineage_edge_id={} returned None instead of a lineage_edge version'
.format(lineage_edge.get_id())
)
self.assertTrue(
lineage_edge_version.get_lineage_edge_id() == lineage_edge.get_id(),
msg="created lineage_edge_version's lineage_edge_id does not match id of lineage_edge"
)
retrieved_nv = self.client.get_lineage_edge_version(lineage_edge_version.get_id())
# test retrieved lineage_edge_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_lineage_edge_version returned None'
)
self.assertTrue(
retrieved_nv == lineage_edge_version,
msg='valid call to get_lineage_edge_version returned incorrect data'
)
latest = self.client.get_lineage_edge_latest_versions(source_key)
# test latest_version matches created lineage_edge_version
self.assertTrue(
latest == [lineage_edge_version.get_id()],
msg="get_lineage_edge_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_lineage_edge_history(source_key)
# expected_history = {
# '0': [lineage_edge_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_lineage_edge_history did not match expected value"
# )
def test_lineage_graph(self):
"""
Tests most of the node access methods
"""
# create rich versions aka node versions
node1_source_key = uuid.uuid4().hex
node2_source_key = uuid.uuid4().hex
node1 = self.client.create_node(node1_source_key, node1_source_key)
node2 = self.client.create_node(node2_source_key, node2_source_key)
nv1 = self.client.create_node_version(node1.get_id())
nv2 = self.client.create_node_version(node2.get_id())
source_key = uuid.uuid4().hex
lineage_graph = self.client.create_lineage_graph(source_key, source_key)
# test created lineage_graph is valid
self.assertTrue(
lineage_graph is not None,
msg="create_lineage_graph with source_key = {} returned None instead of a lineage_graph"
.format(source_key)
)
self.assertTrue(
source_key == lineage_graph.get_source_key(),
msg="lineage_graph created with source key {} has a differing source key: {}"
.format(source_key, lineage_graph.get_source_key())
)
retrieved_lineage_graph = self.client.get_lineage_graph(source_key)
# test retrieved lineage_graph_version is valid
self.assertTrue(
retrieved_lineage_graph is not None,
msg='valid call to get_lineage_graph returned None'
)
self.assertTrue(
retrieved_lineage_graph == lineage_graph,
msg='valid call to get_lineage_graph returned incorrect data'
)
lineage_graph_version = self.client.create_lineage_graph_version(
lineage_graph.get_id(), []
)
# test created lineage_graph_version is valid
self.assertTrue(
lineage_graph_version is not None,
msg='create_lineage_graph_version with lineage_graph_id={} returned None instead of a lineage_graph version'
.format(lineage_graph.get_id())
)
self.assertTrue(
lineage_graph_version.get_lineage_graph_id() == lineage_graph.get_id(),
msg="created lineage_graph_version's lineage_graph_id does not match id of lineage_graph"
)
retrieved_nv = self.client.get_lineage_graph_version(lineage_graph_version.get_id())
# test retrieved lineage_graph_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_lineage_graph_version returned None'
)
self.assertTrue(
retrieved_nv == lineage_graph_version,
msg='valid call to get_lineage_graph_version returned incorrect data'
)
latest = self.client.get_lineage_graph_latest_versions(source_key)
# test latest_version matches created lineage_graph_version
self.assertTrue(
latest == [lineage_graph_version.get_id()],
msg="get_lineage_graph_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_lineage_graph_history(source_key)
# expected_history = {
# '0': [lineage_graph_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_lineage_graph_history did not match expected value"
# )
# Allow running this test module directly (python <file>.py) in addition
# to discovery via a test runner.
if __name__ == '__main__':
    unittest.main()
| 38.502155 | 120 | 0.625917 | import unittest
import uuid
import ground.client as client
class TestClient(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = client.GroundClient()
def test_node(self):
source_key = uuid.uuid4().hex
node = self.client.create_node(source_key, source_key)
self.assertTrue(
node is not None,
msg="create_node with source_key = {} returned None instead of a node"
.format(source_key)
)
self.assertTrue(
source_key == node.get_source_key(),
msg="node created with source key {} has a differing source key: {}"
.format(source_key, node.get_source_key())
)
retrieved_node = self.client.get_node(source_key)
self.assertTrue(
retrieved_node is not None,
msg='valid call to get_node returned None'
)
self.assertTrue(
retrieved_node == node,
msg='valid call to get_node returned incorrect data'
)
node_version = self.client.create_node_version(node.get_id())
self.assertTrue(
node_version is not None,
msg='create_node_version with node_id={} returned None instead of a node version'
.format(node.get_id())
)
self.assertTrue(
node_version.get_node_id() == node.get_id(),
msg="created node_version's node_id does not match id of node"
)
retrieved_nv = self.client.get_node_version(node_version.get_id())
# test retrieved node_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_node_version returned None'
)
self.assertTrue(
retrieved_nv == node_version,
msg='valid call to get_node_version returned incorrect data'
)
latest = self.client.get_node_latest_versions(source_key)
# test latest_version matches created node_version
self.assertTrue(
latest == [node_version.get_id()],
msg="get_node_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_node_history(source_key)
# expected_history = {
# '0': [node_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_node_history did not match expected value"
# )
def test_graph(self):
source_key = uuid.uuid4().hex
graph = self.client.create_graph(source_key, source_key)
# test created graph is valid
self.assertTrue(
graph is not None,
msg="create_graph with source_key = {} returned None instead of a graph"
.format(source_key)
)
self.assertTrue(
source_key == graph.get_source_key(),
msg="graph created with source key {} has a differing source key: {}"
.format(source_key, graph.get_source_key())
)
retrieved_graph = self.client.get_graph(source_key)
# test retrieved graph_version is valid
self.assertTrue(
retrieved_graph is not None,
msg='valid call to get_graph returned None'
)
self.assertTrue(
retrieved_graph == graph,
msg='valid call to get_graph returned incorrect data'
)
graph_version = self.client.create_graph_version(graph.get_id(), [])
# test created graph_version is valid
self.assertTrue(
graph_version is not None,
msg='create_graph_version with graph_id={} returned None instead of a graph version'
.format(graph.get_id())
)
self.assertTrue(
graph_version.get_graph_id() == graph.get_id(),
msg="created graph_version's graph_id does not match id of graph"
)
retrieved_gv = self.client.get_graph_version(graph_version.get_id())
self.assertTrue(
retrieved_gv is not None,
msg='valid call to get_graph_version returned None'
)
self.assertTrue(
retrieved_gv == graph_version,
msg='valid call to get_graph_version returned incorrect data'
)
latest = self.client.get_graph_latest_versions(source_key)
self.assertTrue(
latest == [graph_version.get_id()],
msg="get_graph_latest_version returns incorrect versions"
)
def test_edge(self):
node1_source_key = uuid.uuid4().hex
node2_source_key = uuid.uuid4().hex
node1 = self.client.create_node(node1_source_key, node1_source_key)
node2 = self.client.create_node(node2_source_key, node2_source_key)
nv1 = self.client.create_node_version(node1.get_id())
nv2 = self.client.create_node_version(node2.get_id())
source_key = uuid.uuid4().hex
edge = self.client.create_edge(source_key, source_key, node1.get_id(), node2.get_id())
self.assertTrue(
edge is not None,
msg="create_edge with source_key = {} returned None instead of a edge"
.format(source_key)
)
self.assertTrue(
source_key == edge.get_source_key(),
msg="edge created with source key {} has a differing source key: {}"
.format(source_key, edge.get_source_key())
)
retrieved_edge = self.client.get_edge(source_key)
self.assertTrue(
retrieved_edge is not None,
msg='valid call to get_edge returned None'
)
self.assertTrue(
retrieved_edge == edge,
msg='valid call to get_edge returned incorrect data'
)
edge_version = self.client.create_edge_version(edge.get_id(), nv1.get_id(), nv2.get_id())
self.assertTrue(
edge_version is not None,
msg='create_edge_version with edge_id={} returned None instead of a edge version'
.format(edge.get_id())
)
self.assertTrue(
edge_version.get_edge_id() == edge.get_id(),
msg="created edge_version's edge_id does not match id of edge"
)
retrieved_nv = self.client.get_edge_version(edge_version.get_id())
# test retrieved edge_version is valid
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_edge_version returned None'
)
self.assertTrue(
retrieved_nv == edge_version,
msg='valid call to get_edge_version returned incorrect data'
)
latest = self.client.get_edge_latest_versions(source_key)
# test latest_version matches created edge_version
self.assertTrue(
latest == [edge_version.get_id()],
msg="get_edge_latest_version returns incorrect versions"
)
# TODO: uncomment when server side bug fixed
# history = self.client.get_edge_history(source_key)
# expected_history = {
# '0': [edge_version.get_id()]
# }
# self.assertTrue(
# history == expected_history,
# "call to get_edge_history did not match expected value"
# )
def test_structure(self):
source_key = uuid.uuid4().hex
structure = self.client.create_structure(source_key, source_key)
# test created structure is valid
self.assertTrue(
structure is not None,
msg="create_structure with source_key = {} returned None instead of a structure"
.format(source_key)
)
self.assertTrue(
source_key == structure.get_source_key(),
msg="structure created with source key {} has a differing source key: {}"
.format(source_key, structure.get_source_key())
)
retrieved_structure = self.client.get_structure(source_key)
# test retrieved structure_version is valid
self.assertTrue(
retrieved_structure is not None,
msg='valid call to get_structure returned None'
)
self.assertTrue(
retrieved_structure == structure,
msg='valid call to get_structure returned incorrect data'
)
structure_version = self.client.create_structure_version(structure.get_id(), {})
# test created structure_version is valid
self.assertTrue(
structure_version is not None,
msg='create_structure_version with structure_id={} returned None instead of a structure version'
.format(structure.get_id())
)
self.assertTrue(
structure_version.get_structure_id() == structure.get_id(),
msg="created structure_version's structure_id does not match id of structure"
)
retrieved_nv = self.client.get_structure_version(structure_version.get_id())
self.assertTrue(
retrieved_nv is not None,
msg='valid call to get_structure_version returned None'
)
self.assertTrue(
retrieved_nv == structure_version,
msg='valid call to get_structure_version returned incorrect data'
)
latest = self.client.get_structure_latest_versions(source_key)
self.assertTrue(
latest == [structure_version.get_id()],
msg="get_structure_latest_version returns incorrect versions"
)
    def test_lineage_edge(self):
        """Round-trip the lineage-edge API: create, fetch, version, list latest."""
        # Create two nodes with versions to serve as the edge's endpoints.
        node1_source_key = uuid.uuid4().hex
        node2_source_key = uuid.uuid4().hex
        node1 = self.client.create_node(node1_source_key, node1_source_key)
        node2 = self.client.create_node(node2_source_key, node2_source_key)
        nv1 = self.client.create_node_version(node1.get_id())
        nv2 = self.client.create_node_version(node2.get_id())
        source_key = uuid.uuid4().hex
        lineage_edge = self.client.create_lineage_edge(source_key, source_key)
        # test created lineage_edge is valid
        self.assertTrue(
            lineage_edge is not None,
            msg="create_lineage_edge with source_key = {} returned None instead of a lineage_edge"
            .format(source_key)
        )
        self.assertTrue(
            source_key == lineage_edge.get_source_key(),
            msg="lineage_edge created with source key {} has a differing source key: {}"
            .format(source_key, lineage_edge.get_source_key())
        )
        # test retrieved lineage_edge matches the created one
        retrieved_lineage_edge = self.client.get_lineage_edge(source_key)
        self.assertTrue(
            retrieved_lineage_edge is not None,
            msg='valid call to get_lineage_edge returned None'
        )
        self.assertTrue(
            retrieved_lineage_edge == lineage_edge,
            msg='valid call to get_lineage_edge returned incorrect data'
        )
        # Create an edge version connecting the two node versions.
        lineage_edge_version = self.client.create_lineage_edge_version(
            lineage_edge.get_id(), nv1.get_id(), nv2.get_id()
        )
        # test created lineage_edge_version is valid
        self.assertTrue(
            lineage_edge_version is not None,
            msg='create_lineage_edge_version with lineage_edge_id={} returned None instead of a lineage_edge version'
            .format(lineage_edge.get_id())
        )
        self.assertTrue(
            lineage_edge_version.get_lineage_edge_id() == lineage_edge.get_id(),
            msg="created lineage_edge_version's lineage_edge_id does not match id of lineage_edge"
        )
        retrieved_nv = self.client.get_lineage_edge_version(lineage_edge_version.get_id())
        # test retrieved lineage_edge_version is valid
        self.assertTrue(
            retrieved_nv is not None,
            msg='valid call to get_lineage_edge_version returned None'
        )
        self.assertTrue(
            retrieved_nv == lineage_edge_version,
            msg='valid call to get_lineage_edge_version returned incorrect data'
        )
        latest = self.client.get_lineage_edge_latest_versions(source_key)
        # test latest_version matches created lineage_edge_version
        self.assertTrue(
            latest == [lineage_edge_version.get_id()],
            msg="get_lineage_edge_latest_version returns incorrect versions"
        )
        # TODO: uncomment when server side bug fixed
        # history = self.client.get_lineage_edge_history(source_key)
        # expected_history = {
        #     '0': [lineage_edge_version.get_id()]
        # }
        # self.assertTrue(
        #     history == expected_history,
        #     "call to get_lineage_edge_history did not match expected value"
        # )
    def test_lineage_graph(self):
        """Round-trip the lineage-graph API: create, fetch, version, list latest."""
        # create rich versions aka node versions
        node1_source_key = uuid.uuid4().hex
        node2_source_key = uuid.uuid4().hex
        node1 = self.client.create_node(node1_source_key, node1_source_key)
        node2 = self.client.create_node(node2_source_key, node2_source_key)
        # NOTE(review): nv1/nv2 are created but never passed to the graph
        # version below (it gets an empty list) — presumably intentional
        # setup parity with test_lineage_edge; confirm.
        nv1 = self.client.create_node_version(node1.get_id())
        nv2 = self.client.create_node_version(node2.get_id())
        source_key = uuid.uuid4().hex
        lineage_graph = self.client.create_lineage_graph(source_key, source_key)
        # test created lineage_graph is valid
        self.assertTrue(
            lineage_graph is not None,
            msg="create_lineage_graph with source_key = {} returned None instead of a lineage_graph"
            .format(source_key)
        )
        self.assertTrue(
            source_key == lineage_graph.get_source_key(),
            msg="lineage_graph created with source key {} has a differing source key: {}"
            .format(source_key, lineage_graph.get_source_key())
        )
        retrieved_lineage_graph = self.client.get_lineage_graph(source_key)
        # test retrieved lineage_graph_version is valid
        self.assertTrue(
            retrieved_lineage_graph is not None,
            msg='valid call to get_lineage_graph returned None'
        )
        self.assertTrue(
            retrieved_lineage_graph == lineage_graph,
            msg='valid call to get_lineage_graph returned incorrect data'
        )
        lineage_graph_version = self.client.create_lineage_graph_version(
            lineage_graph.get_id(), []
        )
        # test created lineage_graph_version is valid
        self.assertTrue(
            lineage_graph_version is not None,
            msg='create_lineage_graph_version with lineage_graph_id={} returned None instead of a lineage_graph version'
            .format(lineage_graph.get_id())
        )
        self.assertTrue(
            lineage_graph_version.get_lineage_graph_id() == lineage_graph.get_id(),
            msg="created lineage_graph_version's lineage_graph_id does not match id of lineage_graph"
        )
        # test retrieved lineage_graph_version is valid
        retrieved_nv = self.client.get_lineage_graph_version(lineage_graph_version.get_id())
        self.assertTrue(
            retrieved_nv is not None,
            msg='valid call to get_lineage_graph_version returned None'
        )
        self.assertTrue(
            retrieved_nv == lineage_graph_version,
            msg='valid call to get_lineage_graph_version returned incorrect data'
        )
        # test latest_version matches created lineage_graph_version
        latest = self.client.get_lineage_graph_latest_versions(source_key)
        self.assertTrue(
            latest == [lineage_graph_version.get_id()],
            msg="get_lineage_graph_latest_version returns incorrect versions"
        )
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| true | true |
f7290541ba6a219c1b23507983095ee257129671 | 232 | py | Python | dbl_archive_data_storage/frugal_storer/frugal_collection.py | ubsicap/dbl-archive-data-storage | 03786ed54024a55ae96b93948a656a3c01269894 | [
"MIT"
] | null | null | null | dbl_archive_data_storage/frugal_storer/frugal_collection.py | ubsicap/dbl-archive-data-storage | 03786ed54024a55ae96b93948a656a3c01269894 | [
"MIT"
] | 12 | 2018-12-11T17:49:01.000Z | 2019-02-21T18:26:22.000Z | dbl_archive_data_storage/frugal_storer/frugal_collection.py | ubsicap/dbl-archive-data-storage | 03786ed54024a55ae96b93948a656a3c01269894 | [
"MIT"
class FrugalCollection:
    """A lightweight collection record holding an id and its snapshots.

    Instances expose:
      - ``id``: the collection identifier passed to the constructor.
      - ``snapshots``: a dict of snapshots, empty for a new collection.
    """

    # Class-level defaults; overridden per instance in __init__.
    id = None
    snapshots = None

    def __init__(self, collection_id):
        """Initialize with *collection_id* and an empty snapshot mapping.

        Bug fix: the original assigned a local variable ``snapshots = {}``,
        leaving the instance attribute stuck at the class-level ``None``.
        """
        self.snapshots = {}
        self.id = collection_id

    def __str__(self):
        return "<Collection {0}>".format(self.id)
f7290550adfa216a398f7dd493901439630b261f | 5,006 | py | Python | functions/plot_utils.py | biomac-lab/covid19_forecast | 6613064f8a6d8023ecbdaddbc2e7525b6ad0796f | [
"Apache-2.0"
] | null | null | null | functions/plot_utils.py | biomac-lab/covid19_forecast | 6613064f8a6d8023ecbdaddbc2e7525b6ad0796f | [
"Apache-2.0"
] | null | null | null | functions/plot_utils.py | biomac-lab/covid19_forecast | 6613064f8a6d8023ecbdaddbc2e7525b6ad0796f | [
"Apache-2.0"
] | null | null | null | from matplotlib.dates import date2num, num2date
from matplotlib.colors import ListedColormap
from matplotlib import dates as mdates
from matplotlib.patches import Patch
from matplotlib import pyplot as plt
from matplotlib import ticker
import os
def plot_fit(df_fit, df_data, y_label='Deaths', y_lim_up = 200, color='blue', col_data='smoothed_death', col_up='high_95', col_down='low_95', col_point='median', ax=None, sharey=True, forecast=True, path_to_save=None):
    """Plot a nowcast/forecast fit next to a zoomed-in recent-window view.

    df_fit columns:
        'mean', 'median', 'std', 'low_95', 'high_95', 'low_80', 'high_80',
        'low_50', 'high_50', 'type' with type in ['estimate', 'forecast']
    df_data columns:
        'confirmed', 'death', 'smoothed_confirmed', 'smoothed_death', 'type'
        with type in ['fitted', 'preliminary']

    Both frames are assumed to be indexed by date — TODO confirm.
    NOTE(review): the ``ax`` and ``forecast`` parameters are accepted but
    never used inside the function body; confirm whether they can be removed.
    Saves the figure to *path_to_save* when given, then closes it.
    """
    # Split the fit into nowcast ("estimate") and forecast parts, and the
    # observed data into fitted vs. preliminary points.
    df_estimate = df_fit.copy(); df_estimate = df_estimate[ df_estimate.type=='estimate' ]
    df_forecast = df_fit.copy(); df_forecast = df_forecast[ df_forecast.type=='forecast' ]
    df_data_fitted = df_data.copy(); df_data_fitted = df_data_fitted[df_data_fitted.type=='fitted']
    df_data_preliminary = df_data.copy(); df_data_preliminary = df_data_preliminary[df_data_preliminary.type=='preliminary']
    fig, axes = plt.subplots(1, 2, figsize=(20, 7), sharey=sharey)
    # Left panel: full history with nowcast band plus forecast bands.
    # NOTE(review): label says '95 CI' (missing '%') — likely a typo.
    axes[0].fill_between(df_estimate.index.values, df_estimate[col_down], df_estimate[col_up], color='gray', alpha=0.4, label='95 CI - Nowcast')
    axes[0].plot(df_estimate.index.values, df_estimate[col_point], color='black', alpha=0.4, label='Median - Nowcast')
    axes[0].scatter(df_data_fitted.index.values, df_data_fitted[col_data], facecolor='black', alpha=0.6, edgecolor='black', s=30)
    # Capture the autoscaled lower y-limit before adding forecast layers.
    (y1_l, y2_l) = axes[0].get_ylim()
    axes[0].fill_between(df_forecast.index.values, df_forecast[col_down], df_forecast[col_up], color=color, alpha=0.6, label='95% CI')
    axes[0].fill_between(df_forecast.index.values, df_forecast['low_80'], df_forecast['high_80'], color=color, alpha=0.4, label='80% CI')
    axes[0].fill_between(df_forecast.index.values, df_forecast['low_50'], df_forecast['high_50'], color=color, alpha=0.4, label='50% CI')
    axes[0].plot(df_forecast.index.values, df_forecast[col_point], color=color, alpha=0.4, label='Forecast - Median')
    axes[0].scatter(df_forecast.index.values, df_forecast[col_point], edgecolor='k', facecolor='white', s=10)
    axes[0].tick_params(axis='both', labelsize=15)
    # Right panel: last 10 estimate days plus the forecast, with
    # preliminary data points highlighted in red.
    axes[1].fill_between(df_estimate.iloc[-10:].index.values, df_estimate.iloc[-10:][col_up], df_estimate.iloc[-10:][col_down], color='gray', alpha=0.4)
    axes[1].plot(df_estimate.iloc[-10:].index.values, df_estimate.iloc[-10:][col_point], color='black', alpha=0.4)
    # NOTE(review): label '90% CI' but the default columns are the 95% band
    # (col_up/col_down default to high_95/low_95) — confirm intended label.
    axes[1].fill_between(df_forecast.index.values, df_forecast[col_down], df_forecast[col_up], color=color, alpha=0.2, label='90% CI')
    axes[1].fill_between(df_forecast.index.values, df_forecast['low_80'], df_forecast['high_80'], color=color, alpha=0.4, label='80% CI')
    axes[1].fill_between(df_forecast.index.values, df_forecast['low_50'], df_forecast['high_50'], color=color, alpha=0.6, label='50% CI')
    axes[1].plot(df_forecast.index.values, df_forecast[col_point], color='black', alpha=0.4)
    axes[1].scatter(df_estimate.iloc[-10:].index.values, df_data_fitted.iloc[-10:][col_data], facecolor='black', alpha=0.6, edgecolor='black', s=50)
    axes[1].scatter(df_data_preliminary.index.values, df_data_preliminary[col_data], edgecolor='k', facecolor='red', s=50, label='Preliminary data')
    # Shared axis cosmetics for both panels.
    # NOTE(review): the loop variable shadows the unused `ax` parameter.
    for ax in axes:
        ax.xaxis.set_major_locator(mdates.MonthLocator())
        ax.xaxis.set_major_formatter(mdates.DateFormatter('%b'))
        ax.xaxis.set_minor_locator(mdates.DayLocator())
        ax.xaxis.set_major_locator(mdates.WeekdayLocator())
        ax.xaxis.set_major_locator(mdates.MonthLocator())
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)
        ax.spines['left'].set_visible(False)
        ax.spines['bottom'].set_visible(False)
        ax.grid(which='major', axis='y', c='k', alpha=.1, zorder=-2)
        ax.yaxis.set_major_formatter(ticker.StrMethodFormatter("{x:.0f}"))
        ax.set_ylabel(y_label, size=15)
        # Clamp the y-range using the pre-forecast lower limit.
        ax.set_ylim( (y1_l, y_lim_up) )
        ax.legend(loc='upper left')
    # Finer-grained date ticks on the zoomed right panel.
    axes[1].xaxis.set_major_locator(mdates.MonthLocator())
    axes[1].xaxis.set_major_formatter(mdates.DateFormatter('%d-%b'))
    axes[1].xaxis.set_minor_locator(mdates.DayLocator())
    axes[1].xaxis.set_minor_formatter(mdates.DateFormatter('%d'))
    axes[1].xaxis.set_major_locator(mdates.WeekdayLocator())
    axes[1].xaxis.set_major_locator(mdates.MonthLocator())
    axes[1].tick_params(which='both', axis='both', labelrotation=90, labelsize=15)
    axes[1].grid(which='both', axis='x', c='k', alpha=.1, zorder=-2)
    axes[0].grid(which='major', axis='x', c='k', alpha=.1, zorder=-2)
    plt.tight_layout()
    if path_to_save:
        fig.savefig(path_to_save, dpi=300, bbox_inches='tight', transparent=False)
plt.close() | 60.313253 | 220 | 0.702157 | from matplotlib.dates import date2num, num2date
from matplotlib.colors import ListedColormap
from matplotlib import dates as mdates
from matplotlib.patches import Patch
from matplotlib import pyplot as plt
from matplotlib import ticker
import os
def plot_fit(df_fit, df_data, y_label='Deaths', y_lim_up = 200, color='blue', col_data='smoothed_death', col_up='high_95', col_down='low_95', col_point='median', ax=None, sharey=True, forecast=True, path_to_save=None):
df_estimate = df_fit.copy(); df_estimate = df_estimate[ df_estimate.type=='estimate' ]
df_forecast = df_fit.copy(); df_forecast = df_forecast[ df_forecast.type=='forecast' ]
df_data_fitted = df_data.copy(); df_data_fitted = df_data_fitted[df_data_fitted.type=='fitted']
df_data_preliminary = df_data.copy(); df_data_preliminary = df_data_preliminary[df_data_preliminary.type=='preliminary']
fig, axes = plt.subplots(1, 2, figsize=(20, 7), sharey=sharey)
axes[0].fill_between(df_estimate.index.values, df_estimate[col_down], df_estimate[col_up], color='gray', alpha=0.4, label='95 CI - Nowcast')
axes[0].plot(df_estimate.index.values, df_estimate[col_point], color='black', alpha=0.4, label='Median - Nowcast')
axes[0].scatter(df_data_fitted.index.values, df_data_fitted[col_data], facecolor='black', alpha=0.6, edgecolor='black', s=30)
(y1_l, y2_l) = axes[0].get_ylim()
axes[0].fill_between(df_forecast.index.values, df_forecast[col_down], df_forecast[col_up], color=color, alpha=0.6, label='95% CI')
axes[0].fill_between(df_forecast.index.values, df_forecast['low_80'], df_forecast['high_80'], color=color, alpha=0.4, label='80% CI')
axes[0].fill_between(df_forecast.index.values, df_forecast['low_50'], df_forecast['high_50'], color=color, alpha=0.4, label='50% CI')
axes[0].plot(df_forecast.index.values, df_forecast[col_point], color=color, alpha=0.4, label='Forecast - Median')
axes[0].scatter(df_forecast.index.values, df_forecast[col_point], edgecolor='k', facecolor='white', s=10)
axes[0].tick_params(axis='both', labelsize=15)
axes[1].fill_between(df_estimate.iloc[-10:].index.values, df_estimate.iloc[-10:][col_up], df_estimate.iloc[-10:][col_down], color='gray', alpha=0.4)
axes[1].plot(df_estimate.iloc[-10:].index.values, df_estimate.iloc[-10:][col_point], color='black', alpha=0.4)
axes[1].fill_between(df_forecast.index.values, df_forecast[col_down], df_forecast[col_up], color=color, alpha=0.2, label='90% CI')
axes[1].fill_between(df_forecast.index.values, df_forecast['low_80'], df_forecast['high_80'], color=color, alpha=0.4, label='80% CI')
axes[1].fill_between(df_forecast.index.values, df_forecast['low_50'], df_forecast['high_50'], color=color, alpha=0.6, label='50% CI')
axes[1].plot(df_forecast.index.values, df_forecast[col_point], color='black', alpha=0.4)
axes[1].scatter(df_estimate.iloc[-10:].index.values, df_data_fitted.iloc[-10:][col_data], facecolor='black', alpha=0.6, edgecolor='black', s=50)
axes[1].scatter(df_data_preliminary.index.values, df_data_preliminary[col_data], edgecolor='k', facecolor='red', s=50, label='Preliminary data')
for ax in axes:
ax.xaxis.set_major_locator(mdates.MonthLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%b'))
ax.xaxis.set_minor_locator(mdates.DayLocator())
ax.xaxis.set_major_locator(mdates.WeekdayLocator())
ax.xaxis.set_major_locator(mdates.MonthLocator())
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.grid(which='major', axis='y', c='k', alpha=.1, zorder=-2)
ax.yaxis.set_major_formatter(ticker.StrMethodFormatter("{x:.0f}"))
ax.set_ylabel(y_label, size=15)
ax.set_ylim( (y1_l, y_lim_up) )
ax.legend(loc='upper left')
axes[1].xaxis.set_major_locator(mdates.MonthLocator())
axes[1].xaxis.set_major_formatter(mdates.DateFormatter('%d-%b'))
axes[1].xaxis.set_minor_locator(mdates.DayLocator())
axes[1].xaxis.set_minor_formatter(mdates.DateFormatter('%d'))
axes[1].xaxis.set_major_locator(mdates.WeekdayLocator())
axes[1].xaxis.set_major_locator(mdates.MonthLocator())
axes[1].tick_params(which='both', axis='both', labelrotation=90, labelsize=15)
axes[1].grid(which='both', axis='x', c='k', alpha=.1, zorder=-2)
axes[0].grid(which='major', axis='x', c='k', alpha=.1, zorder=-2)
plt.tight_layout()
if path_to_save:
fig.savefig(path_to_save, dpi=300, bbox_inches='tight', transparent=False)
plt.close() | true | true |
f72906cf1c0c23a067d469ea51e3f75a7351ddf8 | 602 | py | Python | arctia/tasks/take.py | unternehmen/arctia | 5c0a9b1933199c09dc7312730ed32c3894bc33ac | [
"Unlicense"
] | 1 | 2018-01-12T15:11:03.000Z | 2018-01-12T15:11:03.000Z | arctia/tasks/take.py | unternehmen/arctia | 5c0a9b1933199c09dc7312730ed32c3894bc33ac | [
"Unlicense"
] | 4 | 2018-02-17T00:20:09.000Z | 2018-06-01T19:49:08.000Z | arctia/tasks/take.py | unternehmen/arctia | 5c0a9b1933199c09dc7312730ed32c3894bc33ac | [
"Unlicense"
class Take(object):
    """Task that picks an entity up from the stage at the unit's position.

    On enactment, the entity is removed from the stage only if it currently
    sits on the same tile as the unit; otherwise the not-found callback fires.
    """

    def __init__(self, stage, unit, entity,
                 not_found_proc, finished_proc):
        # Keep references to the collaborators; callbacks report the outcome.
        self._stage = stage
        self._unit = unit
        self._entity = entity
        self._not_found_proc = not_found_proc
        self._finished_proc = finished_proc

    def enact(self):
        """Attempt the pick-up and invoke the matching outcome callback."""
        location = self._entity.location
        if location and location == (self._unit.x, self._unit.y):
            # Entity is exactly where the unit stands: detach and delete it.
            self._entity.location = None
            self._stage.delete_entity(self._entity)
            self._finished_proc()
        else:
            # Entity has no location or is elsewhere: report failure.
            self._not_found_proc()
| 30.1 | 68 | 0.604651 | class Take(object):
def __init__(self, stage, unit, entity,
not_found_proc, finished_proc):
self._stage = stage
self._unit = unit
self._entity = entity
self._finished_proc = finished_proc
self._not_found_proc = not_found_proc
def enact(self):
if not self._entity.location \
or self._entity.location != (self._unit.x, self._unit.y):
self._not_found_proc()
return
self._entity.location = None
self._stage.delete_entity(self._entity)
self._finished_proc()
return
| true | true |
f7290817b1b55a787a87349b4e16e983876e4dc9 | 603 | py | Python | chatbotmaker/tests/messenger_test.py | Dominique57/ChatBotMaker | 4d3d27f2a4f4a221eddac63316ddd7b56ab4b5e5 | [
"MIT"
] | null | null | null | chatbotmaker/tests/messenger_test.py | Dominique57/ChatBotMaker | 4d3d27f2a4f4a221eddac63316ddd7b56ab4b5e5 | [
"MIT"
] | null | null | null | chatbotmaker/tests/messenger_test.py | Dominique57/ChatBotMaker | 4d3d27f2a4f4a221eddac63316ddd7b56ab4b5e5 | [
"MIT"
] | null | null | null | from ..messenger import Messenger
from . import pytest
def test_send_message_should_fail():
    """The base Messenger.send must raise, signalling it is unimplemented."""
    subject = Messenger()
    with pytest.raises(Exception) as excinfo:
        subject.send("user", "message")
    assert "implemented" in str(excinfo)
def test_mark_writing_on_should_do_nothing():
    """Enabling the writing indicator on the base Messenger is a no-op."""
    subject = Messenger()
    subject.mark_writing("user", True)
def test_mark_writing_off_should_do_nothing():
    """Disabling the writing indicator on the base Messenger is a no-op."""
    subject = Messenger()
    subject.mark_writing("user", False)
def test_mark_seen_should_do_nothing():
    """Marking a conversation as seen on the base Messenger is a no-op."""
    subject = Messenger()
    subject.mark_seen("user")
| 24.12 | 52 | 0.742952 | from ..messenger import Messenger
from . import pytest
def test_send_message_should_fail():
messenger = Messenger()
with pytest.raises(Exception) as exception_info:
messenger.send("user", "message")
assert "implemented" in str(exception_info)
def test_mark_writing_on_should_do_nothing():
messenger = Messenger()
messenger.mark_writing("user", True)
def test_mark_writing_off_should_do_nothing():
messenger = Messenger()
messenger.mark_writing("user", False)
def test_mark_seen_should_do_nothing():
messenger = Messenger()
messenger.mark_seen("user")
| true | true |
f729085e094974666a7d52a1effd0459218151e6 | 6,100 | py | Python | qa/rpc-tests/proxy_test.py | listedlinked/newsvn | 2a53c7054fc37dfdab3ecac8e159f9d98823b2b5 | [
"MIT"
] | null | null | null | qa/rpc-tests/proxy_test.py | listedlinked/newsvn | 2a53c7054fc37dfdab3ecac8e159f9d98823b2b5 | [
"MIT"
] | null | null | null | qa/rpc-tests/proxy_test.py | listedlinked/newsvn | 2a53c7054fc37dfdab3ecac8e159f9d98823b2b5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import socket
import traceback, sys
from binascii import hexlify
import time, os
from socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework import BitcoinTestFramework
from util import *
'''
Test plan:
- Start bitcoind's with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on bitcoind side:
- `-proxy` (proxy everything)
- `-onion` (proxy just onions)
- `-proxyrandomize` Circuit randomization
- Proxy configurations to test on proxy side,
- support no authentication (other proxy)
- support no authentication + user/pass authentication (Tor)
- proxy on IPv6
- Create various proxies (as threads)
- Create bitcoinds that connect to them
- Manipulate the bitcoinds using addnode (onetry) an observe effects
addnode connect to IPv4
addnode connect to IPv6
addnode connect to onion
addnode connect to generic DNS name
'''
class ProxyTest(BitcoinTestFramework):
    """Verify bitcoind's SOCKS5 proxy configurations against mock proxies."""
    def __init__(self):
        # Create three proxies on different ports
        # ... one unauthenticated
        self.conf1 = Socks5Configuration()
        self.conf1.addr = ('127.0.0.1', 13000 + (os.getpid() % 1000))
        self.conf1.unauth = True
        self.conf1.auth = False
        # ... one supporting authenticated and unauthenticated (Tor)
        self.conf2 = Socks5Configuration()
        self.conf2.addr = ('127.0.0.1', 14000 + (os.getpid() % 1000))
        self.conf2.unauth = True
        self.conf2.auth = True
        # ... one on IPv6 with similar configuration
        self.conf3 = Socks5Configuration()
        self.conf3.af = socket.AF_INET6
        self.conf3.addr = ('::1', 15000 + (os.getpid() % 1000))
        self.conf3.unauth = True
        self.conf3.auth = True
        # Start all three mock SOCKS5 servers (threads).
        self.serv1 = Socks5Server(self.conf1)
        self.serv1.start()
        self.serv2 = Socks5Server(self.conf2)
        self.serv2.start()
        self.serv3 = Socks5Server(self.conf3)
        self.serv3.start()
    def setup_nodes(self):
        """Start four nodes, each with a different proxy configuration."""
        # Note: proxies are not used to connect to local nodes
        # this is because the proxy to use is based on CService.GetNetwork(), which return NET_UNROUTABLE for localhost
        return start_nodes(4, self.options.tmpdir, extra_args=[
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0']
        ])
    def node_test(self, node, proxies, auth):
        """Drive addnode on *node* for four address types and assert each
        proxy in *proxies* received the expected SOCKS5 CONNECT command.
        Returns the list of observed Socks5Command objects."""
        rv = []
        # Test: outgoing IPv4 connection through node
        node.addnode("15.61.23.23:1234", "onetry")
        cmd = proxies[0].queue.get()
        assert(isinstance(cmd, Socks5Command))
        # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, "15.61.23.23")
        assert_equal(cmd.port, 1234)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)
        # Test: outgoing IPv6 connection through node
        node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
        cmd = proxies[1].queue.get()
        assert(isinstance(cmd, Socks5Command))
        # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, "1233:3432:2434:2343:3234:2345:6546:4534")
        assert_equal(cmd.port, 5443)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)
        # Test: outgoing onion connection through node
        node.addnode("newsvnvj7kcklujarx.onion:50020", "onetry")
        cmd = proxies[2].queue.get()
        assert(isinstance(cmd, Socks5Command))
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, "newsvnvj7kcklujarx.onion")
        assert_equal(cmd.port, 50020)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)
        # Test: outgoing DNS name connection through node
        node.addnode("node.noumenon:8333", "onetry")
        cmd = proxies[3].queue.get()
        assert(isinstance(cmd, Socks5Command))
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, "node.noumenon")
        assert_equal(cmd.port, 8333)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)
        return rv
    def run_test(self):
        """Run node_test against each proxy configuration."""
        # basic -proxy
        self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)
        # -proxy plus -onion
        self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)
        # -proxy plus -onion, -proxyrandomize
        rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
        # Check that credentials as used for -proxyrandomize connections are unique
        credentials = set((x.username,x.password) for x in rv)
        assert_equal(len(credentials), 4)
        # proxy on IPv6 localhost
        self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False)
# Allow running this test directly from the command line.
if __name__ == '__main__':
    ProxyTest().main()
| 41.780822 | 145 | 0.653115 |
import socket
import traceback, sys
from binascii import hexlify
import time, os
from socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework import BitcoinTestFramework
from util import *
class ProxyTest(BitcoinTestFramework):
def __init__(self):
self.conf1 = Socks5Configuration()
self.conf1.addr = ('127.0.0.1', 13000 + (os.getpid() % 1000))
self.conf1.unauth = True
self.conf1.auth = False
self.conf2 = Socks5Configuration()
self.conf2.addr = ('127.0.0.1', 14000 + (os.getpid() % 1000))
self.conf2.unauth = True
self.conf2.auth = True
self.conf3 = Socks5Configuration()
self.conf3.af = socket.AF_INET6
self.conf3.addr = ('::1', 15000 + (os.getpid() % 1000))
self.conf3.unauth = True
self.conf3.auth = True
self.serv1 = Socks5Server(self.conf1)
self.serv1.start()
self.serv2 = Socks5Server(self.conf2)
self.serv2.start()
self.serv3 = Socks5Server(self.conf3)
self.serv3.start()
def setup_nodes(self):
return start_nodes(4, self.options.tmpdir, extra_args=[
['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
['-listen', '-debug=net', '-debug=proxy', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0']
])
def node_test(self, node, proxies, auth):
rv = []
node.addnode("15.61.23.23:1234", "onetry")
cmd = proxies[0].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, "15.61.23.23")
assert_equal(cmd.port, 1234)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
# Test: outgoing IPv6 connection through node
node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
cmd = proxies[1].queue.get()
assert(isinstance(cmd, Socks5Command))
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, "1233:3432:2434:2343:3234:2345:6546:4534")
assert_equal(cmd.port, 5443)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
node.addnode("newsvnvj7kcklujarx.onion:50020", "onetry")
cmd = proxies[2].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, "newsvnvj7kcklujarx.onion")
assert_equal(cmd.port, 50020)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
node.addnode("node.noumenon:8333", "onetry")
cmd = proxies[3].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, "node.noumenon")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
return rv
def run_test(self):
self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)
self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)
rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
credentials = set((x.username,x.password) for x in rv)
assert_equal(len(credentials), 4)
self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False)
if __name__ == '__main__':
ProxyTest().main()
| true | true |
f729091a3f57c75215b2d30ff6f1aa0a14f519d6 | 4,080 | py | Python | psono/restapi/views/membership.py | dirigeant/psono-server | a18c5b3c4d8bbbe4ecf1615b210d99fb77752205 | [
"Apache-2.0",
"CC0-1.0"
] | 48 | 2018-04-19T15:50:58.000Z | 2022-01-23T15:58:11.000Z | psono/restapi/views/membership.py | dirigeant/psono-server | a18c5b3c4d8bbbe4ecf1615b210d99fb77752205 | [
"Apache-2.0",
"CC0-1.0"
] | 9 | 2018-09-13T14:56:18.000Z | 2020-01-17T16:44:33.000Z | psono/restapi/views/membership.py | dirigeant/psono-server | a18c5b3c4d8bbbe4ecf1615b210d99fb77752205 | [
"Apache-2.0",
"CC0-1.0"
] | 11 | 2019-09-20T11:53:47.000Z | 2021-07-18T22:41:31.000Z | from rest_framework import status
from rest_framework.response import Response
from rest_framework.generics import GenericAPIView
from ..permissions import IsAuthenticated
from django.core.cache import cache
from django.conf import settings
from ..app_settings import (
CreateMembershipSerializer,
UpdateMembershipSerializer,
DeleteMembershipSerializer,
)
from ..models import (
User_Group_Membership
)
from ..authentication import TokenAuthentication
class MembershipView(GenericAPIView):
    """
    Manages group memberships (create, update flags, delete).
    """
    authentication_classes = (TokenAuthentication, )
    permission_classes = (IsAuthenticated,)
    allowed_methods = ('PUT', 'POST', 'DELETE', 'OPTIONS', 'HEAD')
    def get(self, request, *args, **kwargs):
        # Listing memberships is not supported on this endpoint.
        return Response({}, status=status.HTTP_405_METHOD_NOT_ALLOWED)
    def put(self, request, *args, **kwargs):
        """
        Creates a new group membership

        :param request: incoming request carrying the membership payload
        :type request: rest_framework.request.Request
        :param args:
        :type args:
        :param kwargs:
        :type kwargs:
        :return: 201 with the new membership id / 400 on validation error
        :rtype: rest_framework.response.Response
        """
        serializer = CreateMembershipSerializer(data=request.data, context=self.get_serializer_context())
        if not serializer.is_valid():
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        # Persist the membership including the encrypted key material
        # supplied by the client.
        membership = User_Group_Membership.objects.create(
            user_id = serializer.validated_data['user_id'],
            group_id = serializer.validated_data['group_id'],
            creator = request.user,
            secret_key = str(serializer.validated_data['secret_key']),
            secret_key_nonce = str(serializer.validated_data['secret_key_nonce']),
            secret_key_type = str(serializer.validated_data['secret_key_type']),
            private_key = str(serializer.validated_data['private_key']),
            private_key_nonce = str(serializer.validated_data['private_key_nonce']),
            private_key_type = str(serializer.validated_data['private_key_type']),
            group_admin = serializer.validated_data['group_admin'],
            share_admin = serializer.validated_data['share_admin'],
        )
        # Invalidate the affected user's cached status so the new
        # membership becomes visible immediately.
        if settings.CACHE_ENABLE:
            cache_key = 'psono_user_status_' + str(serializer.validated_data['user_id'])
            cache.delete(cache_key)
        return Response({'membership_id': membership.id}, status=status.HTTP_201_CREATED)
    def post(self, request, *args, **kwargs):
        """
        Updates a group membership's admin flags

        :param request: incoming request carrying the updated flags
        :type request: rest_framework.request.Request
        :param args:
        :type args:
        :param kwargs:
        :type kwargs:
        :return: 200 / 400 on validation error
        :rtype: rest_framework.response.Response
        """
        serializer = UpdateMembershipSerializer(data=request.data, context=self.get_serializer_context())
        if not serializer.is_valid():
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        # NOTE(review): unlike put/delete, no user-status cache invalidation
        # happens here — confirm whether the admin flags affect the cached
        # 'psono_user_status_*' entries.
        membership = serializer.validated_data['membership']
        membership.group_admin = serializer.validated_data['group_admin']
        membership.share_admin = serializer.validated_data['share_admin']
        membership.save()
        return Response(status=status.HTTP_200_OK)
    def delete(self, request, *args, **kwargs):
        """
        Deletes a group membership

        :param request: incoming request identifying the membership
        :param args:
        :param kwargs:
        :return: 200 / 400 on validation error
        """
        serializer = DeleteMembershipSerializer(data=request.data, context=self.get_serializer_context())
        if not serializer.is_valid():
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )
        membership = serializer.validated_data.get('membership')
        # Invalidate the member's cached status before removing the row.
        if settings.CACHE_ENABLE:
            cache_key = 'psono_user_status_' + str(membership.user.id)
            cache.delete(cache_key)
        # delete it
        membership.delete()
        return Response(status=status.HTTP_200_OK)
| 30.909091 | 105 | 0.647059 | from rest_framework import status
from rest_framework.response import Response
from rest_framework.generics import GenericAPIView
from ..permissions import IsAuthenticated
from django.core.cache import cache
from django.conf import settings
from ..app_settings import (
CreateMembershipSerializer,
UpdateMembershipSerializer,
DeleteMembershipSerializer,
)
from ..models import (
User_Group_Membership
)
from ..authentication import TokenAuthentication
class MembershipView(GenericAPIView):
authentication_classes = (TokenAuthentication, )
permission_classes = (IsAuthenticated,)
allowed_methods = ('PUT', 'POST', 'DELETE', 'OPTIONS', 'HEAD')
def get(self, request, *args, **kwargs):
return Response({}, status=status.HTTP_405_METHOD_NOT_ALLOWED)
def put(self, request, *args, **kwargs):
serializer = CreateMembershipSerializer(data=request.data, context=self.get_serializer_context())
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
membership = User_Group_Membership.objects.create(
user_id = serializer.validated_data['user_id'],
group_id = serializer.validated_data['group_id'],
creator = request.user,
secret_key = str(serializer.validated_data['secret_key']),
secret_key_nonce = str(serializer.validated_data['secret_key_nonce']),
secret_key_type = str(serializer.validated_data['secret_key_type']),
private_key = str(serializer.validated_data['private_key']),
private_key_nonce = str(serializer.validated_data['private_key_nonce']),
private_key_type = str(serializer.validated_data['private_key_type']),
group_admin = serializer.validated_data['group_admin'],
share_admin = serializer.validated_data['share_admin'],
)
if settings.CACHE_ENABLE:
cache_key = 'psono_user_status_' + str(serializer.validated_data['user_id'])
cache.delete(cache_key)
return Response({'membership_id': membership.id}, status=status.HTTP_201_CREATED)
def post(self, request, *args, **kwargs):
serializer = UpdateMembershipSerializer(data=request.data, context=self.get_serializer_context())
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
membership = serializer.validated_data['membership']
membership.group_admin = serializer.validated_data['group_admin']
membership.share_admin = serializer.validated_data['share_admin']
membership.save()
return Response(status=status.HTTP_200_OK)
def delete(self, request, *args, **kwargs):
serializer = DeleteMembershipSerializer(data=request.data, context=self.get_serializer_context())
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
membership = serializer.validated_data.get('membership')
if settings.CACHE_ENABLE:
cache_key = 'psono_user_status_' + str(membership.user.id)
cache.delete(cache_key)
membership.delete()
return Response(status=status.HTTP_200_OK)
| true | true |
f7290948c42d8e7aa37006897fa91645c005e290 | 339 | py | Python | ironsms/exceptions/limits.py | viuipan/ironsmslib | 0d494ea08a6bcdd0f11f32e88baccd3555b5a0b3 | [
"MIT"
] | null | null | null | ironsms/exceptions/limits.py | viuipan/ironsmslib | 0d494ea08a6bcdd0f11f32e88baccd3555b5a0b3 | [
"MIT"
] | null | null | null | ironsms/exceptions/limits.py | viuipan/ironsmslib | 0d494ea08a6bcdd0f11f32e88baccd3555b5a0b3 | [
"MIT"
] | null | null | null | from .base import APIException
class LimitActiveException(APIException):
message = "LIMIT_ACTIVE"
description = "number activation has already been completed"
limit: int
def __init__(self, response):
self.limit = response['limit']
super(LimitActiveException, self).__init__(description=self.description)
| 28.25 | 80 | 0.731563 | from .base import APIException
class LimitActiveException(APIException):
message = "LIMIT_ACTIVE"
description = "number activation has already been completed"
limit: int
def __init__(self, response):
self.limit = response['limit']
super(LimitActiveException, self).__init__(description=self.description)
| true | true |
f72909bddee715522f021c82b50fd72ecc6d200a | 1,803 | py | Python | tests/outputs/test_standard_output.py | AustinScola/illud | a6aca1de38bbe9d5a795aaa084bcbd6731767d18 | [
"MIT"
] | 1 | 2020-12-05T00:59:15.000Z | 2020-12-05T00:59:15.000Z | tests/outputs/test_standard_output.py | AustinScola/illud | a6aca1de38bbe9d5a795aaa084bcbd6731767d18 | [
"MIT"
] | 112 | 2021-01-15T21:42:27.000Z | 2021-04-17T19:11:21.000Z | tests/outputs/test_standard_output.py | AustinScola/illud | a6aca1de38bbe9d5a795aaa084bcbd6731767d18 | [
"MIT"
] | null | null | null | """Test illud.outputs.standard_output."""
import sys
from typing import Any
from unittest.mock import MagicMock, patch
import pytest
from illud.output import Output
from illud.outputs.standard_output import StandardOutput
def test_inheritance() -> None:
"""Test illud.outputs.standard_output.StandardOutput inheritance."""
assert issubclass(StandardOutput, Output)
def test_init() -> None:
"""Test illud.outputs.standard_output.StandardOutput.__init__."""
standard_output: StandardOutput = StandardOutput()
assert standard_output._stdout == sys.stdout # pylint: disable=protected-access
# yapf: disable
@pytest.mark.parametrize('standard_output, other, expected_equality', [
(StandardOutput(), 'foo', False),
(StandardOutput(), StandardOutput(), True),
])
# yapf: enable
def test_eq(standard_output: StandardOutput, other: Any, expected_equality: bool) -> None:
"""Test illud.outputs.standard_output.StandardOutput.__eq__."""
equality: bool = standard_output == other
assert equality == expected_equality
# yapf: disable
@pytest.mark.parametrize('string', [
(''),
('foo'),
])
# yapf: enable
def test_write(string: str) -> None:
"""Test illud.outputs.standard_output.StandardOutput.write."""
stdout_mock = MagicMock(sys.stdout)
with patch('sys.stdout', stdout_mock):
standard_output: StandardOutput = StandardOutput()
standard_output.write(string)
stdout_mock.write.assert_called_once_with(string)
def test_flush() -> None:
"""Test illud.outputs.standard_output.StandardOutput.flush."""
stdout_mock = MagicMock(sys.stdout)
with patch('sys.stdout', stdout_mock):
standard_output: StandardOutput = StandardOutput()
standard_output.flush()
stdout_mock.flush.assert_called_once_with()
| 28.619048 | 90 | 0.733222 | import sys
from typing import Any
from unittest.mock import MagicMock, patch
import pytest
from illud.output import Output
from illud.outputs.standard_output import StandardOutput
def test_inheritance() -> None:
assert issubclass(StandardOutput, Output)
def test_init() -> None:
standard_output: StandardOutput = StandardOutput()
assert standard_output._stdout == sys.stdout
@pytest.mark.parametrize('standard_output, other, expected_equality', [
(StandardOutput(), 'foo', False),
(StandardOutput(), StandardOutput(), True),
])
def test_eq(standard_output: StandardOutput, other: Any, expected_equality: bool) -> None:
equality: bool = standard_output == other
assert equality == expected_equality
@pytest.mark.parametrize('string', [
(''),
('foo'),
])
def test_write(string: str) -> None:
stdout_mock = MagicMock(sys.stdout)
with patch('sys.stdout', stdout_mock):
standard_output: StandardOutput = StandardOutput()
standard_output.write(string)
stdout_mock.write.assert_called_once_with(string)
def test_flush() -> None:
stdout_mock = MagicMock(sys.stdout)
with patch('sys.stdout', stdout_mock):
standard_output: StandardOutput = StandardOutput()
standard_output.flush()
stdout_mock.flush.assert_called_once_with()
| true | true |
f72909d51f6781094ad2796f88e7cadc34ab5a20 | 438 | py | Python | Entrenamiento Algoritmico/listas.py | andersonvelasco/Programming-2020B | bd843a406353b55ca83b9684c394aec1556aaddc | [
"Apache-2.0"
] | null | null | null | Entrenamiento Algoritmico/listas.py | andersonvelasco/Programming-2020B | bd843a406353b55ca83b9684c394aec1556aaddc | [
"Apache-2.0"
] | null | null | null | Entrenamiento Algoritmico/listas.py | andersonvelasco/Programming-2020B | bd843a406353b55ca83b9684c394aec1556aaddc | [
"Apache-2.0"
] | null | null | null | #Packages
import os
os.system("cls")
lista=[]
#Functions
def llenar_lista(x):
lista.append(x)
#def validacion_lista():
# print("Validación :")
#def mostrar_lista():
# print("Mostrar")
#Main
num=int(input("Ingrese No: "))
op=int(input("::::Desea Agregar un nuevo Número a la Lista : \n1. Si\n2. No : "))
if op =="s" or op =="S" or op == "1":
llenar_lista(num)
else:
print("opción incorrecta")
# validacion_lista()
| 19.043478 | 81 | 0.63242 |
import os
os.system("cls")
lista=[]
def llenar_lista(x):
lista.append(x)
num=int(input("Ingrese No: "))
op=int(input("::::Desea Agregar un nuevo Número a la Lista : \n1. Si\n2. No : "))
if op =="s" or op =="S" or op == "1":
llenar_lista(num)
else:
print("opción incorrecta")
| true | true |
f7290b4c53116bdaf469ce7678a80eb24c95ee72 | 6,490 | py | Python | simple_history/utils.py | felixschloesser/django-simple-history | 28abacb8a776fbaffcf0a42432a6a88be3561a86 | [
"BSD-3-Clause"
] | 2 | 2021-03-26T09:20:05.000Z | 2021-05-26T13:46:48.000Z | simple_history/utils.py | felixschloesser/django-simple-history | 28abacb8a776fbaffcf0a42432a6a88be3561a86 | [
"BSD-3-Clause"
] | 55 | 2020-12-25T06:47:15.000Z | 2022-03-28T20:06:13.000Z | simple_history/utils.py | hramezani/django-simple-history | 32645206749a1cc68539d9ad6499f1a938b2c9f4 | [
"BSD-3-Clause"
] | null | null | null | import warnings
import django
from django.db import transaction
from django.db.models import ManyToManyField
from django.forms.models import model_to_dict
from simple_history.exceptions import AlternativeManagerError, NotHistoricalModelError
def update_change_reason(instance, reason):
attrs = {}
model = type(instance)
manager = instance if instance.id is not None else model
history = get_history_manager_for_model(manager)
history_fields = [field.attname for field in history.model._meta.fields]
for field in instance._meta.fields:
if field.attname not in history_fields:
continue
value = getattr(instance, field.attname)
if field.primary_key is True:
if value is not None:
attrs[field.attname] = value
else:
attrs[field.attname] = value
record = history.filter(**attrs).order_by("-history_date").first()
record.history_change_reason = reason
record.save()
def get_history_manager_for_model(model):
"""Return the history manager for a given app model."""
try:
manager_name = model._meta.simple_history_manager_attribute
except AttributeError:
raise NotHistoricalModelError(
"Cannot find a historical model for {model}.".format(model=model)
)
return getattr(model, manager_name)
def get_history_model_for_model(model):
"""Return the history model for a given app model."""
return get_history_manager_for_model(model).model
def bulk_create_with_history(
objs,
model,
batch_size=None,
ignore_conflicts=False,
default_user=None,
default_change_reason=None,
default_date=None,
):
"""
Bulk create the objects specified by objs while also bulk creating
their history (all in one transaction).
Because of not providing primary key attribute after bulk_create on any DB except
Postgres (https://docs.djangoproject.com/en/2.2/ref/models/querysets/#bulk-create)
Divide this process on two transactions for other DB's
:param objs: List of objs (not yet saved to the db) of type model
:param model: Model class that should be created
:param batch_size: Number of objects that should be created in each batch
:param default_user: Optional user to specify as the history_user in each historical
record
:param default_change_reason: Optional change reason to specify as the change_reason
in each historical record
:param default_date: Optional date to specify as the history_date in each historical
record
:return: List of objs with IDs
"""
# Exclude ManyToManyFields because they end up as invalid kwargs to
# model.objects.filter(...) below.
exclude_fields = [
field.name
for field in model._meta.get_fields()
if isinstance(field, ManyToManyField)
]
history_manager = get_history_manager_for_model(model)
model_manager = model._default_manager
second_transaction_required = True
with transaction.atomic(savepoint=False):
objs_with_id = model_manager.bulk_create(
objs, batch_size=batch_size, ignore_conflicts=ignore_conflicts
)
if objs_with_id and objs_with_id[0].pk and not ignore_conflicts:
second_transaction_required = False
history_manager.bulk_history_create(
objs_with_id,
batch_size=batch_size,
default_user=default_user,
default_change_reason=default_change_reason,
default_date=default_date,
)
if second_transaction_required:
obj_list = []
with transaction.atomic(savepoint=False):
for obj in objs_with_id:
attributes = dict(
filter(
lambda x: x[1] is not None,
model_to_dict(obj, exclude=exclude_fields).items(),
)
)
obj_list += model_manager.filter(**attributes)
history_manager.bulk_history_create(
obj_list,
batch_size=batch_size,
default_user=default_user,
default_change_reason=default_change_reason,
default_date=default_date,
)
objs_with_id = obj_list
return objs_with_id
def bulk_update_with_history(
objs,
model,
fields,
batch_size=None,
default_user=None,
default_change_reason=None,
default_date=None,
manager=None,
):
"""
Bulk update the objects specified by objs while also bulk creating
their history (all in one transaction).
:param objs: List of objs of type model to be updated
:param model: Model class that should be updated
:param fields: The fields that are updated
:param batch_size: Number of objects that should be updated in each batch
:param default_user: Optional user to specify as the history_user in each historical
record
:param default_change_reason: Optional change reason to specify as the change_reason
in each historical record
:param default_date: Optional date to specify as the history_date in each historical
record
:param manager: Optional model manager to use for the model instead of the default
manager
"""
history_manager = get_history_manager_for_model(model)
model_manager = manager or model._default_manager
if model_manager.model is not model:
raise AlternativeManagerError("The given manager does not belong to the model.")
with transaction.atomic(savepoint=False):
model_manager.bulk_update(objs, fields, batch_size=batch_size)
history_manager.bulk_history_create(
objs,
batch_size=batch_size,
update=True,
default_user=default_user,
default_change_reason=default_change_reason,
default_date=default_date,
)
def get_change_reason_from_object(obj):
if hasattr(obj, "_change_reason"):
return getattr(obj, "_change_reason")
if hasattr(obj, "changeReason"):
warning_msg = (
"Using the attr changeReason to populate history_change_reason is"
" deprecated in 2.10.0 and will be removed in 3.0.0. Use "
"_change_reason instead. "
)
warnings.warn(warning_msg, DeprecationWarning)
return getattr(obj, "changeReason")
return None
| 36.666667 | 88 | 0.679045 | import warnings
import django
from django.db import transaction
from django.db.models import ManyToManyField
from django.forms.models import model_to_dict
from simple_history.exceptions import AlternativeManagerError, NotHistoricalModelError
def update_change_reason(instance, reason):
attrs = {}
model = type(instance)
manager = instance if instance.id is not None else model
history = get_history_manager_for_model(manager)
history_fields = [field.attname for field in history.model._meta.fields]
for field in instance._meta.fields:
if field.attname not in history_fields:
continue
value = getattr(instance, field.attname)
if field.primary_key is True:
if value is not None:
attrs[field.attname] = value
else:
attrs[field.attname] = value
record = history.filter(**attrs).order_by("-history_date").first()
record.history_change_reason = reason
record.save()
def get_history_manager_for_model(model):
try:
manager_name = model._meta.simple_history_manager_attribute
except AttributeError:
raise NotHistoricalModelError(
"Cannot find a historical model for {model}.".format(model=model)
)
return getattr(model, manager_name)
def get_history_model_for_model(model):
return get_history_manager_for_model(model).model
def bulk_create_with_history(
objs,
model,
batch_size=None,
ignore_conflicts=False,
default_user=None,
default_change_reason=None,
default_date=None,
):
exclude_fields = [
field.name
for field in model._meta.get_fields()
if isinstance(field, ManyToManyField)
]
history_manager = get_history_manager_for_model(model)
model_manager = model._default_manager
second_transaction_required = True
with transaction.atomic(savepoint=False):
objs_with_id = model_manager.bulk_create(
objs, batch_size=batch_size, ignore_conflicts=ignore_conflicts
)
if objs_with_id and objs_with_id[0].pk and not ignore_conflicts:
second_transaction_required = False
history_manager.bulk_history_create(
objs_with_id,
batch_size=batch_size,
default_user=default_user,
default_change_reason=default_change_reason,
default_date=default_date,
)
if second_transaction_required:
obj_list = []
with transaction.atomic(savepoint=False):
for obj in objs_with_id:
attributes = dict(
filter(
lambda x: x[1] is not None,
model_to_dict(obj, exclude=exclude_fields).items(),
)
)
obj_list += model_manager.filter(**attributes)
history_manager.bulk_history_create(
obj_list,
batch_size=batch_size,
default_user=default_user,
default_change_reason=default_change_reason,
default_date=default_date,
)
objs_with_id = obj_list
return objs_with_id
def bulk_update_with_history(
objs,
model,
fields,
batch_size=None,
default_user=None,
default_change_reason=None,
default_date=None,
manager=None,
):
history_manager = get_history_manager_for_model(model)
model_manager = manager or model._default_manager
if model_manager.model is not model:
raise AlternativeManagerError("The given manager does not belong to the model.")
with transaction.atomic(savepoint=False):
model_manager.bulk_update(objs, fields, batch_size=batch_size)
history_manager.bulk_history_create(
objs,
batch_size=batch_size,
update=True,
default_user=default_user,
default_change_reason=default_change_reason,
default_date=default_date,
)
def get_change_reason_from_object(obj):
if hasattr(obj, "_change_reason"):
return getattr(obj, "_change_reason")
if hasattr(obj, "changeReason"):
warning_msg = (
"Using the attr changeReason to populate history_change_reason is"
" deprecated in 2.10.0 and will be removed in 3.0.0. Use "
"_change_reason instead. "
)
warnings.warn(warning_msg, DeprecationWarning)
return getattr(obj, "changeReason")
return None
| true | true |
f7290c2a98dd473c3692b518839a458e52199318 | 24,669 | py | Python | src/sage/plot/arrow.py | rekhabiswal/sage | e8633b09919542a65e7e990c8369fee30c7edefd | [
"BSL-1.0"
] | null | null | null | src/sage/plot/arrow.py | rekhabiswal/sage | e8633b09919542a65e7e990c8369fee30c7edefd | [
"BSL-1.0"
] | null | null | null | src/sage/plot/arrow.py | rekhabiswal/sage | e8633b09919542a65e7e990c8369fee30c7edefd | [
"BSL-1.0"
] | null | null | null | """
Arrows
"""
#*****************************************************************************
# Copyright (C) 2006 Alex Clemesha <clemesha@gmail.com>,
# William Stein <wstein@gmail.com>,
# 2008 Mike Hansen <mhansen@gmail.com>,
# 2009 Emily Kirkman
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.plot.primitive import GraphicPrimitive
from sage.misc.decorators import options, rename_keyword
from sage.plot.colors import to_mpl_color
class CurveArrow(GraphicPrimitive):
def __init__(self, path, options):
"""
Returns an arrow graphics primitive along the provided path (bezier curve).
EXAMPLES::
sage: from sage.plot.arrow import CurveArrow
sage: b = CurveArrow(path=[[(0,0),(.5,.5),(1,0)],[(.5,1),(0,0)]],
....: options={})
sage: b
CurveArrow from (0, 0) to (0, 0)
"""
import numpy as np
self.path = path
codes = [1] + (len(self.path[0])-1)*[len(self.path[0])]
vertices = self.path[0]
for curve in self.path[1:]:
vertices += curve
codes += (len(curve))*[len(curve)+1]
self.codes = codes
self.vertices = np.array(vertices, np.float)
GraphicPrimitive.__init__(self, options)
def get_minmax_data(self):
"""
Returns a dictionary with the bounding box data.
EXAMPLES::
sage: from sage.plot.arrow import CurveArrow
sage: b = CurveArrow(path=[[(0,0),(.5,.5),(1,0)],[(.5,1),(0,0)]],
....: options={})
sage: d = b.get_minmax_data()
sage: d['xmin']
0.0
sage: d['xmax']
1.0
"""
return {'xmin': self.vertices[:,0].min(),
'xmax': self.vertices[:,0].max(),
'ymin': self.vertices[:,1].min(),
'ymax': self.vertices[:,1].max()}
def _allowed_options(self):
"""
Return the dictionary of allowed options for the curve arrow graphics
primitive.
EXAMPLES::
sage: from sage.plot.arrow import CurveArrow
sage: list(sorted(CurveArrow(path=[[(0,0),(2,3)]],options={})._allowed_options().items()))
[('arrowsize', 'The size of the arrowhead'),
('arrowstyle', 'todo'),
('head', '2-d only: Which end of the path to draw the head (one of 0 (start), 1 (end) or 2 (both)'),
('hue', 'The color given as a hue.'),
('legend_color', 'The color of the legend text.'),
('legend_label', 'The label for this item in the legend.'),
('linestyle', "2d only: The style of the line, which is one of
'dashed', 'dotted', 'solid', 'dashdot', or '--', ':', '-', '-.',
respectively."),
('rgbcolor', 'The color as an RGB tuple.'),
('thickness', 'The thickness of the arrow.'),
('width', 'The width of the shaft of the arrow, in points.'),
('zorder', '2-d only: The layer level in which to draw')]
"""
return {'width': 'The width of the shaft of the arrow, in points.',
'rgbcolor': 'The color as an RGB tuple.',
'hue': 'The color given as a hue.',
'legend_label': 'The label for this item in the legend.',
'legend_color': 'The color of the legend text.',
'arrowstyle': 'todo',
'arrowsize': 'The size of the arrowhead',
'thickness': 'The thickness of the arrow.',
'zorder': '2-d only: The layer level in which to draw',
'head': '2-d only: Which end of the path to draw the head (one of 0 (start), 1 (end) or 2 (both)',
'linestyle': "2d only: The style of the line, which is one of "
"'dashed', 'dotted', 'solid', 'dashdot', or '--', ':', '-', '-.', "
"respectively."}
def _repr_(self):
"""
Text representation of an arrow graphics primitive.
EXAMPLES::
sage: from sage.plot.arrow import CurveArrow
sage: CurveArrow(path=[[(0,0),(1,4),(2,3)]],options={})._repr_()
'CurveArrow from (0, 0) to (2, 3)'
"""
return "CurveArrow from %s to %s" % (self.path[0][0], self.path[-1][-1])
def _render_on_subplot(self, subplot):
"""
Render this arrow in a subplot. This is the key function that
defines how this arrow graphics primitive is rendered in
matplotlib's library.
EXAMPLES::
This function implicitly ends up rendering this arrow on a matplotlib
subplot:
sage: arrow(path=[[(0,1), (2,-1), (4,5)]])
Graphics object consisting of 1 graphics primitive
"""
from sage.plot.misc import get_matplotlib_linestyle
options = self.options()
width = float(options['width'])
head = options.pop('head')
if head == 0: style = '<|-'
elif head == 1: style = '-|>'
elif head == 2: style = '<|-|>'
else: raise KeyError('head parameter must be one of 0 (start), 1 (end) or 2 (both).')
arrowsize = float(options.get('arrowsize', 5))
head_width = arrowsize
head_length = arrowsize * 2.0
color = to_mpl_color(options['rgbcolor'])
from matplotlib.patches import FancyArrowPatch
from matplotlib.path import Path
bpath = Path(self.vertices, self.codes)
p = FancyArrowPatch(path=bpath,
lw=width, arrowstyle='%s,head_width=%s,head_length=%s' % (style, head_width, head_length),
fc=color, ec=color,
linestyle=get_matplotlib_linestyle(options['linestyle'], return_type='long'))
p.set_zorder(options['zorder'])
p.set_label(options['legend_label'])
subplot.add_patch(p)
return p
class Arrow(GraphicPrimitive):
"""
Primitive class that initializes the (line) arrow graphics type
EXAMPLES:
We create an arrow graphics object, then take the 0th entry
in it to get the actual Arrow graphics primitive::
sage: P = arrow((0,1), (2,3))[0]
sage: type(P)
<class 'sage.plot.arrow.Arrow'>
sage: P
Arrow from (0.0,1.0) to (2.0,3.0)
"""
def __init__(self, xtail, ytail, xhead, yhead, options):
"""
Create an arrow graphics primitive.
EXAMPLES::
sage: from sage.plot.arrow import Arrow
sage: Arrow(0,0,2,3,{})
Arrow from (0.0,0.0) to (2.0,3.0)
"""
self.xtail = float(xtail)
self.xhead = float(xhead)
self.ytail = float(ytail)
self.yhead = float(yhead)
GraphicPrimitive.__init__(self, options)
def get_minmax_data(self):
"""
Returns a bounding box for this arrow.
EXAMPLES::
sage: d = arrow((1,1), (5,5)).get_minmax_data()
sage: d['xmin']
1.0
sage: d['xmax']
5.0
"""
return {'xmin': min(self.xtail, self.xhead),
'xmax': max(self.xtail, self.xhead),
'ymin': min(self.ytail, self.yhead),
'ymax': max(self.ytail, self.yhead)}
def _allowed_options(self):
"""
Return the dictionary of allowed options for the line arrow graphics
primitive.
EXAMPLES::
sage: from sage.plot.arrow import Arrow
sage: list(sorted(Arrow(0,0,2,3,{})._allowed_options().items()))
[('arrowshorten', 'The length in points to shorten the arrow.'),
('arrowsize', 'The size of the arrowhead'),
('head',
'2-d only: Which end of the path to draw the head (one of 0 (start), 1 (end) or 2 (both)'),
('hue', 'The color given as a hue.'),
('legend_color', 'The color of the legend text.'),
('legend_label', 'The label for this item in the legend.'),
('linestyle',
"2d only: The style of the line, which is one of 'dashed',
'dotted', 'solid', 'dashdot', or '--', ':', '-', '-.',
respectively."),
('rgbcolor', 'The color as an RGB tuple.'),
('thickness', 'The thickness of the arrow.'),
('width', 'The width of the shaft of the arrow, in points.'),
('zorder', '2-d only: The layer level in which to draw')]
"""
return {'width': 'The width of the shaft of the arrow, in points.',
'rgbcolor': 'The color as an RGB tuple.',
'hue': 'The color given as a hue.',
'arrowshorten': 'The length in points to shorten the arrow.',
'arrowsize': 'The size of the arrowhead',
'thickness': 'The thickness of the arrow.',
'legend_label': 'The label for this item in the legend.',
'legend_color': 'The color of the legend text.',
'zorder': '2-d only: The layer level in which to draw',
'head': '2-d only: Which end of the path to draw the head (one of 0 (start), 1 (end) or 2 (both)',
'linestyle': "2d only: The style of the line, which is one of "
"'dashed', 'dotted', 'solid', 'dashdot', or '--', ':', '-', '-.', "
"respectively."}
def _plot3d_options(self, options=None):
"""
Translate 2D plot options into 3D plot options.
EXAMPLES::
sage: P = arrow((0,1), (2,3), width=5)
sage: p=P[0]; p
Arrow from (0.0,1.0) to (2.0,3.0)
sage: q=p.plot3d()
sage: q.thickness
5
"""
if options is None:
options = self.options()
options = dict(self.options())
options_3d = {}
if 'width' in options:
options_3d['thickness'] = options['width']
del options['width']
# ignore zorder and head in 3d plotting
if 'zorder' in options:
del options['zorder']
if 'head' in options:
del options['head']
if 'linestyle' in options:
del options['linestyle']
options_3d.update(GraphicPrimitive._plot3d_options(self, options))
return options_3d
def plot3d(self, ztail=0, zhead=0, **kwds):
"""
Takes 2D plot and places it in 3D.
EXAMPLES::
sage: A = arrow((0,0),(1,1))[0].plot3d()
sage: A.jmol_repr(A.testing_render_params())[0]
'draw line_1 diameter 2 arrow {0.0 0.0 0.0} {1.0 1.0 0.0} '
Note that we had to index the arrow to get the Arrow graphics
primitive. We can also change the height via the :meth:`Graphics.plot3d`
method, but only as a whole::
sage: A = arrow((0,0),(1,1)).plot3d(3)
sage: A.jmol_repr(A.testing_render_params())[0][0]
'draw line_1 diameter 2 arrow {0.0 0.0 3.0} {1.0 1.0 3.0} '
Optional arguments place both the head and tail outside the
`xy`-plane, but at different heights. This must be done on
the graphics primitive obtained by indexing::
sage: A=arrow((0,0),(1,1))[0].plot3d(3,4)
sage: A.jmol_repr(A.testing_render_params())[0]
'draw line_1 diameter 2 arrow {0.0 0.0 3.0} {1.0 1.0 4.0} '
"""
from sage.plot.plot3d.shapes2 import line3d
options = self._plot3d_options()
options.update(kwds)
return line3d([(self.xtail, self.ytail, ztail), (self.xhead, self.yhead, zhead)], arrow_head=True, **options)
def _repr_(self):
"""
Text representation of an arrow graphics primitive.
EXAMPLES::
sage: from sage.plot.arrow import Arrow
sage: Arrow(0,0,2,3,{})._repr_()
'Arrow from (0.0,0.0) to (2.0,3.0)'
"""
return "Arrow from (%s,%s) to (%s,%s)" % (self.xtail, self.ytail, self.xhead, self.yhead)
def _render_on_subplot(self, subplot):
r"""
Render this arrow in a subplot. This is the key function that
defines how this arrow graphics primitive is rendered in
matplotlib's library.
EXAMPLES:
This function implicitly ends up rendering this arrow on
a matplotlib subplot::
sage: arrow((0,1), (2,-1))
Graphics object consisting of 1 graphics primitive
TESTS:
The length of the ends (shrinkA and shrinkB) should not depend
on the width of the arrow, because Matplotlib already takes
this into account. See :trac:`12836`::
sage: fig = Graphics().matplotlib()
sage: sp = fig.add_subplot(1,1,1, label='axis1')
sage: a = arrow((0,0), (1,1))
sage: b = arrow((0,0), (1,1), width=20)
sage: p1 = a[0]._render_on_subplot(sp)
sage: p2 = b[0]._render_on_subplot(sp)
sage: p1.shrinkA == p2.shrinkA
True
sage: p1.shrinkB == p2.shrinkB
True
Dashed arrows should have solid arrowheads,
:trac:`12852`. This test saves the plot of a dashed arrow to
an EPS file. Within the EPS file, ``stroke`` will be called
twice: once to draw the line, and again to draw the
arrowhead. We check that both calls do not occur while the
dashed line style is enabled::
sage: a = arrow((0,0), (1,1), linestyle='dashed')
sage: filename = tmp_filename(ext='.eps')
sage: a.save(filename=filename)
sage: with open(filename, 'r') as f:
....: contents = f.read().replace('\n', ' ')
sage: two_stroke_pattern = r'setdash.*stroke.*stroke.*setdash.*setdash'
sage: import re
sage: two_stroke_re = re.compile(two_stroke_pattern)
sage: two_stroke_re.search(contents) is None
True
"""
from sage.plot.misc import get_matplotlib_linestyle
options = self.options()
head = options.pop('head')
if head == 0: style = '<|-'
elif head == 1: style = '-|>'
elif head == 2: style = '<|-|>'
else: raise KeyError('head parameter must be one of 0 (start), 1 (end) or 2 (both).')
width = float(options['width'])
arrowshorten_end = float(options.get('arrowshorten', 0)) / 2.0
arrowsize = float(options.get('arrowsize', 5))
head_width = arrowsize
head_length = arrowsize * 2.0
color = to_mpl_color(options['rgbcolor'])
from matplotlib.patches import FancyArrowPatch
p = FancyArrowPatch((self.xtail, self.ytail), (self.xhead, self.yhead),
lw=width,
arrowstyle='%s,head_width=%s,head_length=%s' % (style, head_width, head_length),
shrinkA=arrowshorten_end, shrinkB=arrowshorten_end,
fc=color, ec=color,
linestyle=get_matplotlib_linestyle(options['linestyle'], return_type='long'))
p.set_zorder(options['zorder'])
p.set_label(options['legend_label'])
if options['linestyle'] != 'solid':
# The next few lines work around a design issue in matplotlib.
# Currently, the specified linestyle is used to draw both the path
# and the arrowhead. If linestyle is 'dashed', this looks really
# odd. This code is from Jae-Joon Lee in response to a post to the
# matplotlib mailing list.
# See http://sourceforge.net/mailarchive/forum.php?thread_name=CAG%3DuJ%2Bnw2dE05P9TOXTz_zp-mGP3cY801vMH7yt6vgP9_WzU8w%40mail.gmail.com&forum_name=matplotlib-users
import matplotlib.patheffects as pe
class CheckNthSubPath(object):
def __init__(self, patch, n):
"""
creates an callable object that returns True if the
provided path is the n-th path from the patch.
"""
self._patch = patch
self._n = n
def get_paths(self, renderer):
self._patch.set_dpi_cor(renderer.points_to_pixels(1.))
paths, fillables = self._patch.get_path_in_displaycoord()
return paths
def __call__(self, renderer, gc, tpath, affine, rgbFace):
path = self.get_paths(renderer)[self._n]
vert1, code1 = path.vertices, path.codes
import numpy as np
return np.array_equal(vert1, tpath.vertices) and np.array_equal(code1, tpath.codes)
class ConditionalStroke(pe.RendererBase):
def __init__(self, condition_func, pe_list):
"""
path effect that is only applied when the condition_func
returns True.
"""
super(ConditionalStroke, self).__init__()
self._pe_list = pe_list
self._condition_func = condition_func
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
if self._condition_func(renderer, gc, tpath, affine, rgbFace):
for pe1 in self._pe_list:
pe1.draw_path(renderer, gc, tpath, affine, rgbFace)
pe1 = ConditionalStroke(CheckNthSubPath(p, 0), [pe.Stroke()])
pe2 = ConditionalStroke(CheckNthSubPath(p, 1), [pe.Stroke(dashes={'dash_offset': 0, 'dash_list': None})])
p.set_path_effects([pe1, pe2])
subplot.add_patch(p)
return p
def arrow(tailpoint=None, headpoint=None, **kwds):
"""
Returns either a 2-dimensional or 3-dimensional arrow depending
on value of points.
For information regarding additional arguments, see either arrow2d?
or arrow3d?.
EXAMPLES::
sage: arrow((0,0), (1,1))
Graphics object consisting of 1 graphics primitive
.. PLOT::
sphinx_plot(arrow((0,0), (1,1)))
::
sage: arrow((0,0,1), (1,1,1))
Graphics3d Object
.. PLOT::
sphinx_plot(arrow((0,0,1), (1,1,1)))
"""
try:
return arrow2d(tailpoint, headpoint, **kwds)
except ValueError:
from sage.plot.plot3d.shapes import arrow3d
return arrow3d(tailpoint, headpoint, **kwds)
@rename_keyword(color='rgbcolor')
@options(width=2, rgbcolor=(0,0,1), zorder=2, head=1, linestyle='solid', legend_label=None)
def arrow2d(tailpoint=None, headpoint=None, path=None, **options):
"""
If ``tailpoint`` and ``headpoint`` are provided, returns an arrow from
(xtail, ytail) to (xhead, yhead). If ``tailpoint`` or ``headpoint`` is None and
``path`` is not None, returns an arrow along the path. (See further info on
paths in :class:`bezier_path`).
INPUT:
- ``tailpoint`` - the starting point of the arrow
- ``headpoint`` - where the arrow is pointing to
- ``path`` - the list of points and control points (see bezier_path for
detail) that the arrow will follow from source to destination
- ``head`` - 0, 1 or 2, whether to draw the head at the start (0), end (1)
or both (2) of the path (using 0 will swap headpoint and tailpoint).
This is ignored in 3D plotting.
- ``linestyle`` - (default: ``'solid'``) The style of the line, which is
one of ``'dashed'``, ``'dotted'``, ``'solid'``, ``'dashdot'``,
or ``'--'``, ``':'``, ``'-'``, ``'-.'``, respectively.
- ``width`` - (default: 2) the width of the arrow shaft, in points
- ``color`` - (default: (0,0,1)) the color of the arrow (as an RGB tuple or
a string)
- ``hue`` - the color of the arrow (as a number)
- ``arrowsize`` - the size of the arrowhead
- ``arrowshorten`` - the length in points to shorten the arrow (ignored if
using path parameter)
- ``legend_label`` - the label for this item in the legend
- ``legend_color`` - the color for the legend label
- ``zorder`` - the layer level to draw the arrow-- note that this is
ignored in 3D plotting.
EXAMPLES:
A straight, blue arrow::
sage: arrow2d((1,1), (3,3))
Graphics object consisting of 1 graphics primitive
.. PLOT::
sphinx_plot(arrow2d((1,1), (3,3)))
Make a red arrow::
sage: arrow2d((-1,-1), (2,3), color=(1,0,0))
Graphics object consisting of 1 graphics primitive
.. PLOT::
sphinx_plot(arrow2d((-1,-1), (2,3), color=(1,0,0)))
::
sage: arrow2d((-1,-1), (2,3), color='red')
Graphics object consisting of 1 graphics primitive
.. PLOT::
sphinx_plot(arrow2d((-1,-1), (2,3), color='red'))
You can change the width of an arrow::
sage: arrow2d((1,1), (3,3), width=5, arrowsize=15)
Graphics object consisting of 1 graphics primitive
.. PLOT::
P = arrow2d((1,1), (3,3), width=5, arrowsize=15)
sphinx_plot(P)
Use a dashed line instead of a solid one for the arrow::
sage: arrow2d((1,1), (3,3), linestyle='dashed')
Graphics object consisting of 1 graphics primitive
sage: arrow2d((1,1), (3,3), linestyle='--')
Graphics object consisting of 1 graphics primitive
.. PLOT::
P = arrow2d((1,1), (3,3), linestyle='--')
sphinx_plot(P)
A pretty circle of arrows::
sage: sum([arrow2d((0,0), (cos(x),sin(x)), hue=x/(2*pi)) for x in [0..2*pi,step=0.1]])
Graphics object consisting of 63 graphics primitives
.. PLOT::
P = sum([arrow2d((0,0), (cos(x*0.1),sin(x*0.1)), hue=x/(20*pi)) for x in range(floor(20*pi)+1)])
sphinx_plot(P)
If we want to draw the arrow between objects, for example, the
boundaries of two lines, we can use the ``arrowshorten`` option
to make the arrow shorter by a certain number of points::
sage: L1 = line([(0,0), (1,0)], thickness=10)
sage: L2 = line([(0,1), (1,1)], thickness=10)
sage: A = arrow2d((0.5,0), (0.5,1), arrowshorten=10, rgbcolor=(1,0,0))
sage: L1 + L2 + A
Graphics object consisting of 3 graphics primitives
.. PLOT::
L1 = line([(0,0), (1,0)],thickness=10)
L2 = line([(0,1), (1,1)], thickness=10)
A = arrow2d((0.5,0), (0.5,1), arrowshorten=10, rgbcolor=(1,0,0))
sphinx_plot(L1 + L2 + A)
If BOTH ``headpoint`` and ``tailpoint`` are None, then an empty plot is
returned::
sage: arrow2d(headpoint=None, tailpoint=None)
Graphics object consisting of 0 graphics primitives
We can also draw an arrow with a legend::
sage: arrow((0,0), (0,2), legend_label='up', legend_color='purple')
Graphics object consisting of 1 graphics primitive
.. PLOT::
P = arrow((0,0), (0,2), legend_label='up', legend_color='purple')
sphinx_plot(P)
Extra options will get passed on to :meth:`Graphics.show()`, as long as they are valid::
sage: arrow2d((-2,2), (7,1), frame=True)
Graphics object consisting of 1 graphics primitive
.. PLOT::
sphinx_plot(arrow2d((-2,2), (7,1), frame=True))
::
sage: arrow2d((-2,2), (7,1)).show(frame=True)
"""
from sage.plot.all import Graphics
g = Graphics()
g._set_extra_kwds(Graphics._extract_kwds_for_show(options))
if headpoint is not None and tailpoint is not None:
xtail, ytail = tailpoint
xhead, yhead = headpoint
g.add_primitive(Arrow(xtail, ytail, xhead, yhead, options=options))
elif path is not None:
g.add_primitive(CurveArrow(path, options=options))
elif tailpoint is None and headpoint is None:
return g
else:
raise TypeError('Arrow requires either both headpoint and tailpoint or a path parameter.')
if options['legend_label']:
g.legend(True)
g._legend_colors = [options['legend_color']]
return g
| 37.894009 | 175 | 0.557177 |
from sage.plot.primitive import GraphicPrimitive
from sage.misc.decorators import options, rename_keyword
from sage.plot.colors import to_mpl_color
class CurveArrow(GraphicPrimitive):
def __init__(self, path, options):
import numpy as np
self.path = path
codes = [1] + (len(self.path[0])-1)*[len(self.path[0])]
vertices = self.path[0]
for curve in self.path[1:]:
vertices += curve
codes += (len(curve))*[len(curve)+1]
self.codes = codes
self.vertices = np.array(vertices, np.float)
GraphicPrimitive.__init__(self, options)
def get_minmax_data(self):
return {'xmin': self.vertices[:,0].min(),
'xmax': self.vertices[:,0].max(),
'ymin': self.vertices[:,1].min(),
'ymax': self.vertices[:,1].max()}
def _allowed_options(self):
return {'width': 'The width of the shaft of the arrow, in points.',
'rgbcolor': 'The color as an RGB tuple.',
'hue': 'The color given as a hue.',
'legend_label': 'The label for this item in the legend.',
'legend_color': 'The color of the legend text.',
'arrowstyle': 'todo',
'arrowsize': 'The size of the arrowhead',
'thickness': 'The thickness of the arrow.',
'zorder': '2-d only: The layer level in which to draw',
'head': '2-d only: Which end of the path to draw the head (one of 0 (start), 1 (end) or 2 (both)',
'linestyle': "2d only: The style of the line, which is one of "
"'dashed', 'dotted', 'solid', 'dashdot', or '--', ':', '-', '-.', "
"respectively."}
def _repr_(self):
return "CurveArrow from %s to %s" % (self.path[0][0], self.path[-1][-1])
def _render_on_subplot(self, subplot):
from sage.plot.misc import get_matplotlib_linestyle
options = self.options()
width = float(options['width'])
head = options.pop('head')
if head == 0: style = '<|-'
elif head == 1: style = '-|>'
elif head == 2: style = '<|-|>'
else: raise KeyError('head parameter must be one of 0 (start), 1 (end) or 2 (both).')
arrowsize = float(options.get('arrowsize', 5))
head_width = arrowsize
head_length = arrowsize * 2.0
color = to_mpl_color(options['rgbcolor'])
from matplotlib.patches import FancyArrowPatch
from matplotlib.path import Path
bpath = Path(self.vertices, self.codes)
p = FancyArrowPatch(path=bpath,
lw=width, arrowstyle='%s,head_width=%s,head_length=%s' % (style, head_width, head_length),
fc=color, ec=color,
linestyle=get_matplotlib_linestyle(options['linestyle'], return_type='long'))
p.set_zorder(options['zorder'])
p.set_label(options['legend_label'])
subplot.add_patch(p)
return p
class Arrow(GraphicPrimitive):
def __init__(self, xtail, ytail, xhead, yhead, options):
self.xtail = float(xtail)
self.xhead = float(xhead)
self.ytail = float(ytail)
self.yhead = float(yhead)
GraphicPrimitive.__init__(self, options)
def get_minmax_data(self):
return {'xmin': min(self.xtail, self.xhead),
'xmax': max(self.xtail, self.xhead),
'ymin': min(self.ytail, self.yhead),
'ymax': max(self.ytail, self.yhead)}
def _allowed_options(self):
return {'width': 'The width of the shaft of the arrow, in points.',
'rgbcolor': 'The color as an RGB tuple.',
'hue': 'The color given as a hue.',
'arrowshorten': 'The length in points to shorten the arrow.',
'arrowsize': 'The size of the arrowhead',
'thickness': 'The thickness of the arrow.',
'legend_label': 'The label for this item in the legend.',
'legend_color': 'The color of the legend text.',
'zorder': '2-d only: The layer level in which to draw',
'head': '2-d only: Which end of the path to draw the head (one of 0 (start), 1 (end) or 2 (both)',
'linestyle': "2d only: The style of the line, which is one of "
"'dashed', 'dotted', 'solid', 'dashdot', or '--', ':', '-', '-.', "
"respectively."}
def _plot3d_options(self, options=None):
if options is None:
options = self.options()
options = dict(self.options())
options_3d = {}
if 'width' in options:
options_3d['thickness'] = options['width']
del options['width']
if 'zorder' in options:
del options['zorder']
if 'head' in options:
del options['head']
if 'linestyle' in options:
del options['linestyle']
options_3d.update(GraphicPrimitive._plot3d_options(self, options))
return options_3d
def plot3d(self, ztail=0, zhead=0, **kwds):
from sage.plot.plot3d.shapes2 import line3d
options = self._plot3d_options()
options.update(kwds)
return line3d([(self.xtail, self.ytail, ztail), (self.xhead, self.yhead, zhead)], arrow_head=True, **options)
def _repr_(self):
return "Arrow from (%s,%s) to (%s,%s)" % (self.xtail, self.ytail, self.xhead, self.yhead)
def _render_on_subplot(self, subplot):
from sage.plot.misc import get_matplotlib_linestyle
options = self.options()
head = options.pop('head')
if head == 0: style = '<|-'
elif head == 1: style = '-|>'
elif head == 2: style = '<|-|>'
else: raise KeyError('head parameter must be one of 0 (start), 1 (end) or 2 (both).')
width = float(options['width'])
arrowshorten_end = float(options.get('arrowshorten', 0)) / 2.0
arrowsize = float(options.get('arrowsize', 5))
head_width = arrowsize
head_length = arrowsize * 2.0
color = to_mpl_color(options['rgbcolor'])
from matplotlib.patches import FancyArrowPatch
p = FancyArrowPatch((self.xtail, self.ytail), (self.xhead, self.yhead),
lw=width,
arrowstyle='%s,head_width=%s,head_length=%s' % (style, head_width, head_length),
shrinkA=arrowshorten_end, shrinkB=arrowshorten_end,
fc=color, ec=color,
linestyle=get_matplotlib_linestyle(options['linestyle'], return_type='long'))
p.set_zorder(options['zorder'])
p.set_label(options['legend_label'])
if options['linestyle'] != 'solid':
import matplotlib.patheffects as pe
class CheckNthSubPath(object):
def __init__(self, patch, n):
self._patch = patch
self._n = n
def get_paths(self, renderer):
self._patch.set_dpi_cor(renderer.points_to_pixels(1.))
paths, fillables = self._patch.get_path_in_displaycoord()
return paths
def __call__(self, renderer, gc, tpath, affine, rgbFace):
path = self.get_paths(renderer)[self._n]
vert1, code1 = path.vertices, path.codes
import numpy as np
return np.array_equal(vert1, tpath.vertices) and np.array_equal(code1, tpath.codes)
class ConditionalStroke(pe.RendererBase):
def __init__(self, condition_func, pe_list):
super(ConditionalStroke, self).__init__()
self._pe_list = pe_list
self._condition_func = condition_func
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
if self._condition_func(renderer, gc, tpath, affine, rgbFace):
for pe1 in self._pe_list:
pe1.draw_path(renderer, gc, tpath, affine, rgbFace)
pe1 = ConditionalStroke(CheckNthSubPath(p, 0), [pe.Stroke()])
pe2 = ConditionalStroke(CheckNthSubPath(p, 1), [pe.Stroke(dashes={'dash_offset': 0, 'dash_list': None})])
p.set_path_effects([pe1, pe2])
subplot.add_patch(p)
return p
def arrow(tailpoint=None, headpoint=None, **kwds):
try:
return arrow2d(tailpoint, headpoint, **kwds)
except ValueError:
from sage.plot.plot3d.shapes import arrow3d
return arrow3d(tailpoint, headpoint, **kwds)
@rename_keyword(color='rgbcolor')
@options(width=2, rgbcolor=(0,0,1), zorder=2, head=1, linestyle='solid', legend_label=None)
def arrow2d(tailpoint=None, headpoint=None, path=None, **options):
from sage.plot.all import Graphics
g = Graphics()
g._set_extra_kwds(Graphics._extract_kwds_for_show(options))
if headpoint is not None and tailpoint is not None:
xtail, ytail = tailpoint
xhead, yhead = headpoint
g.add_primitive(Arrow(xtail, ytail, xhead, yhead, options=options))
elif path is not None:
g.add_primitive(CurveArrow(path, options=options))
elif tailpoint is None and headpoint is None:
return g
else:
raise TypeError('Arrow requires either both headpoint and tailpoint or a path parameter.')
if options['legend_label']:
g.legend(True)
g._legend_colors = [options['legend_color']]
return g
| true | true |
f7290d71439131c7316cb803ec52d5d227b0a777 | 7,254 | py | Python | src/integral_timber_joints/process/state.py | gramaziokohler/integral_timber_joints | 70e75a66e13b5ada580fcffc58879f5fcb8fce32 | [
"MIT"
] | 3 | 2021-09-16T13:08:32.000Z | 2022-02-21T17:20:21.000Z | src/integral_timber_joints/process/state.py | gramaziokohler/integral_timber_joints | 70e75a66e13b5ada580fcffc58879f5fcb8fce32 | [
"MIT"
] | 80 | 2021-09-06T09:55:38.000Z | 2022-03-22T18:44:24.000Z | src/integral_timber_joints/process/state.py | gramaziokohler/integral_timber_joints | 70e75a66e13b5ada580fcffc58879f5fcb8fce32 | [
"MIT"
] | null | null | null | try:
from typing import Dict, List, Optional, Tuple
from integral_timber_joints.process import RobotClampAssemblyProcess
except:
pass
import itertools
from compas.data import Data
from compas.geometry import Frame
from compas.geometry.transformations.transformation import Transformation
from compas.robots import Configuration
class SceneState(Data):
"""
This is a dictionary like object that holds all the states of all objects in a given moment.
This static moment is refered to as a scene.
A movement has a start scene and end scene.
"""
def __init__(self, process=None):
# type: (RobotClampAssemblyProcess) -> None
self.object_state_dict = {}
self.object_keys = []
if process is not None:
assembly = process.assembly
# self.beam_states = {} # type: dict[str, list]
# compile a list of keys that should contain state values
for beam_id in assembly.sequence:
self.object_keys.append((beam_id, 'f'))
self.object_keys.append((beam_id, 'a'))
for tool_id in process.tool_ids:
self.object_keys.append((tool_id, 'f'))
self.object_keys.append((tool_id, 'a'))
self.object_keys.append((tool_id, 'c'))
# Singleton items
self.object_keys.append(('tool_changer', 'f')) # TC Base Frame
self.object_keys.append(('tool_changer', 'a')) # Always True
self.object_keys.append(('robot', 'f')) # Robot Target Frame
self.object_keys.append(('robot', 'c')) # Full Configuration
@classmethod
def from_data(cls, data):
"""Construct a RobotClampAssemblyProcess from structured data.
Overridden the from_data method because we have to assign self to dependency.process
"""
scene = cls()
scene.data = data
return scene
def to_data(self):
return self.data
def __getitem__(self, key):
if key not in self.object_state_dict:
return None
else:
return self.object_state_dict[key]
def __setitem__(self, key, value):
self.object_state_dict[key] = value
def __len__(self):
return len(self.object_state_dict)
def __contains__(self, key):
return key in self.object_state_dict
@property
def data(self):
# Flatten Tuple dict keys to strings
flattened_dict = {}
for key, value in self.object_state_dict.items():
flattened_dict[str(key)] = value
data = {
'object_keys': self.object_keys,
'flattened_dict': flattened_dict,
}
return data
@data.setter
def data(self, data):
self.object_keys = data.get('object_keys', [])
# Unflatten dict keys to Tuples
import ast
flattened_dict = data.get('flattened_dict', {})
for key, value in flattened_dict.items():
self[ast.literal_eval(key)] = value
def has_unknown_state(self, skip_robot_config=True):
# type: (RobotClampAssemblyProcess) -> bool
return len(list(self.keys_with_unknown_state, skip_robot_config)) > 0
def keys_with_unknown_state(self, skip_robot_config=True):
for key in self.object_keys:
if skip_robot_config and key == ('robot', 'c'):
continue
if key not in self:
yield key
class ObjectState(object):
""" Base class of a state object.
Note that the State object should exist in a state dictionary where the key refers to an object id.
`ObjectState.current_frame`
- A frame describing the loaction of an object. `None` can be used to represent an un-transformed state.
`ObjectState.kinematic_config`
- A dictionary to describe the kinematic configuration of a RobotModel or ToolModel (such as Clamp and Gripper)
- The dictionary uses `joint names` as keys and `values` in degrees or millimeters.
- `None` can be used for Beams and Env Objects that does not have kinematic state
`ObjectState.attached_to_robot`
- Bool value describing if an object is attached to the robot.
- Note that the ToolChanger is always attached to the robot's flange. (Level 1)
- Gripper or Clamp can be attached to a robot's ToolChanger. (Level 2)
- Beam can only be attached to a Gripper or Clamp (Level 3)
"""
def __init__(self, current_frame=None, attached_to_robot=False, kinematic_config=None):
self.current_frame = current_frame # type: Frame
self.attached_to_robot = attached_to_robot # type: bool
self.kinematic_config = kinematic_config # type: ignore
def to_data(self):
"""Simpliest way to get this class serialized.
"""
return self.data
@classmethod
def from_data(cls, data):
"""Construct a Movement from structured data. Subclass must add their properity to
the data properity.
"""
state = cls()
state.data = data
return state
@property
def data(self):
data = {
'current_frame': self.current_frame,
'kinematic_config': self.kinematic_config,
'attached_to_robot': self.attached_to_robot,
}
return data
@data.setter
def data(self, data):
self.current_frame = data.get('current_frame', None)
self.kinematic_config = data.get('kinematic_config', None)
self.attached_to_robot = data.get('attached_to_robot', False)
def __repr__(self):
return self.__str__()
def __str__(self):
return "State: current frame: {} | config: {} | attached to robot: {}".format(
self.current_frame, self.kinematic_config, self.attached_to_robot)
def __eq__(self, other):
if not hasattr(other, 'current_frame') or not hasattr(other, 'attached_to_robot') or not hasattr(other, 'kinematic_config'):
return False
if self.current_frame != other.current_frame:
return False
if self.attached_to_robot != other.attached_to_robot:
return False
if self.kinematic_config != other.kinematic_config:
return False
return True
def get_object_from_flange(object_states, object_id):
flange_frame = object_states['robot'].current_frame
object_frame = object_states[object_id].current_frame
world_from_flange = Transformation.from_frame(flange_frame)
world_from_object = Transformation.from_frame(object_frame)
return world_from_object.inverse() * world_from_flange
def copy_state_dict(target_state_dict, source_state_dict, clear=False, deep_copy=False):
# type: (dict[str, ObjectState], dict[str, ObjectState], bool, bool) -> None
"""Copy one state dictionary to another.
If `clear = True`, keys not present in the source_state_dict are removed.
If `deep_copy = True`, deep copy is made for each object State """
if clear:
target_state_dict.clear()
for object_id, state in source_state_dict.items():
if deep_copy:
target_state_dict[object_id] = ObjectState.from_data(state.to_data())
else:
target_state_dict[object_id] = state
| 35.73399 | 132 | 0.656465 | try:
from typing import Dict, List, Optional, Tuple
from integral_timber_joints.process import RobotClampAssemblyProcess
except:
pass
import itertools
from compas.data import Data
from compas.geometry import Frame
from compas.geometry.transformations.transformation import Transformation
from compas.robots import Configuration
class SceneState(Data):
def __init__(self, process=None):
self.object_state_dict = {}
self.object_keys = []
if process is not None:
assembly = process.assembly
for beam_id in assembly.sequence:
self.object_keys.append((beam_id, 'f'))
self.object_keys.append((beam_id, 'a'))
for tool_id in process.tool_ids:
self.object_keys.append((tool_id, 'f'))
self.object_keys.append((tool_id, 'a'))
self.object_keys.append((tool_id, 'c'))
self.object_keys.append(('tool_changer', 'f'))
self.object_keys.append(('tool_changer', 'a'))
self.object_keys.append(('robot', 'f'))
self.object_keys.append(('robot', 'c'))
@classmethod
def from_data(cls, data):
scene = cls()
scene.data = data
return scene
def to_data(self):
return self.data
def __getitem__(self, key):
if key not in self.object_state_dict:
return None
else:
return self.object_state_dict[key]
def __setitem__(self, key, value):
self.object_state_dict[key] = value
def __len__(self):
return len(self.object_state_dict)
def __contains__(self, key):
return key in self.object_state_dict
@property
def data(self):
flattened_dict = {}
for key, value in self.object_state_dict.items():
flattened_dict[str(key)] = value
data = {
'object_keys': self.object_keys,
'flattened_dict': flattened_dict,
}
return data
@data.setter
def data(self, data):
self.object_keys = data.get('object_keys', [])
import ast
flattened_dict = data.get('flattened_dict', {})
for key, value in flattened_dict.items():
self[ast.literal_eval(key)] = value
def has_unknown_state(self, skip_robot_config=True):
return len(list(self.keys_with_unknown_state, skip_robot_config)) > 0
def keys_with_unknown_state(self, skip_robot_config=True):
for key in self.object_keys:
if skip_robot_config and key == ('robot', 'c'):
continue
if key not in self:
yield key
class ObjectState(object):
def __init__(self, current_frame=None, attached_to_robot=False, kinematic_config=None):
self.current_frame = current_frame
self.attached_to_robot = attached_to_robot
self.kinematic_config = kinematic_config
def to_data(self):
return self.data
@classmethod
def from_data(cls, data):
state = cls()
state.data = data
return state
@property
def data(self):
data = {
'current_frame': self.current_frame,
'kinematic_config': self.kinematic_config,
'attached_to_robot': self.attached_to_robot,
}
return data
@data.setter
def data(self, data):
self.current_frame = data.get('current_frame', None)
self.kinematic_config = data.get('kinematic_config', None)
self.attached_to_robot = data.get('attached_to_robot', False)
def __repr__(self):
return self.__str__()
def __str__(self):
return "State: current frame: {} | config: {} | attached to robot: {}".format(
self.current_frame, self.kinematic_config, self.attached_to_robot)
def __eq__(self, other):
if not hasattr(other, 'current_frame') or not hasattr(other, 'attached_to_robot') or not hasattr(other, 'kinematic_config'):
return False
if self.current_frame != other.current_frame:
return False
if self.attached_to_robot != other.attached_to_robot:
return False
if self.kinematic_config != other.kinematic_config:
return False
return True
def get_object_from_flange(object_states, object_id):
flange_frame = object_states['robot'].current_frame
object_frame = object_states[object_id].current_frame
world_from_flange = Transformation.from_frame(flange_frame)
world_from_object = Transformation.from_frame(object_frame)
return world_from_object.inverse() * world_from_flange
def copy_state_dict(target_state_dict, source_state_dict, clear=False, deep_copy=False):
if clear:
target_state_dict.clear()
for object_id, state in source_state_dict.items():
if deep_copy:
target_state_dict[object_id] = ObjectState.from_data(state.to_data())
else:
target_state_dict[object_id] = state
| true | true |
f7290e7b579df05a7b491debaaa1d6c44c7e50e1 | 1,409 | py | Python | mrjob/examples/mr_wc.py | cleemesser/mrjob | f24991ffae9e7a2dad9fd3403d5e96635ededa4a | [
"Apache-2.0"
] | null | null | null | mrjob/examples/mr_wc.py | cleemesser/mrjob | f24991ffae9e7a2dad9fd3403d5e96635ededa4a | [
"Apache-2.0"
] | null | null | null | mrjob/examples/mr_wc.py | cleemesser/mrjob | f24991ffae9e7a2dad9fd3403d5e96635ededa4a | [
"Apache-2.0"
] | null | null | null | # Copyright 2009-2010 Yelp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An implementation of wc as an MRJob.
This is meant as an example of why mapper_final is useful."""
from mrjob.job import MRJob
class MRWordCountUtility(MRJob):
def __init__(self, *args, **kwargs):
super(MRWordCountUtility, self).__init__(*args, **kwargs)
self.chars = 0
self.words = 0
self.lines = 0
def mapper(self, _, line):
if False: yield # I'm a generator!
self.chars += len(line) + 1 # +1 for newline
self.words += sum(1 for word in line.split() if word.strip())
self.lines += 1
def mapper_final(self):
yield('chars', self.chars)
yield('words', self.words)
yield('lines', self.lines)
def reducer(self, key, values):
yield(key, sum(values))
if __name__ == '__main__':
MRWordCountUtility.run()
| 29.978723 | 74 | 0.66785 |
from mrjob.job import MRJob
class MRWordCountUtility(MRJob):
def __init__(self, *args, **kwargs):
super(MRWordCountUtility, self).__init__(*args, **kwargs)
self.chars = 0
self.words = 0
self.lines = 0
def mapper(self, _, line):
if False: yield
self.chars += len(line) + 1 # +1 for newline
self.words += sum(1 for word in line.split() if word.strip())
self.lines += 1
def mapper_final(self):
yield('chars', self.chars)
yield('words', self.words)
yield('lines', self.lines)
def reducer(self, key, values):
yield(key, sum(values))
if __name__ == '__main__':
MRWordCountUtility.run()
| true | true |
f7290f75e2fb79fe8a9681c9242e628e2d363fde | 2,843 | py | Python | doom/database.py | iiCodeThings/admin | 851ebcb8b90d4bea4bd7468ed403ec13fca801ee | [
"MIT"
] | 1 | 2022-02-16T08:20:12.000Z | 2022-02-16T08:20:12.000Z | doom/database.py | iiCodeThings/doom | a04f239abfefc4c8b70ec7c7b00b42622ec13a86 | [
"MIT"
] | null | null | null | doom/database.py | iiCodeThings/doom | a04f239abfefc4c8b70ec7c7b00b42622ec13a86 | [
"MIT"
] | null | null | null | import datetime
from .extensions import db
Column = db.Column
relationship = db.relationship
class CRUDMixin(object):
"""Mixin that adds convenience methods for CRUD (create, read, update, delete) operations."""
@classmethod
def create(cls, **kwargs):
"""Create a new record and save it the database."""
instance = cls(**kwargs)
return instance.save()
def update(self, commit=True, **kwargs):
"""Update specific fields of a record."""
for attr, value in kwargs.items():
setattr(self, attr, value)
if commit:
return self.save()
return self
def save(self, commit=True):
"""Save the record."""
db.session.add(self)
if commit:
db.session.commit()
return self
def delete(self, commit: bool = True) -> None:
"""Remove the record from the database."""
db.session.delete(self)
if commit:
return db.session.commit()
return
class Model(CRUDMixin, db.Model):
"""Base model class that includes CRUD convenience methods."""
__abstract__ = True
def to_dict(self, ignored_fields=['password'], added_params=None):
result = dict((col.name, getattr(self, col.name)) for col in self.__table__.columns)
for k, v in result.items():
if type(result[k]) in [datetime.datetime, datetime.date]:
result[k] = v.strftime("%Y-%m-%d %H:%M:%S")
for ignore_field in ignored_fields:
if ignore_field in result.keys():
result.pop(ignore_field)
if added_params:
for key in added_params.keys():
result[key] = added_params[key]
return result
class PkModel(Model):
"""Base model class that includes CRUD convenience methods, plus adds a 'primary key' column named ``id``."""
__abstract__ = True
id = Column(db.Integer, primary_key=True)
@classmethod
def get_by_id(cls, record_id):
"""Get record by ID."""
if any(
(
isinstance(record_id, (str, bytes)) and record_id.isdigit(),
isinstance(record_id, (int, float)),
)
):
return cls.query.get(int(record_id))
return None
def reference_col(
tablename, nullable=False, pk_name="id", foreign_key_kwargs=None, column_kwargs=None
):
"""Column that adds primary key foreign key reference.
Usage: ::
category_id = reference_col('category')
category = relationship('Category', backref='categories')
"""
foreign_key_kwargs = foreign_key_kwargs or {}
column_kwargs = column_kwargs or {}
return Column(
db.ForeignKey(f"{tablename}.{pk_name}", **foreign_key_kwargs),
nullable=nullable,
**column_kwargs,
)
| 28.43 | 113 | 0.602884 | import datetime
from .extensions import db
Column = db.Column
relationship = db.relationship
class CRUDMixin(object):
@classmethod
def create(cls, **kwargs):
instance = cls(**kwargs)
return instance.save()
def update(self, commit=True, **kwargs):
for attr, value in kwargs.items():
setattr(self, attr, value)
if commit:
return self.save()
return self
def save(self, commit=True):
db.session.add(self)
if commit:
db.session.commit()
return self
def delete(self, commit: bool = True) -> None:
db.session.delete(self)
if commit:
return db.session.commit()
return
class Model(CRUDMixin, db.Model):
__abstract__ = True
def to_dict(self, ignored_fields=['password'], added_params=None):
result = dict((col.name, getattr(self, col.name)) for col in self.__table__.columns)
for k, v in result.items():
if type(result[k]) in [datetime.datetime, datetime.date]:
result[k] = v.strftime("%Y-%m-%d %H:%M:%S")
for ignore_field in ignored_fields:
if ignore_field in result.keys():
result.pop(ignore_field)
if added_params:
for key in added_params.keys():
result[key] = added_params[key]
return result
class PkModel(Model):
__abstract__ = True
id = Column(db.Integer, primary_key=True)
@classmethod
def get_by_id(cls, record_id):
if any(
(
isinstance(record_id, (str, bytes)) and record_id.isdigit(),
isinstance(record_id, (int, float)),
)
):
return cls.query.get(int(record_id))
return None
def reference_col(
tablename, nullable=False, pk_name="id", foreign_key_kwargs=None, column_kwargs=None
):
foreign_key_kwargs = foreign_key_kwargs or {}
column_kwargs = column_kwargs or {}
return Column(
db.ForeignKey(f"{tablename}.{pk_name}", **foreign_key_kwargs),
nullable=nullable,
**column_kwargs,
)
| true | true |
f7290fa324d8e90eca6e3ad67c7ef41b58f33c7b | 11,952 | py | Python | Linear_Ridge_Regression .py | Nirmal1313/Regression-Methods | b1f885dc798ca4aae47661e0a27fe0e21e4ee4e0 | [
"MIT"
] | null | null | null | Linear_Ridge_Regression .py | Nirmal1313/Regression-Methods | b1f885dc798ca4aae47661e0a27fe0e21e4ee4e0 | [
"MIT"
] | null | null | null | Linear_Ridge_Regression .py | Nirmal1313/Regression-Methods | b1f885dc798ca4aae47661e0a27fe0e21e4ee4e0 | [
"MIT"
] | null | null | null |
# coding: utf-8
# In[1]:
import pandas as pd # for working with data in Python
import numpy as np
import matplotlib.pyplot as plt # for visualization
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from sklearn import linear_model
# Load the Kaggle house-price data.  pd.read_csv() builds a DataFrame per
# file; the numbered print() calls throughout this script are console
# progress markers only.
train = pd.read_csv('train.csv')
test = pd.read_csv('test.csv')
print("1 \n")
# check out the size of the data
print("Train data shape:", train.shape)
print("Test data shape:", test.shape)
print("2 \n")
# look at a few rows using the DataFrame.head() method
# train.head()
print(train.head())
# In[3]:
plt.style.use(style='ggplot')
plt.rcParams['figure.figsize'] = (10, 6)
#######################################################
#  2. Explore the data and engineer Features        ###
#######################################################
print("3 \n")
# In[4]:
# to get more information like count, mean, std, min, max etc
# train.SalePrice.describe()
print (train.SalePrice.describe())
print("4 \n")
# to plot a histogram of SalePrice
print ("Skew is:", train.SalePrice.skew())
plt.hist(train.SalePrice, color='blue')
plt.show()
print("5 \n")
# In[5]:
# use np.log() to transform train.SalePrice and calculate the skewness a second time, as well as re-plot the data
target = np.log(train.SalePrice)
print ("\n Skew is:", target.skew())
plt.hist(target, color='blue')
plt.show()
# In[6]:
# return a subset of columns matching the specified data types
numeric_features = train.select_dtypes(include=[np.number])
# numeric_features.dtypes
print(numeric_features.dtypes)
# In[7]:
corr = numeric_features.corr()
# The first five features are the most positively correlated with SalePrice,
# while the next five are the most negatively correlated.
print (corr['SalePrice'].sort_values(ascending=False)[:5], '\n')
print (corr['SalePrice'].sort_values(ascending=False)[-5:])
# In[8]:
print(train.OverallQual.unique())
# NOTE(review): the bare triple-quoted strings below are no-op string
# expressions used to disable notebook cells; kept verbatim.
"""
print("9 \n")
"""
# investigate the relationship between OverallQual and SalePrice.
# We set index='OverallQual' and values='SalePrice'. We chose to look at the median here.
quality_pivot = train.pivot_table(index='OverallQual', values='SalePrice', aggfunc=np.median)
print(quality_pivot)
# In[11]:
# visualize this pivot table more easily, we can create a bar plot
# Notice that the median sales price strictly increases as Overall Quality increases.
quality_pivot.plot(kind='bar', color='blue')
plt.xlabel('Overall Quality')
plt.ylabel('Median Sale Price')
plt.xticks(rotation=0)
plt.show()
# In[12]:
print("11 \n")
"""
#to generate some scatter plots and visualize the relationship between the Ground Living Area(GrLivArea) and SalePrice
plt.scatter(x=train['GrLivArea'], y=target)
plt.ylabel('Sale Price')
plt.xlabel('Above grade (ground) living area square feet')
plt.show()
"""
print("12 \n")
# do the same for GarageArea.
plt.scatter(x=train['GarageArea'], y=target)
plt.ylabel('Sale Price')
plt.xlabel('Garage Area')
plt.show()
# In[13]:
# create a new dataframe with some outliers removed
train = train[train['GarageArea'] < 1200]
# display the previous graph again without outliers
plt.scatter(x=train['GarageArea'], y=np.log(train.SalePrice))
plt.xlim(-200,1600) # This forces the same scale as before
plt.ylabel('Sale Price')
plt.xlabel('Garage Area')
plt.show()
# In[14]:
# create a DataFrame to view the top null columns and return the counts of the null values in each column
nulls = pd.DataFrame(train.isnull().sum().sort_values(ascending=False)[:25])
nulls.columns = ['Null Count']
nulls.index.name = 'Feature'
#nulls
print(nulls)
# In[15]:
print("15 \n")
"""
#to return a list of the unique values
print ("Unique values are:", train.MiscFeature.unique())
"""
######################################################
#   Wrangling the non-numeric Features              ##
######################################################
print("16 \n")
# consider the non-numeric features and display details of columns
categoricals = train.select_dtypes(exclude=[np.number])
#categoricals.describe()
print(categoricals.describe())
# In[16]:
#####################################################
#  Transforming and engineering features           ##
######################################################
print("17 \n")
# When transforming features, it's important to remember that any transformations
# that you've applied to the training data before fitting the model must be
# applied to the test data.
# Eg:
print ("Original: \n")
print (train.Street.value_counts(), "\n")
# In[17]:
print("18 \n")
# our model needs numerical data, so we will use one-hot encoding to transform the data into a Boolean column.
# create a new column called enc_street. The pd.get_dummies() method will handle this for us
train['enc_street'] = pd.get_dummies(train.Street, drop_first=True)
test['enc_street'] = pd.get_dummies(test.Street, drop_first=True)
print ('Encoded: \n')
print (train.enc_street.value_counts()) # Pave and Grvl values converted into 1 and 0
print("19 \n")
# look at SaleCondition by constructing and plotting a pivot table, as we did above for OverallQual
condition_pivot = train.pivot_table(index='SaleCondition', values='SalePrice', aggfunc=np.median)
condition_pivot.plot(kind='bar', color='blue')
plt.xlabel('Sale Condition')
plt.ylabel('Median Sale Price')
plt.xticks(rotation=0)
plt.show()
# In[18]:
# encode this SaleCondition as a new feature by using a similar method that we used for Street above
def encode(x):
    """Binary-encode SaleCondition: 1 for 'Partial' sales, 0 for everything else."""
    return int(x == 'Partial')
train['enc_condition'] = train.SaleCondition.apply(encode)
test['enc_condition'] = test.SaleCondition.apply(encode)
print("20 \n")
# explore this newly modified feature as a plot.
condition_pivot = train.pivot_table(index='enc_condition', values='SalePrice', aggfunc=np.median)
condition_pivot.plot(kind='bar', color='blue')
plt.xlabel('Encoded Sale Condition')
plt.ylabel('Median Sale Price')
plt.xticks(rotation=0)
plt.show()
# In[19]:
######################################################################################################
#   Dealing with missing values                                                                      #
#   We'll fill the missing values with an average value and then assign the results to data          #
#   This is a method of interpolation                                                                #
######################################################################################################
data = train.select_dtypes(include=[np.number]).interpolate().dropna()
print("21 \n")
# Check if all of the columns have 0 null values.
# sum(data.isnull().sum() != 0)
print(sum(data.isnull().sum() != 0))
print("22 \n")
# In[20]:
######################################################
#   3. Build a linear model                         ##
######################################################
# separate the features and the target variable for modeling.
# We will assign the features to X and the target variable (SalePrice) to y.
y = np.log(train.SalePrice)
X = data.drop(['SalePrice', 'Id'], axis=1)
# exclude ID from features since Id is just an index with no relationship to SalePrice.
#======= partition the data ===================================================================================================#
# Partitioning the data in this way allows us to evaluate how our model might perform on data that it has never seen before.
# If we train the model on all of the test data, it will be difficult to tell if overfitting has taken place.
#==============================================================================================================================#
# also state what percentage of the train data set we want to take as the hold-out set.
# In this example, about 33% of the data is devoted to the hold-out set.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42, test_size=.33)
# In[21]:
#========= Begin modelling =========================#
#       Linear Regression Model                     #
#===================================================#
# ---- first create a Linear Regression model.
# First, we instantiate the model.
lr = linear_model.LinearRegression()
# ---- fit the model / Model fitting
# lr.fit() fits the linear regression on the features and target variable that we pass.
model = lr.fit(X_train, y_train)
print("23 \n")
# In[22]:
# ---- Evaluate the performance and visualize results
# r-squared value is a measure of how close the data are to the fitted regression line
# a higher r-squared value means a better fit (very close to value 1)
print("R^2 is: \n", model.score(X_test, y_test))
# use the model we have built to make predictions on the hold-out set.
predictions = model.predict(X_test)
print("24 \n")
# In[23]:
print('RMSE is: \n', mean_squared_error(y_test, predictions))
print("25 \n")
# view this relationship between predictions and actual_values graphically with a scatter plot.
actual_values = y_test
plt.scatter(predictions, actual_values, alpha=.75,
            color='b')  # alpha helps to show overlapping data
plt.xlabel('Predicted Price')
plt.ylabel('Actual Price')
plt.title('Linear Regression Model')
plt.show()
# In[24]:
#====== improve the model ================================================================#
# try using Ridge Regularization to decrease the influence of less important features #
#=========================================================================================#
print("26 \n")
# experiment by looping through a few different values of alpha, and see how this changes our results.
for i in range (-2, 3):
alpha = 10**i
rm = linear_model.Ridge(alpha=alpha)
ridge_model = rm.fit(X_train, y_train)
preds_ridge = ridge_model.predict(X_test)
plt.scatter(preds_ridge, actual_values, alpha=.75, color='b')
plt.xlabel('Predicted Price')
plt.ylabel('Actual Price')
plt.title('Ridge Regularization with alpha = {}'.format(alpha))
overlay = 'R^2 is: {}\nRMSE is: {}'.format(
ridge_model.score(X_test, y_test),
mean_squared_error(y_test, preds_ridge))
plt.annotate(s=overlay,xy=(12.1,10.6),size='x-large')
plt.show()
# if you examined the plots you can see these models perform almost identically to the first model.
# In our case, adjusting the alpha did not substantially improve our model.
print("27 \n")
print("R^2 is: \n", model.score(X_test, y_test))
# In[25]:
######################################################
#   4. Make a submission                            ##
######################################################
# create a csv that contains the predicted SalePrice for each observation in the test.csv dataset.
submission = pd.DataFrame()
# The first column must contain the ID from the test data.
submission['Id'] = test.Id
# select the features from the test data for the model as we did above.
feats = test.select_dtypes(
    include=[np.number]).drop(['Id'], axis=1).interpolate()
# generate predictions
predictions = model.predict(feats)
# transform the predictions to the correct form
# apply np.exp() to our predictions because we have taken the logarithm (np.log()) previously.
final_predictions = np.exp(predictions)
print("28 \n")
# check the difference
print("Original predictions are: \n", predictions[:10], "\n")
print("Final predictions are: \n", final_predictions[:10])
print("29 \n")
# assign these predictions and check
submission['SalePrice'] = final_predictions
# submission.head()
print(submission.head())
# export to a .csv file as Kaggle expects.
# pass index=False because Pandas otherwise would create a new index for us.
submission.to_csv('submission1.csv', index=False)
print("\n Finish")
| 29.222494 | 132 | 0.632865 |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from sklearn import linear_model
train = pd.read_csv('train.csv')
test = pd.read_csv('test.csv')
print("1 \n")
print("Train data shape:", train.shape)
print("Test data shape:", test.shape)
print("2 \n")
print(train.head())
plt.style.use(style='ggplot')
plt.rcParams['figure.figsize'] = (10, 6)
| true | true |
f7290ff22916348c59aa6e233ad818395010ec56 | 18,267 | py | Python | test/functional/rpc_blockchain.py | bitcoincore-dev/bitcoin | 3f083a5bbdbb41cfa6be8f9d4b0e306515bfa100 | [
"MIT"
] | 9 | 2016-08-19T18:42:36.000Z | 2022-03-29T08:13:38.000Z | test/functional/rpc_blockchain.py | taweesak0803650558/bitcoin | fd557ceb885ec55ac6865953e7325bcebc5a6972 | [
"MIT"
] | 3 | 2019-05-22T12:52:35.000Z | 2021-08-01T16:13:24.000Z | test/functional/rpc_blockchain.py | taweesak0803650558/bitcoin | fd557ceb885ec55ac6865953e7325bcebc5a6972 | [
"MIT"
] | 7 | 2020-02-01T02:10:14.000Z | 2022-03-29T08:13:26.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPCs related to blockchainstate.
Test the following RPCs:
- getblockchaininfo
- gettxoutsetinfo
- getdifficulty
- getbestblockhash
- getblockhash
- getblockheader
- getchaintxstats
- getnetworkhashps
- verifychain
Tests correspond to code in rpc/blockchain.cpp.
"""
from decimal import Decimal
import http.client
import os
import subprocess
from test_framework.address import ADDRESS_BCRT1_P2WSH_OP_TRUE
from test_framework.blocktools import (
create_block,
create_coinbase,
TIME_GENESIS_BLOCK,
)
from test_framework.messages import (
CBlockHeader,
from_hex,
msg_block,
)
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises,
assert_raises_rpc_error,
assert_is_hex_string,
assert_is_hash_string,
get_datadir_path,
)
from test_framework.wallet import MiniWallet
class BlockchainTest(BitcoinTestFramework):
def set_test_params(self):
    """Configure a single node on a fresh chain; the CLI wrapper is not exercised."""
    self.num_nodes = 1
    self.setup_clean_chain = True
    self.supports_cli = False
def run_test(self):
    """Mine the fixture chain, then run every sub-test in order.

    Pruning is enabled (after the initial rescan completes) because
    _test_getblockchaininfo checks the prune-related result fields.
    """
    self.mine_chain()
    self.restart_node(0, extra_args=['-stopatheight=207', '-prune=1'])  # Set extra args with pruning after rescan is complete

    self._test_getblockchaininfo()
    self._test_getchaintxstats()
    self._test_gettxoutsetinfo()
    self._test_getblockheader()
    self._test_getdifficulty()
    self._test_getnetworkhashps()
    self._test_stopatheight()
    self._test_waitforblockheight()
    self._test_getblock()
    # NOTE(review): arguments look like (checklevel, nblocks) per the
    # verifychain RPC — confirm against the RPC help text.
    assert self.nodes[0].verifychain(4, 0)
def mine_chain(self):
    """Mine 200 blocks whose mocked timestamps advance in ten-minute steps from genesis."""
    self.log.info('Create some old blocks')
    block_interval = 600  # ten minutes, in seconds
    for step in range(200):
        self.nodes[0].setmocktime(TIME_GENESIS_BLOCK + step * block_interval)
        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2WSH_OP_TRUE)
    assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200)
def _test_getblockchaininfo(self):
    """Verify getblockchaininfo's result keys and softfork reporting under
    manual pruning (prune=1), no pruning, and automatic pruning (prune=550)."""
    self.log.info("Test getblockchaininfo")

    # Keys present regardless of pruning mode.
    keys = [
        'bestblockhash',
        'blocks',
        'chain',
        'chainwork',
        'difficulty',
        'headers',
        'initialblockdownload',
        'mediantime',
        'pruned',
        'size_on_disk',
        'softforks',
        'time',
        'verificationprogress',
        'warnings',
    ]
    res = self.nodes[0].getblockchaininfo()

    assert isinstance(res['time'], int)

    # result should have these additional pruning keys if manual pruning is enabled
    assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys))

    # size_on_disk should be > 0
    assert_greater_than(res['size_on_disk'], 0)

    # pruneheight should be greater or equal to 0
    assert_greater_than_or_equal(res['pruneheight'], 0)

    # check other pruning fields given that prune=1
    assert res['pruned']
    assert not res['automatic_pruning']

    self.restart_node(0, ['-stopatheight=207'])
    res = self.nodes[0].getblockchaininfo()
    # should have exact keys
    assert_equal(sorted(res.keys()), keys)

    self.restart_node(0, ['-stopatheight=207', '-prune=550'])
    res = self.nodes[0].getblockchaininfo()
    # result should have these additional pruning keys if prune=550
    assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys))

    # check related fields
    assert res['pruned']
    assert_equal(res['pruneheight'], 0)
    assert res['automatic_pruning']
    # 550 * 1024 * 1024: the -prune=550 target expressed in bytes.
    assert_equal(res['prune_target_size'], 576716800)
    assert_greater_than(res['size_on_disk'], 0)

    assert_equal(res['softforks'], {
        'bip34': {'type': 'buried', 'active': False, 'height': 500},
        'bip66': {'type': 'buried', 'active': False, 'height': 1251},
        'bip65': {'type': 'buried', 'active': False, 'height': 1351},
        'csv': {'type': 'buried', 'active': False, 'height': 432},
        'segwit': {'type': 'buried', 'active': True, 'height': 0},
        'testdummy': {
            'type': 'bip9',
            'bip9': {
                'status': 'started',
                'bit': 28,
                'start_time': 0,
                'timeout': 0x7fffffffffffffff,  # testdummy does not have a timeout so is set to the max int64 value
                'since': 144,
                'statistics': {
                    'period': 144,
                    'threshold': 108,
                    'elapsed': 57,
                    'count': 57,
                    'possible': True,
                },
                'min_activation_height': 0,
            },
            'active': False
        },
        'taproot': {
            'type': 'bip9',
            'bip9': {
                'status': 'active',
                'start_time': -1,
                'timeout': 9223372036854775807,
                'since': 0,
                'min_activation_height': 0,
            },
            'height': 0,
            'active': True
        }
    })
def _test_getchaintxstats(self):
    """Exercise getchaintxstats argument validation and window statistics
    for the default window, nblocks=1, and an explicit blockhash."""
    self.log.info("Test getchaintxstats")

    # Test `getchaintxstats` invalid extra parameters
    assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0)

    # Test `getchaintxstats` invalid `nblocks`
    assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '')
    assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
    assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())

    # Test `getchaintxstats` invalid `blockhash`
    assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0)
    assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0')
    assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000')
    assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000')
    blockhash = self.nodes[0].getblockhash(200)
    self.nodes[0].invalidateblock(blockhash)
    assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
    self.nodes[0].reconsiderblock(blockhash)

    chaintxstats = self.nodes[0].getchaintxstats(nblocks=1)
    # 200 txs plus genesis tx
    assert_equal(chaintxstats['txcount'], 201)
    # tx rate should be 1 per 10 minutes, or 1/600
    # we have to round because of binary math
    assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))

    b1_hash = self.nodes[0].getblockhash(1)
    b1 = self.nodes[0].getblock(b1_hash)
    b200_hash = self.nodes[0].getblockhash(200)
    b200 = self.nodes[0].getblock(b200_hash)
    time_diff = b200['mediantime'] - b1['mediantime']

    chaintxstats = self.nodes[0].getchaintxstats()
    assert_equal(chaintxstats['time'], b200['time'])
    assert_equal(chaintxstats['txcount'], 201)
    assert_equal(chaintxstats['window_final_block_hash'], b200_hash)
    assert_equal(chaintxstats['window_final_block_height'], 200)
    assert_equal(chaintxstats['window_block_count'], 199)
    assert_equal(chaintxstats['window_tx_count'], 199)
    assert_equal(chaintxstats['window_interval'], time_diff)
    assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))

    # A one-block window (ending at block 1) has no interval/rate stats.
    chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash)
    assert_equal(chaintxstats['time'], b1['time'])
    assert_equal(chaintxstats['txcount'], 2)
    assert_equal(chaintxstats['window_final_block_hash'], b1_hash)
    assert_equal(chaintxstats['window_final_block_height'], 1)
    assert_equal(chaintxstats['window_block_count'], 0)
    assert 'window_tx_count' not in chaintxstats
    assert 'window_interval' not in chaintxstats
    assert 'txrate' not in chaintxstats
def _test_gettxoutsetinfo(self):
    """Verify gettxoutsetinfo totals, stability across invalidate/reconsider,
    and the hash_type options (hash_serialized_2 / none / muhash).

    Cleanup: removed stray trailing commas that turned two assert_equal
    calls into throwaway one-tuples, and the call-style ``assert(...)``.
    """
    node = self.nodes[0]
    res = node.gettxoutsetinfo()

    assert_equal(res['total_amount'], Decimal('8725.00000000'))
    assert_equal(res['transactions'], 200)
    assert_equal(res['height'], 200)
    assert_equal(res['txouts'], 200)
    assert_equal(res['bogosize'], 16800)
    assert_equal(res['bestblock'], node.getblockhash(200))
    size = res['disk_size']
    assert size > 6400
    assert size < 64000
    assert_equal(len(res['bestblock']), 64)
    assert_equal(len(res['hash_serialized_2']), 64)

    self.log.info("Test that gettxoutsetinfo() works for blockchain with just the genesis block")
    b1hash = node.getblockhash(1)
    node.invalidateblock(b1hash)

    res2 = node.gettxoutsetinfo()
    assert_equal(res2['transactions'], 0)
    assert_equal(res2['total_amount'], Decimal('0'))
    assert_equal(res2['height'], 0)
    assert_equal(res2['txouts'], 0)
    assert_equal(res2['bogosize'], 0)
    assert_equal(res2['bestblock'], node.getblockhash(0))
    assert_equal(len(res2['hash_serialized_2']), 64)

    self.log.info("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block")
    node.reconsiderblock(b1hash)

    res3 = node.gettxoutsetinfo()
    # The field 'disk_size' is non-deterministic and can thus not be
    # compared between res and res3.  Everything else should be the same.
    del res['disk_size'], res3['disk_size']
    assert_equal(res, res3)

    self.log.info("Test hash_type option for gettxoutsetinfo()")
    # Adding hash_type 'hash_serialized_2', which is the default, should
    # not change the result.
    res4 = node.gettxoutsetinfo(hash_type='hash_serialized_2')
    del res4['disk_size']
    assert_equal(res, res4)

    # hash_type none should not return a UTXO set hash.
    res5 = node.gettxoutsetinfo(hash_type='none')
    assert 'hash_serialized_2' not in res5

    # hash_type muhash should return a different UTXO set hash.
    res6 = node.gettxoutsetinfo(hash_type='muhash')
    assert 'muhash' in res6
    assert res['hash_serialized_2'] != res6['muhash']

    # muhash should not be returned unless requested.
    for r in [res, res2, res3, res4, res5]:
        assert 'muhash' not in r

    # Unknown hash_type raises an error
    assert_raises_rpc_error(-8, "foohash is not a valid hash_type", node.gettxoutsetinfo, "foohash")
def _test_getblockheader(self):
    """Validate getblockheader argument checks, the returned header fields,
    and the verbose=False hex form (round-tripped through CBlockHeader)."""
    node = self.nodes[0]

    assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense")
    assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")
    assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")

    besthash = node.getbestblockhash()
    secondbesthash = node.getblockhash(199)
    header = node.getblockheader(blockhash=besthash)

    assert_equal(header['hash'], besthash)
    assert_equal(header['height'], 200)
    assert_equal(header['confirmations'], 1)
    assert_equal(header['previousblockhash'], secondbesthash)
    assert_is_hex_string(header['chainwork'])
    assert_equal(header['nTx'], 1)
    assert_is_hash_string(header['hash'])
    assert_is_hash_string(header['previousblockhash'])
    assert_is_hash_string(header['merkleroot'])
    assert_is_hash_string(header['bits'], length=None)
    assert isinstance(header['time'], int)
    assert isinstance(header['mediantime'], int)
    assert isinstance(header['nonce'], int)
    assert isinstance(header['version'], int)
    assert isinstance(int(header['versionHex'], 16), int)
    assert isinstance(header['difficulty'], Decimal)

    # Test with verbose=False, which should return the header as hex.
    header_hex = node.getblockheader(blockhash=besthash, verbose=False)
    assert_is_hex_string(header_hex)

    # Round-trip the hex through the framework's header class and check
    # it hashes to the same block.
    header = from_hex(CBlockHeader(), header_hex)
    header.calc_sha256()
    assert_equal(header.hash, besthash)

    # Genesis has no parent; the tip has no successor.
    assert 'previousblockhash' not in node.getblockheader(node.getblockhash(0))
    assert 'nextblockhash' not in node.getblockheader(node.getbestblockhash())
def _test_getdifficulty(self):
    """Check regtest difficulty: 1 hash in 2 should be valid, i.e. 1/2**31."""
    difficulty = self.nodes[0].getdifficulty()
    # binary => decimal => binary rounding is why an exact compare is avoided
    scaled = difficulty * 2 ** 31
    assert abs(scaled - 1) < 0.0001
def _test_getnetworkhashps(self):
    """Check network hash rate: two hashes every ten minutes, i.e. 1/300 per second."""
    hashes_per_second = self.nodes[0].getnetworkhashps()
    seconds_per_hash = 300
    assert abs(hashes_per_second * seconds_per_hash - 1) < 0.0001
def _test_stopatheight(self):
    """Check that -stopatheight=207 shuts the node down exactly when block 207 connects."""
    assert_equal(self.nodes[0].getblockcount(), 200)
    self.nodes[0].generatetoaddress(6, ADDRESS_BCRT1_P2WSH_OP_TRUE)
    assert_equal(self.nodes[0].getblockcount(), 206)
    self.log.debug('Node should not stop at this height')
    assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3))
    try:
        # Mining block 207 triggers the shutdown; the RPC may therefore
        # fail mid-flight, which is expected.
        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2WSH_OP_TRUE)
    except (ConnectionError, http.client.BadStatusLine):
        pass  # The node already shut down before response
    self.log.debug('Node should stop at this height...')
    self.nodes[0].wait_until_stopped()
    self.start_node(0)
    assert_equal(self.nodes[0].getblockcount(), 207)
def _test_waitforblockheight(self):
    """waitforblockheight must track the active chain, even after a fork tip
    has been invalidated."""
    self.log.info("Test waitforblockheight")
    node = self.nodes[0]
    peer = node.add_p2p_connection(P2PInterface())

    current_height = node.getblock(node.getbestblockhash())['height']

    # Create a fork somewhere below our current height, invalidate the tip
    # of that fork, and then ensure that waitforblockheight still
    # works as expected.
    #
    # (Previously this was broken based on setting
    # `rpc/blockchain.cpp:latestblock` incorrectly.)
    #
    b20hash = node.getblockhash(20)
    b20 = node.getblock(b20hash)

    def solve_and_send_block(prevhash, height, time):
        # Build, PoW-solve, and deliver a block over the P2P connection.
        b = create_block(prevhash, create_coinbase(height), time)
        b.solve()
        peer.send_and_ping(msg_block(b))
        return b

    b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
    b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1)

    node.invalidateblock(b22f.hash)

    def assert_waitforheight(height, timeout=2):
        # The call must return with the active-chain height, regardless of
        # the (lower/equal/higher) height argument.
        assert_equal(
            node.waitforblockheight(height=height, timeout=timeout)['height'],
            current_height)

    assert_waitforheight(0)
    assert_waitforheight(current_height - 1)
    assert_waitforheight(current_height)
    assert_waitforheight(current_height + 1)
def _test_getblock(self):
    """Check getblock verbosity levels, fee reporting, argument validation,
    and graceful degradation when undo (rev*.dat) data is missing."""
    node = self.nodes[0]

    miniwallet = MiniWallet(node)
    miniwallet.scan_blocks(num=5)

    fee_per_byte = Decimal('0.00000010')
    fee_per_kb = 1000 * fee_per_byte

    miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)
    blockhash = node.generate(1)[0]

    self.log.info("Test that getblock with verbosity 1 doesn't include fee")
    block = node.getblock(blockhash, 1)
    assert 'fee' not in block['tx'][1]

    self.log.info('Test that getblock with verbosity 2 includes expected fee')
    block = node.getblock(blockhash, 2)
    tx = block['tx'][1]
    assert 'fee' in tx
    assert_equal(tx['fee'], tx['vsize'] * fee_per_byte)

    self.log.info("Test that getblock with verbosity 2 still works with pruned Undo data")
    datadir = get_datadir_path(self.options.tmpdir, 0)

    self.log.info("Test that getblock with invalid verbosity type returns proper error message")
    assert_raises_rpc_error(-1, "JSON value is not an integer as expected", node.getblock, blockhash, "2")

    def move_block_file(old, new):
        # Rename a file inside the node's blocks directory.
        old_path = os.path.join(datadir, self.chain, 'blocks', old)
        new_path = os.path.join(datadir, self.chain, 'blocks', new)
        os.rename(old_path, new_path)

    # Move instead of deleting so we can restore chain state afterwards
    move_block_file('rev00000.dat', 'rev_wrong')

    # Without undo data the fee cannot be computed, but the call still succeeds.
    block = node.getblock(blockhash, 2)
    assert 'fee' not in block['tx'][1]

    # Restore chain state
    move_block_file('rev_wrong', 'rev00000.dat')

    # Genesis has no parent; the tip has no successor.
    assert 'previousblockhash' not in node.getblock(node.getblockhash(0))
    assert 'nextblockhash' not in node.getblock(node.getbestblockhash())
if __name__ == '__main__':
    # Standard functional-test entry point: construct the test and run it.
    BlockchainTest().main()
| 41.610478 | 257 | 0.646685 |
from decimal import Decimal
import http.client
import os
import subprocess
from test_framework.address import ADDRESS_BCRT1_P2WSH_OP_TRUE
from test_framework.blocktools import (
create_block,
create_coinbase,
TIME_GENESIS_BLOCK,
)
from test_framework.messages import (
CBlockHeader,
from_hex,
msg_block,
)
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises,
assert_raises_rpc_error,
assert_is_hex_string,
assert_is_hash_string,
get_datadir_path,
)
from test_framework.wallet import MiniWallet
class BlockchainTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.supports_cli = False
def run_test(self):
self.mine_chain()
self.restart_node(0, extra_args=['-stopatheight=207', '-prune=1'])
self._test_getblockchaininfo()
self._test_getchaintxstats()
self._test_gettxoutsetinfo()
self._test_getblockheader()
self._test_getdifficulty()
self._test_getnetworkhashps()
self._test_stopatheight()
self._test_waitforblockheight()
self._test_getblock()
assert self.nodes[0].verifychain(4, 0)
def mine_chain(self):
self.log.info('Create some old blocks')
for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600):
self.nodes[0].setmocktime(t)
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2WSH_OP_TRUE)
assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200)
def _test_getblockchaininfo(self):
self.log.info("Test getblockchaininfo")
keys = [
'bestblockhash',
'blocks',
'chain',
'chainwork',
'difficulty',
'headers',
'initialblockdownload',
'mediantime',
'pruned',
'size_on_disk',
'softforks',
'time',
'verificationprogress',
'warnings',
]
res = self.nodes[0].getblockchaininfo()
assert isinstance(res['time'], int)
assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys))
assert_greater_than(res['size_on_disk'], 0)
assert_greater_than_or_equal(res['pruneheight'], 0)
assert res['pruned']
assert not res['automatic_pruning']
self.restart_node(0, ['-stopatheight=207'])
res = self.nodes[0].getblockchaininfo()
assert_equal(sorted(res.keys()), keys)
self.restart_node(0, ['-stopatheight=207', '-prune=550'])
res = self.nodes[0].getblockchaininfo()
assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys))
assert res['pruned']
assert_equal(res['pruneheight'], 0)
assert res['automatic_pruning']
assert_equal(res['prune_target_size'], 576716800)
assert_greater_than(res['size_on_disk'], 0)
assert_equal(res['softforks'], {
'bip34': {'type': 'buried', 'active': False, 'height': 500},
'bip66': {'type': 'buried', 'active': False, 'height': 1251},
'bip65': {'type': 'buried', 'active': False, 'height': 1351},
'csv': {'type': 'buried', 'active': False, 'height': 432},
'segwit': {'type': 'buried', 'active': True, 'height': 0},
'testdummy': {
'type': 'bip9',
'bip9': {
'status': 'started',
'bit': 28,
'start_time': 0,
'timeout': 0x7fffffffffffffff,
'since': 144,
'statistics': {
'period': 144,
'threshold': 108,
'elapsed': 57,
'count': 57,
'possible': True,
},
'min_activation_height': 0,
},
'active': False
},
'taproot': {
'type': 'bip9',
'bip9': {
'status': 'active',
'start_time': -1,
'timeout': 9223372036854775807,
'since': 0,
'min_activation_height': 0,
},
'height': 0,
'active': True
}
})
    def _test_getchaintxstats(self):
        """Check getchaintxstats argument validation and windowed statistics."""
        self.log.info("Test getchaintxstats")
        # Too many / wrongly typed positional arguments are rejected.
        assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0)
        assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '')
        # nblocks must lie in [0, tip height - 1].
        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())
        # blockhash must be a well-formed, known block hash.
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0)
        assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0')
        assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000')
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000')
        # A hash that is temporarily invalidated off the main chain is
        # rejected too; reconsider afterwards to restore the original tip.
        blockhash = self.nodes[0].getblockhash(200)
        self.nodes[0].invalidateblock(blockhash)
        assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
        self.nodes[0].reconsiderblock(blockhash)
        # One-block window: 201 total txs, and txrate * 600 rounds to 1.
        chaintxstats = self.nodes[0].getchaintxstats(nblocks=1)
        assert_equal(chaintxstats['txcount'], 201)
        assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))
        b1_hash = self.nodes[0].getblockhash(1)
        b1 = self.nodes[0].getblock(b1_hash)
        b200_hash = self.nodes[0].getblockhash(200)
        b200 = self.nodes[0].getblock(b200_hash)
        time_diff = b200['mediantime'] - b1['mediantime']
        # Default window ends at block 200 and spans 199 blocks / 199 txs.
        chaintxstats = self.nodes[0].getchaintxstats()
        assert_equal(chaintxstats['time'], b200['time'])
        assert_equal(chaintxstats['txcount'], 201)
        assert_equal(chaintxstats['window_final_block_hash'], b200_hash)
        assert_equal(chaintxstats['window_final_block_height'], 200)
        assert_equal(chaintxstats['window_block_count'], 199)
        assert_equal(chaintxstats['window_tx_count'], 199)
        assert_equal(chaintxstats['window_interval'], time_diff)
        assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))
        # A window ending at block 1 contains zero blocks, so the
        # interval/count/rate keys are omitted from the result.
        chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash)
        assert_equal(chaintxstats['time'], b1['time'])
        assert_equal(chaintxstats['txcount'], 2)
        assert_equal(chaintxstats['window_final_block_hash'], b1_hash)
        assert_equal(chaintxstats['window_final_block_height'], 1)
        assert_equal(chaintxstats['window_block_count'], 0)
        assert 'window_tx_count' not in chaintxstats
        assert 'window_interval' not in chaintxstats
        assert 'txrate' not in chaintxstats
def _test_gettxoutsetinfo(self):
node = self.nodes[0]
res = node.gettxoutsetinfo()
assert_equal(res['total_amount'], Decimal('8725.00000000'))
assert_equal(res['transactions'], 200)
assert_equal(res['height'], 200)
assert_equal(res['txouts'], 200)
assert_equal(res['bogosize'], 16800),
assert_equal(res['bestblock'], node.getblockhash(200))
size = res['disk_size']
assert size > 6400
assert size < 64000
assert_equal(len(res['bestblock']), 64)
assert_equal(len(res['hash_serialized_2']), 64)
self.log.info("Test that gettxoutsetinfo() works for blockchain with just the genesis block")
b1hash = node.getblockhash(1)
node.invalidateblock(b1hash)
res2 = node.gettxoutsetinfo()
assert_equal(res2['transactions'], 0)
assert_equal(res2['total_amount'], Decimal('0'))
assert_equal(res2['height'], 0)
assert_equal(res2['txouts'], 0)
assert_equal(res2['bogosize'], 0),
assert_equal(res2['bestblock'], node.getblockhash(0))
assert_equal(len(res2['hash_serialized_2']), 64)
self.log.info("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block")
node.reconsiderblock(b1hash)
res3 = node.gettxoutsetinfo()
del res['disk_size'], res3['disk_size']
assert_equal(res, res3)
self.log.info("Test hash_type option for gettxoutsetinfo()")
res4 = node.gettxoutsetinfo(hash_type='hash_serialized_2')
del res4['disk_size']
assert_equal(res, res4)
res5 = node.gettxoutsetinfo(hash_type='none')
assert 'hash_serialized_2' not in res5
res6 = node.gettxoutsetinfo(hash_type='muhash')
assert 'muhash' in res6
assert(res['hash_serialized_2'] != res6['muhash'])
for r in [res, res2, res3, res4, res5]:
assert 'muhash' not in r
assert_raises_rpc_error(-8, "foohash is not a valid hash_type", node.gettxoutsetinfo, "foohash")
def _test_getblockheader(self):
node = self.nodes[0]
assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense")
assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")
assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")
besthash = node.getbestblockhash()
secondbesthash = node.getblockhash(199)
header = node.getblockheader(blockhash=besthash)
assert_equal(header['hash'], besthash)
assert_equal(header['height'], 200)
assert_equal(header['confirmations'], 1)
assert_equal(header['previousblockhash'], secondbesthash)
assert_is_hex_string(header['chainwork'])
assert_equal(header['nTx'], 1)
assert_is_hash_string(header['hash'])
assert_is_hash_string(header['previousblockhash'])
assert_is_hash_string(header['merkleroot'])
assert_is_hash_string(header['bits'], length=None)
assert isinstance(header['time'], int)
assert isinstance(header['mediantime'], int)
assert isinstance(header['nonce'], int)
assert isinstance(header['version'], int)
assert isinstance(int(header['versionHex'], 16), int)
assert isinstance(header['difficulty'], Decimal)
header_hex = node.getblockheader(blockhash=besthash, verbose=False)
assert_is_hex_string(header_hex)
header = from_hex(CBlockHeader(), header_hex)
header.calc_sha256()
assert_equal(header.hash, besthash)
assert 'previousblockhash' not in node.getblockheader(node.getblockhash(0))
assert 'nextblockhash' not in node.getblockheader(node.getbestblockhash())
def _test_getdifficulty(self):
difficulty = self.nodes[0].getdifficulty()
assert abs(difficulty * 2**31 - 1) < 0.0001
def _test_getnetworkhashps(self):
hashes_per_second = self.nodes[0].getnetworkhashps()
assert abs(hashes_per_second * 300 - 1) < 0.0001
    def _test_stopatheight(self):
        """Check that the node shuts itself down on reaching -stopatheight.

        Assumes the node is running with -stopatheight=207 (set by an
        earlier restart in this test), so mining block 207 must stop it
        while block 206 must not.
        """
        assert_equal(self.nodes[0].getblockcount(), 200)
        self.nodes[0].generatetoaddress(6, ADDRESS_BCRT1_P2WSH_OP_TRUE)
        assert_equal(self.nodes[0].getblockcount(), 206)
        self.log.debug('Node should not stop at this height')
        # Below the stop height, the process must still be running after 3s.
        assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3))
        try:
            # Mining the 207th block makes the node exit; the RPC connection
            # can die mid-request, surfacing as one of the errors below.
            self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2WSH_OP_TRUE)
        except (ConnectionError, http.client.BadStatusLine):
            pass
        self.log.debug('Node should stop at this height...')
        self.nodes[0].wait_until_stopped()
        # Restart and confirm the node stopped exactly at height 207.
        self.start_node(0)
        assert_equal(self.nodes[0].getblockcount(), 207)
    def _test_waitforblockheight(self):
        """Check waitforblockheight behaviour around the current tip.

        A two-block fork is delivered over P2P and its tip invalidated, so
        the RPC is exercised while a stale side chain exists.
        """
        self.log.info("Test waitforblockheight")
        node = self.nodes[0]
        peer = node.add_p2p_connection(P2PInterface())
        current_height = node.getblock(node.getbestblockhash())['height']
        # Fork point: block 20 of the main chain.
        b20hash = node.getblockhash(20)
        b20 = node.getblock(b20hash)
        def solve_and_send_block(prevhash, height, time):
            # Build a block on prevhash, solve its PoW, and deliver it via P2P.
            b = create_block(prevhash, create_coinbase(height), time)
            b.solve()
            peer.send_and_ping(msg_block(b))
            return b
        # Two-block fork off block 20; invalidate its tip so it stays stale.
        b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
        b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1)
        node.invalidateblock(b22f.hash)
        def assert_waitforheight(height, timeout=2):
            # waitforblockheight reports the current tip height whether the
            # target height is already reached or the wait times out.
            assert_equal(
                node.waitforblockheight(height=height, timeout=timeout)['height'],
                current_height)
        assert_waitforheight(0)
        assert_waitforheight(current_height - 1)
        assert_waitforheight(current_height)
        assert_waitforheight(current_height + 1)
    def _test_getblock(self):
        """Check getblock verbosity levels, fee reporting, and behaviour
        when the undo (rev*.dat) data is unavailable."""
        node = self.nodes[0]
        miniwallet = MiniWallet(node)
        miniwallet.scan_blocks(num=5)
        fee_per_byte = Decimal('0.00000010')
        fee_per_kb = 1000 * fee_per_byte
        # Mine a block containing one wallet tx with a known feerate.
        miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)
        blockhash = node.generate(1)[0]
        self.log.info("Test that getblock with verbosity 1 doesn't include fee")
        block = node.getblock(blockhash, 1)
        assert 'fee' not in block['tx'][1]
        self.log.info('Test that getblock with verbosity 2 includes expected fee')
        block = node.getblock(blockhash, 2)
        tx = block['tx'][1]
        assert 'fee' in tx
        assert_equal(tx['fee'], tx['vsize'] * fee_per_byte)
        self.log.info("Test that getblock with verbosity 2 still works with pruned Undo data")
        datadir = get_datadir_path(self.options.tmpdir, 0)
        self.log.info("Test that getblock with invalid verbosity type returns proper error message")
        assert_raises_rpc_error(-1, "JSON value is not an integer as expected", node.getblock, blockhash, "2")
        def move_block_file(old, new):
            # Relocate a block/undo file within the node's blocks directory.
            old_path = os.path.join(datadir, self.chain, 'blocks', old)
            new_path = os.path.join(datadir, self.chain, 'blocks', new)
            os.rename(old_path, new_path)
        # Move instead of deleting so we can restore chain state afterwards
        move_block_file('rev00000.dat', 'rev_wrong')
        # With the undo file missing, verbosity 2 still works but omits fee.
        block = node.getblock(blockhash, 2)
        assert 'fee' not in block['tx'][1]
        # Restore chain state
        move_block_file('rev_wrong', 'rev00000.dat')
        # Genesis has no predecessor; the tip has no successor.
        assert 'previousblockhash' not in node.getblock(node.getblockhash(0))
        assert 'nextblockhash' not in node.getblock(node.getbestblockhash())
# Script entry point: run the functional test when invoked directly.
if __name__ == '__main__':
    BlockchainTest().main()
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.