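"""Django views for CommCare HQ's internal accounting admin section.

Covers billing accounts, subscriptions, software plans and plan versions,
invoice triggering and summaries, bookkeeper and reminder emails, a public
pricing-table JSON endpoint, and accounting-admin management.
"""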
import json
import logging
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import User
from django.forms.forms import NON_FIELD_ERRORS
from django.forms.util import ErrorList
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseBadRequest
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _, ugettext_noop
from django.views.generic import View
from corehq.apps.hqwebapp.async_handler import AsyncHandlerMixin
from corehq.apps.hqwebapp.encoders import LazyEncoder
from corehq.util.translation import localize
from dimagi.utils.decorators.memoized import memoized
from corehq.apps.accounting.forms import (
BillingAccountBasicForm, BillingAccountContactForm, CreditForm,
SubscriptionForm, CancelForm,
PlanInformationForm, SoftwarePlanVersionForm, FeatureRateForm,
ProductRateForm, TriggerInvoiceForm, InvoiceInfoForm, AdjustBalanceForm,
ResendEmailForm, ChangeSubscriptionForm, TriggerBookkeeperEmailForm,
TestReminderEmailFrom,
CreateAdminForm)
from corehq.apps.accounting.exceptions import (
NewSubscriptionError, InvoiceError, CreditLineError,
CreateAccountingAdminError,
)
from corehq.apps.accounting.interface import (
AccountingInterface, SubscriptionInterface, SoftwarePlanInterface,
InvoiceInterface
)
from corehq.apps.accounting.async_handlers import (
FeatureRateAsyncHandler,
Select2RateAsyncHandler,
SoftwareProductRateAsyncHandler,
Select2BillingInfoHandler,
Select2InvoiceTriggerHandler,
SubscriberFilterAsyncHandler,
SubscriptionFilterAsyncHandler,
AccountFilterAsyncHandler,
BillingContactInfoAsyncHandler,
SoftwarePlanAsyncHandler,
)
from corehq.apps.accounting.models import (
SoftwareProductType, Invoice, BillingAccount, CreditLine, Subscription,
SoftwarePlanVersion, SoftwarePlan, CreditAdjustment
)
from corehq.apps.accounting.user_text import PricingTable
from corehq.apps.accounting.utils import (
fmt_feature_rate_dict, fmt_product_rate_dict,
has_subscription_already_ended
)
from corehq.apps.hqwebapp.views import BaseSectionPageView, CRUDPaginatedViewMixin
from corehq import privileges, toggles
from django_prbac.decorators import requires_privilege_raise404
from django_prbac.models import Role, Grant
logger = logging.getLogger('accounting')
@requires_privilege_raise404(privileges.ACCOUNTING_ADMIN)
def accounting_default(request):
return HttpResponseRedirect(AccountingInterface.get_url())
class AccountingSectionView(BaseSectionPageView):
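    """Base class for accounting admin pages; every request must carry the
    ACCOUNTING_ADMIN privilege, enforced in dispatch() below."""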
section_name = 'Accounting'
@property
def section_url(self):
return reverse('accounting_default')
@method_decorator(requires_privilege_raise404(privileges.ACCOUNTING_ADMIN))
def dispatch(self, request, *args, **kwargs):
return super(AccountingSectionView, self).dispatch(request, *args, **kwargs)
class BillingAccountsSectionView(AccountingSectionView):
@property
def parent_pages(self):
return [{
'title': AccountingInterface.name,
'url': AccountingInterface.get_url(),
}]
class NewBillingAccountView(BillingAccountsSectionView):
page_title = 'New Billing Account'
template_name = 'accounting/accounts_base.html'
urlname = 'new_billing_account'
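    # @memoized caches the form on the view instance, so repeated property
    # access during validation and rendering reuses a single bound form.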
@property
@memoized
def account_form(self):
if self.request.method == 'POST':
return BillingAccountBasicForm(None, self.request.POST)
return BillingAccountBasicForm(None)
@property
def page_context(self):
return {
'basic_form': self.account_form,
}
@property
def page_url(self):
return reverse(self.urlname)
def post(self, request, *args, **kwargs):
if self.account_form.is_valid():
account = self.account_form.create_account()
return HttpResponseRedirect(reverse('manage_billing_account', args=(account.id,)))
else:
return self.get(request, *args, **kwargs)
class ManageBillingAccountView(BillingAccountsSectionView, AsyncHandlerMixin):
page_title = 'Manage Billing Account'
template_name = 'accounting/accounts.html'
urlname = 'manage_billing_account'
async_handlers = [
Select2BillingInfoHandler,
]
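    # Each form below posts back with a marker key ('account_basic',
    # 'account_contact', 'adjust_credit') so this one view can tell which
    # of its several forms was submitted.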
@property
@memoized
def account(self):
return BillingAccount.objects.get(id=self.args[0])
@property
@memoized
def basic_account_form(self):
if (self.request.method == 'POST'
and 'account_basic' in self.request.POST):
return BillingAccountBasicForm(self.account, self.request.POST)
return BillingAccountBasicForm(self.account)
@property
@memoized
def contact_form(self):
if (self.request.method == 'POST'
and 'account_contact' in self.request.POST):
return BillingAccountContactForm(self.account, self.request.POST)
return BillingAccountContactForm(self.account)
@property
@memoized
def credit_form(self):
if (self.request.method == 'POST'
and 'adjust_credit' in self.request.POST):
return CreditForm(self.account, None, self.request.POST)
return CreditForm(self.account, None)
@property
def page_context(self):
return {
'account': self.account,
'credit_form': self.credit_form,
'credit_list': CreditLine.objects.filter(account=self.account),
'basic_form': self.basic_account_form,
'contact_form': self.contact_form,
            'subscription_list': [
                (sub,
                 Invoice.objects.filter(subscription=sub).latest('date_due').date_due
                 if Invoice.objects.filter(subscription=sub).exists()
                 else 'None on record')  # TODO - check query
                for sub in Subscription.objects.filter(account=self.account)
            ],
}
@property
def page_url(self):
return reverse(self.urlname, args=(self.args[0],))
def post(self, request, *args, **kwargs):
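        # AsyncHandlerMixin sets async_response when the POST is addressed
        # to one of the registered async_handlers (here, the Select2 billing
        # info lookup) rather than to a form submission.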
if self.async_response is not None:
return self.async_response
if ('account_basic' in self.request.POST
and self.basic_account_form.is_valid()):
self.basic_account_form.update_basic_info(self.account)
messages.success(request, "Account successfully updated.")
return HttpResponseRedirect(self.page_url)
elif ('account_contact' in self.request.POST
and self.contact_form.is_valid()):
self.contact_form.update_contact_info(self.account)
messages.success(request, "Account Contact Info successfully updated.")
return HttpResponseRedirect(self.page_url)
elif ('adjust_credit' in self.request.POST
and self.credit_form.is_valid()):
try:
if self.credit_form.adjust_credit(web_user=request.user.username):
messages.success(request, "Successfully adjusted credit.")
return HttpResponseRedirect(self.page_url)
except CreditLineError as e:
logger.error(
"[BILLING] failed to add credit in admin UI due to: %s"
% e
)
messages.error(request, "Issue adding credit: %s" % e)
return self.get(request, *args, **kwargs)
class NewSubscriptionView(AccountingSectionView, AsyncHandlerMixin):
page_title = 'New Subscription'
template_name = 'accounting/subscriptions_base.html'
urlname = 'new_subscription'
async_handlers = [
Select2BillingInfoHandler,
]
@property
@memoized
def account_id(self):
return self.args[0]
@property
@memoized
def subscription_form(self):
if self.request.method == 'POST':
return SubscriptionForm(
None, self.account_id, self.request.user.username,
self.request.POST
)
return SubscriptionForm(None, self.account_id, None)
@property
def page_context(self):
return {
'form': self.subscription_form,
}
@property
def page_url(self):
return reverse(self.urlname, args=(self.account_id,))
@property
def parent_pages(self):
return [{
'title': AccountingInterface.name,
'url': AccountingInterface.get_url(),
}]
def post(self, request, *args, **kwargs):
if self.async_response is not None:
return self.async_response
if self.subscription_form.is_valid():
try:
subscription = self.subscription_form.create_subscription()
return HttpResponseRedirect(
reverse(ManageBillingAccountView.urlname, args=(subscription.account.id,)))
except NewSubscriptionError as e:
errors = ErrorList()
errors.extend([e.message])
self.subscription_form._errors.setdefault(NON_FIELD_ERRORS, errors)
return self.get(request, *args, **kwargs)
class NewSubscriptionViewNoDefaultDomain(NewSubscriptionView):
urlname = 'new_subscription_no_default_domain'
@property
@memoized
def account_id(self):
return None
@property
def page_url(self):
return reverse(self.urlname)
class EditSubscriptionView(AccountingSectionView, AsyncHandlerMixin):
page_title = 'Edit Subscription'
template_name = 'accounting/subscriptions.html'
urlname = 'edit_subscription'
async_handlers = [
Select2BillingInfoHandler,
]
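    # Four independent forms share this page (update, change, credit
    # adjustment, cancel); the POST markers checked below select which
    # one to process.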
@property
@memoized
def subscription_id(self):
return self.args[0]
@property
@memoized
def subscription(self):
return Subscription.objects.get(id=self.subscription_id)
@property
@memoized
def subscription_form(self):
if self.request.method == 'POST' and 'set_subscription' in self.request.POST:
return SubscriptionForm(
self.subscription, None, self.request.user.username,
self.request.POST
)
return SubscriptionForm(self.subscription, None, None)
@property
@memoized
def change_subscription_form(self):
if (self.request.method == 'POST'
and 'subscription_change_note' in self.request.POST):
return ChangeSubscriptionForm(
self.subscription, self.request.user.username,
self.request.POST
)
return ChangeSubscriptionForm(self.subscription,
self.request.user.username)
@property
@memoized
def credit_form(self):
if self.request.method == 'POST' and 'adjust_credit' in self.request.POST:
return CreditForm(self.subscription.account, self.subscription,
self.request.POST)
return CreditForm(self.subscription.account, self.subscription)
@property
@memoized
def cancel_form(self):
if (self.request.method == 'POST'
and 'cancel_subscription' in self.request.POST):
return CancelForm(self.request.POST)
return CancelForm()
@property
def page_context(self):
return {
'cancel_form': self.cancel_form,
'credit_form': self.credit_form,
'can_change_subscription': self.subscription.is_active,
'change_subscription_form': self.change_subscription_form,
'credit_list': CreditLine.objects.filter(subscription=self.subscription),
'disable_cancel': has_subscription_already_ended(self.subscription),
'form': self.subscription_form,
'subscription': self.subscription,
            'subscription_canceled': getattr(self, 'subscription_canceled', False),
}
@property
def page_url(self):
return reverse(self.urlname, args=(self.subscription_id,))
@property
def parent_pages(self):
return [{
'title': SubscriptionInterface.name,
'url': SubscriptionInterface.get_url(),
}]
def post(self, request, *args, **kwargs):
if self.async_response is not None:
return self.async_response
if 'set_subscription' in self.request.POST and self.subscription_form.is_valid():
try:
self.subscription_form.update_subscription()
messages.success(request, "The subscription has been updated.")
except Exception as e:
messages.error(request,
"Could not update subscription due to: %s" % e)
return HttpResponseRedirect(self.page_url)
elif 'adjust_credit' in self.request.POST and self.credit_form.is_valid():
if self.credit_form.adjust_credit(web_user=request.user.username):
return HttpResponseRedirect(self.page_url)
elif ('cancel_subscription' in self.request.POST
and self.cancel_form.is_valid()):
self.cancel_subscription()
messages.success(request, "The subscription has been cancelled.")
        elif ('subscription_change_note' in self.request.POST
              and self.change_subscription_form.is_valid()):
try:
new_sub = self.change_subscription_form.change_subscription()
return HttpResponseRedirect(reverse(self.urlname, args=[new_sub.id]))
except Exception as e:
messages.error(request,
"Could not change subscription due to: %s" % e)
return self.get(request, *args, **kwargs)
def cancel_subscription(self):
self.subscription.cancel_subscription(
note=self.cancel_form.cleaned_data['note'],
web_user=self.request.user.username,
)
self.subscription_canceled = True
class NewSoftwarePlanView(AccountingSectionView):
page_title = 'New Software Plan'
template_name = 'accounting/plans_base.html'
urlname = 'new_software_plan'
@property
@memoized
def plan_info_form(self):
if self.request.method == 'POST':
return PlanInformationForm(None, self.request.POST)
return PlanInformationForm(None)
@property
def page_context(self):
return {
'plan_info_form': self.plan_info_form,
}
@property
def page_url(self):
return reverse(self.urlname)
@property
def parent_pages(self):
return [{
'title': SoftwarePlanInterface.name,
'url': SoftwarePlanInterface.get_url(),
}]
def post(self, request, *args, **kwargs):
if self.plan_info_form.is_valid():
plan = self.plan_info_form.create_plan()
return HttpResponseRedirect(reverse(EditSoftwarePlanView.urlname, args=(plan.id,)))
return self.get(request, *args, **kwargs)
class EditSoftwarePlanView(AccountingSectionView, AsyncHandlerMixin):
template_name = 'accounting/plans.html'
urlname = 'edit_software_plan'
page_title = "Edit Software Plan"
async_handlers = [
Select2RateAsyncHandler,
FeatureRateAsyncHandler,
SoftwareProductRateAsyncHandler,
]
@property
@memoized
def plan(self):
return SoftwarePlan.objects.get(id=self.args[0])
@property
@memoized
def plan_info_form(self):
if self.request.method == 'POST' and 'update_version' not in self.request.POST:
return PlanInformationForm(self.plan, self.request.POST)
return PlanInformationForm(self.plan)
@property
@memoized
def software_plan_version_form(self):
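        # Seed the form with the latest version's rates, serialized to JSON,
        # so the client-side editor can render the existing feature and
        # product rates.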
plan_version = self.plan.get_version()
initial = {
'feature_rates': json.dumps([fmt_feature_rate_dict(r.feature, r)
for r in plan_version.feature_rates.all()] if plan_version else []),
'product_rates': json.dumps([fmt_product_rate_dict(r.product, r)
for r in plan_version.product_rates.all()] if plan_version else []),
'role_slug': plan_version.role.slug if plan_version else None,
}
if self.request.method == 'POST' and 'update_version' in self.request.POST:
return SoftwarePlanVersionForm(self.plan, self.plan.get_version(), self.request.POST, initial=initial)
return SoftwarePlanVersionForm(self.plan, self.plan.get_version(), initial=initial)
@property
def page_context(self):
return {
'plan_info_form': self.plan_info_form,
'plan_version_form': self.software_plan_version_form,
'feature_rate_form': FeatureRateForm(),
'product_rate_form': ProductRateForm(),
'plan_versions': SoftwarePlanVersion.objects.filter(plan=self.plan).order_by('-date_created')
}
@property
def page_url(self):
return reverse(self.urlname, args=self.args)
@property
def parent_pages(self):
return [{
'title': SoftwarePlanInterface.name,
'url': SoftwarePlanInterface.get_url(),
}]
def post(self, request, *args, **kwargs):
if self.async_response is not None:
return self.async_response
if 'update_version' in request.POST:
if self.software_plan_version_form.is_valid():
self.software_plan_version_form.save(request)
return HttpResponseRedirect(self.page_url)
elif self.plan_info_form.is_valid():
self.plan_info_form.update_plan(self.plan)
messages.success(request, "The %s Software Plan was successfully updated." % self.plan.name)
return self.get(request, *args, **kwargs)
class TriggerInvoiceView(AccountingSectionView, AsyncHandlerMixin):
urlname = 'accounting_trigger_invoice'
page_title = "Trigger Invoice"
template_name = 'accounting/trigger_invoice.html'
async_handlers = [
Select2InvoiceTriggerHandler,
]
@property
@memoized
def trigger_form(self):
if self.request.method == 'POST':
return TriggerInvoiceForm(self.request.POST)
return TriggerInvoiceForm()
@property
def page_url(self):
return reverse(self.urlname)
@property
def page_context(self):
return {
'trigger_form': self.trigger_form,
}
def post(self, request, *args, **kwargs):
if self.async_response is not None:
return self.async_response
if self.trigger_form.is_valid():
try:
self.trigger_form.trigger_invoice()
messages.success(
request, "Successfully triggered invoices for domain %s."
% self.trigger_form.cleaned_data['domain'])
return HttpResponseRedirect(reverse(self.urlname))
except (CreditLineError, InvoiceError) as e:
messages.error(request, "Error generating invoices: %s" % e)
return self.get(request, *args, **kwargs)
class TriggerBookkeeperEmailView(AccountingSectionView):
urlname = 'accounting_trigger_bookkeeper_email'
page_title = "Trigger Bookkeeper Email"
template_name = 'accounting/trigger_bookkeeper.html'
@property
@memoized
def trigger_email_form(self):
if self.request.method == 'POST':
return TriggerBookkeeperEmailForm(self.request.POST)
return TriggerBookkeeperEmailForm()
@property
def page_url(self):
return reverse(self.urlname)
@property
def page_context(self):
return {
'trigger_email_form': self.trigger_email_form,
}
def post(self, request, *args, **kwargs):
if self.trigger_email_form.is_valid():
self.trigger_email_form.trigger_email()
messages.success(request, "Sent the Bookkeeper email!")
return HttpResponseRedirect(reverse(self.urlname))
return self.get(request, *args, **kwargs)
class TestRenewalEmailView(AccountingSectionView):
    urlname = 'accounting_test_renewal_email'
page_title = "Test Renewal Reminder Email"
template_name = 'accounting/test_reminder_emails.html'
@property
@memoized
def reminder_email_form(self):
if self.request.method == 'POST':
return TestReminderEmailFrom(self.request.POST)
return TestReminderEmailFrom()
@property
def page_url(self):
return reverse(self.urlname)
@property
def page_context(self):
return {
'reminder_email_form': self.reminder_email_form,
}
def post(self, request, *args, **kwargs):
if self.reminder_email_form.is_valid():
self.reminder_email_form.send_emails()
messages.success(request, "Sent the Reminder emails!")
return HttpResponseRedirect(reverse(self.urlname))
return self.get(request, *args, **kwargs)
def pricing_table_json(request, product, locale):
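    """Return the pricing table for `product`, localized to `locale`, as
    JSON; wraps the payload in a JSONP callback when one is supplied."""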
if product not in [c[0] for c in SoftwareProductType.CHOICES]:
return HttpResponseBadRequest("Not a valid product")
if locale not in [l[0] for l in settings.LANGUAGES]:
return HttpResponseBadRequest("Not a supported language.")
with localize(locale):
table = PricingTable.get_table_by_product(product)
table_json = json.dumps(table, cls=LazyEncoder)
    # JSONP support: older browsers (notably Internet Explorer) cannot make
    # cross-domain XHR requests, so wrap the JSON in the supplied callback.
callback = request.GET.get('callback') or request.POST.get('callback')
if callback is not None:
table_json = "%s(%s)" % (callback, table_json)
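    # Permissive CORS headers so the table can be fetched from other origins.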
response = HttpResponse(table_json,
content_type='application/json; charset=UTF-8')
response["Access-Control-Allow-Origin"] = "*"
response["Access-Control-Allow-Methods"] = "POST, GET, OPTIONS"
response["Access-Control-Max-Age"] = "1000"
response["Access-Control-Allow-Headers"] = "*"
return response
class InvoiceSummaryView(AccountingSectionView):
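    """Detail page for a single invoice: balance adjustments, adjustment
    history, billing records, and email resending."""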
template_name = 'accounting/invoice.html'
urlname = 'invoice_summary'
@property
@memoized
def invoice(self):
return Invoice.objects.get(id=self.args[0])
@property
def page_title(self):
return "Invoice #%s" % self.invoice.invoice_number
@property
def page_url(self):
return reverse(self.urlname, args=self.args)
@property
def parent_pages(self):
return [{
'title': InvoiceInterface.name,
'url': InvoiceInterface.get_url(),
}]
@property
@memoized
def adjust_balance_form(self):
if self.request.method == 'POST':
return AdjustBalanceForm(self.invoice, self.request.POST)
return AdjustBalanceForm(self.invoice)
@property
@memoized
def adjustment_list(self):
adjustment_list = CreditAdjustment.objects.filter(invoice=self.invoice)
return adjustment_list.order_by('-date_created')
@property
@memoized
def billing_records(self):
return self.invoice.billingrecord_set.all()
@property
@memoized
def invoice_info_form(self):
return InvoiceInfoForm(self.invoice)
@property
@memoized
def resend_email_form(self):
if self.request.method == 'POST':
return ResendEmailForm(self.invoice, self.request.POST)
return ResendEmailForm(self.invoice)
@property
def page_context(self):
return {
'adjust_balance_form': self.adjust_balance_form,
'adjustment_list': self.adjustment_list,
'billing_records': self.billing_records,
'invoice_info_form': self.invoice_info_form,
'resend_email_form': self.resend_email_form,
'can_send_email': not self.invoice.subscription.do_not_invoice,
}
def post(self, request, *args, **kwargs):
if 'adjust_balance' in self.request.POST:
if self.adjust_balance_form.is_valid():
self.adjust_balance_form.adjust_balance(
web_user=self.request.user.username,
)
return HttpResponseRedirect(self.page_url)
elif 'resend_email' in self.request.POST:
if self.resend_email_form.is_valid():
try:
self.resend_email_form.resend_email()
return HttpResponseRedirect(self.page_url)
except Exception as e:
messages.error(request,
"Could not send emails due to: %s" % e)
return self.get(request, *args, **kwargs)
class ManageAccountingAdminsView(AccountingSectionView, CRUDPaginatedViewMixin):
template_name = 'accounting/accounting_admins.html'
urlname = 'accounting_manage_admins'
page_title = ugettext_noop("Accounting Admins")
limit_text = ugettext_noop("Admins per page")
empty_notification = ugettext_noop("You haven't specified any accounting admins. "
"How are you viewing this page??! x_x")
loading_message = ugettext_noop("Loading admin list...")
deleted_items_header = ugettext_noop("Removed Users:")
new_items_header = ugettext_noop("Added Users:")
@property
def page_url(self):
return reverse(self.urlname)
@property
def page_context(self):
return self.pagination_context
@property
def parameters(self):
return self.request.POST if self.request.method == 'POST' else self.request.GET
@property
def accounting_admin_queryset(self):
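        # Accounting admins are users whose prbac role has been granted the
        # OPERATIONS_TEAM role; the requesting user is excluded from the
        # paginated list.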
return User.objects.filter(
prbac_role__role__memberships_granted__to_role__slug=privileges.OPERATIONS_TEAM
).exclude(username=self.request.user.username)
@property
@memoized
def total(self):
return self.accounting_admin_queryset.count()
@property
def column_names(self):
return [
_('Username'),
_('Action'),
]
@property
def paginated_list(self):
for admin in self.accounting_admin_queryset:
yield {
'itemData': self._fmt_admin_data(admin),
'template': 'accounting-admin-row',
}
def _fmt_admin_data(self, admin):
return {
'id': admin.id,
'username': admin.username,
}
def get_create_form(self, is_blank=False):
if self.request.method == 'POST' and not is_blank:
return CreateAdminForm(self.request.POST)
return CreateAdminForm()
def get_create_item_data(self, create_form):
try:
user = create_form.add_admin_user()
except CreateAccountingAdminError as e:
return {
'error': "Could Not Add to Admins: %s" % e,
}
return {
'itemData': self._fmt_admin_data(user),
'template': 'accounting-admin-new',
}
def get_deleted_item_data(self, item_id):
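        # Removing an admin means deleting the Grant that links the user's
        # role to the OPERATIONS_TEAM role.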
user = User.objects.get(id=item_id)
ops_role = Role.objects.get(slug=privileges.OPERATIONS_TEAM)
grant_to_remove = Grant.objects.filter(
from_role=user.prbac_role.role,
to_role=ops_role,
)
grant_to_remove.delete()
return {
'deletedItem': self._fmt_admin_data(user),
'template': 'accounting-admin-removed',
}
def post(self, *args, **kwargs):
return self.paginate_crud_response
class AccountingSingleOptionResponseView(View, AsyncHandlerMixin):
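    """POST-only endpoint that serves the async single-option filters
    (subscriber, subscription, account, billing contact, and plan lookups)."""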
urlname = 'accounting_subscriber_response'
http_method_names = ['post']
async_handlers = [
SubscriberFilterAsyncHandler,
SubscriptionFilterAsyncHandler,
AccountFilterAsyncHandler,
BillingContactInfoAsyncHandler,
SoftwarePlanAsyncHandler,
]
@method_decorator(requires_privilege_raise404(privileges.ACCOUNTING_ADMIN))
def dispatch(self, request, *args, **kwargs):
return super(AccountingSingleOptionResponseView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
if self.async_response:
return self.async_response
return HttpResponseBadRequest("Please check your query.")
{}), '()\n', (17348, 17350), False, 'from corehq.apps.accounting.interface import AccountingInterface, SubscriptionInterface, SoftwarePlanInterface, InvoiceInterface\n'), ((17682, 17717), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['self.page_url'], {}), '(self.page_url)\n', (17702, 17717), False, 'from django.http import HttpResponseRedirect, HttpResponse, HttpResponseBadRequest\n'), ((17830, 17926), 'django.contrib.messages.success', 'messages.success', (['request', "('The %s Software Plan was successfully updated.' % self.plan.name)"], {}), "(request, 'The %s Software Plan was successfully updated.' %\n self.plan.name)\n", (17846, 17926), False, 'from django.contrib import messages\n'), ((18883, 19005), 'django.contrib.messages.success', 'messages.success', (['request', "('Successfully triggered invoices for domain %s.' % self.trigger_form.\n cleaned_data['domain'])"], {}), "(request, 'Successfully triggered invoices for domain %s.' %\n self.trigger_form.cleaned_data['domain'])\n", (18899, 19005), False, 'from django.contrib import messages\n'), ((20188, 20209), 'django.core.urlresolvers.reverse', 'reverse', (['self.urlname'], {}), '(self.urlname)\n', (20195, 20209), False, 'from django.core.urlresolvers import reverse\n'), ((21132, 21153), 'django.core.urlresolvers.reverse', 'reverse', (['self.urlname'], {}), '(self.urlname)\n', (21139, 21153), False, 'from django.core.urlresolvers import reverse\n'), ((22813, 22839), 'corehq.apps.accounting.interface.InvoiceInterface.get_url', 'InvoiceInterface.get_url', ([], {}), '()\n', (22837, 22839), False, 'from corehq.apps.accounting.interface import AccountingInterface, SubscriptionInterface, SoftwarePlanInterface, InvoiceInterface\n'), ((24439, 24474), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['self.page_url'], {}), '(self.page_url)\n', (24459, 24474), False, 'from django.http import HttpResponseRedirect, HttpResponse, HttpResponseBadRequest\n'), ((25906, 26011), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'prbac_role__role__memberships_granted__to_role__slug': 'privileges.OPERATIONS_TEAM'}), '(prbac_role__role__memberships_granted__to_role__slug=\n privileges.OPERATIONS_TEAM)\n', (25925, 26011), False, 'from django.contrib.auth.models import User\n'), ((5943, 5992), 'corehq.apps.accounting.models.Subscription.objects.filter', 'Subscription.objects.filter', ([], {'account': 'self.account'}), '(account=self.account)\n', (5970, 5992), False, 'from corehq.apps.accounting.models import SoftwareProductType, Invoice, BillingAccount, CreditLine, Subscription, SoftwarePlanVersion, SoftwarePlan, CreditAdjustment\n'), ((8880, 8954), 'django.core.urlresolvers.reverse', 'reverse', (['ManageBillingAccountView.urlname'], {'args': '(subscription.account.id,)'}), '(ManageBillingAccountView.urlname, args=(subscription.account.id,))\n', (8887, 8954), False, 'from django.core.urlresolvers import reverse\n'), ((9027, 9038), 'django.forms.util.ErrorList', 'ErrorList', ([], {}), '()\n', (9036, 9038), False, 'from django.forms.util import ErrorList\n'), ((12725, 12796), 'django.contrib.messages.error', 'messages.error', (['request', "('Could not update subscription due to: %s' % e)"], {}), "(request, 'Could not update subscription due to: %s' % e)\n", (12739, 12796), False, 'from django.contrib import messages\n'), ((13068, 13103), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['self.page_url'], {}), '(self.page_url)\n', (13088, 13103), False, 'from django.http import 
HttpResponseRedirect, HttpResponse, HttpResponseBadRequest\n'), ((13260, 13325), 'django.contrib.messages.success', 'messages.success', (['request', '"""The subscription has been cancelled."""'], {}), "(request, 'The subscription has been cancelled.')\n", (13276, 13325), False, 'from django.contrib import messages\n'), ((17011, 17061), 'corehq.apps.accounting.models.SoftwarePlanVersion.objects.filter', 'SoftwarePlanVersion.objects.filter', ([], {'plan': 'self.plan'}), '(plan=self.plan)\n', (17045, 17061), False, 'from corehq.apps.accounting.models import SoftwareProductType, Invoice, BillingAccount, CreditLine, Subscription, SoftwarePlanVersion, SoftwarePlan, CreditAdjustment\n'), ((19096, 19117), 'django.core.urlresolvers.reverse', 'reverse', (['self.urlname'], {}), '(self.urlname)\n', (19103, 19117), False, 'from django.core.urlresolvers import reverse\n'), ((19192, 19252), 'django.contrib.messages.error', 'messages.error', (['request', "('Error generating invoices: %s' % e)"], {}), "(request, 'Error generating invoices: %s' % e)\n", (19206, 19252), False, 'from django.contrib import messages\n'), ((15992, 16027), 'corehq.apps.accounting.utils.fmt_feature_rate_dict', 'fmt_feature_rate_dict', (['r.feature', 'r'], {}), '(r.feature, r)\n', (16013, 16027), False, 'from corehq.apps.accounting.utils import fmt_feature_rate_dict, fmt_product_rate_dict, has_subscription_already_ended\n'), ((16179, 16214), 'corehq.apps.accounting.utils.fmt_product_rate_dict', 'fmt_product_rate_dict', (['r.product', 'r'], {}), '(r.product, r)\n', (16200, 16214), False, 'from corehq.apps.accounting.utils import fmt_feature_rate_dict, fmt_product_rate_dict, has_subscription_already_ended\n'), ((24681, 24716), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['self.page_url'], {}), '(self.page_url)\n', (24701, 24716), False, 'from django.http import HttpResponseRedirect, HttpResponse, HttpResponseBadRequest\n'), ((7070, 7128), 'django.contrib.messages.success', 'messages.success', (['request', '"""Successfully adjusted credit."""'], {}), "(request, 'Successfully adjusted credit.')\n", (7086, 7128), False, 'from django.contrib import messages\n'), ((7156, 7191), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['self.page_url'], {}), '(self.page_url)\n', (7176, 7191), False, 'from django.http import HttpResponseRedirect, HttpResponse, HttpResponseBadRequest\n'), ((7397, 7451), 'django.contrib.messages.error', 'messages.error', (['request', "('Issue adding credit: %s' % e)"], {}), "(request, 'Issue adding credit: %s' % e)\n", (7411, 7451), False, 'from django.contrib import messages\n'), ((24776, 24839), 'django.contrib.messages.error', 'messages.error', (['request', "('Could not send emails due to: %s' % e)"], {}), "(request, 'Could not send emails due to: %s' % e)\n", (24790, 24839), False, 'from django.contrib import messages\n'), ((5844, 5884), 'corehq.apps.accounting.models.Invoice.objects.filter', 'Invoice.objects.filter', ([], {'subscription': 'sub'}), '(subscription=sub)\n', (5866, 5884), False, 'from corehq.apps.accounting.models import SoftwareProductType, Invoice, BillingAccount, CreditLine, Subscription, SoftwarePlanVersion, SoftwarePlan, CreditAdjustment\n'), ((13597, 13637), 'django.core.urlresolvers.reverse', 'reverse', (['self.urlname'], {'args': '[new_sub.id]'}), '(self.urlname, args=[new_sub.id])\n', (13604, 13637), False, 'from django.core.urlresolvers import reverse\n'), ((13690, 13761), 'django.contrib.messages.error', 'messages.error', (['request', "('Could not change 
subscription due to: %s' % e)"], {}), "(request, 'Could not change subscription due to: %s' % e)\n", (13704, 13761), False, 'from django.contrib import messages\n'), ((5725, 5765), 'corehq.apps.accounting.models.Invoice.objects.filter', 'Invoice.objects.filter', ([], {'subscription': 'sub'}), '(subscription=sub)\n', (5747, 5765), False, 'from corehq.apps.accounting.models import SoftwareProductType, Invoice, BillingAccount, CreditLine, Subscription, SoftwarePlanVersion, SoftwarePlan, CreditAdjustment\n')]
|
# -*- coding: utf-8 -*-
"""This file contains an Outlook search MRU Registry parser."""
from __future__ import unicode_literals
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import winreg
from plaso.parsers.winreg_plugins import interface
class OutlookSearchMRUEventData(events.EventData):
"""Outlook search MRU event data attribute container.
Attributes:
entries (str): most recently used (MRU) entries.
key_path (str): Windows Registry key path.
"""
DATA_TYPE = 'windows:registry:outlook_search_mru'
def __init__(self):
"""Initializes event data."""
super(OutlookSearchMRUEventData, self).__init__(data_type=self.DATA_TYPE)
self.entries = None
self.key_path = None
class OutlookSearchMRUPlugin(interface.WindowsRegistryPlugin):
"""Windows Registry plugin parsing Outlook Search MRU keys."""
NAME = 'microsoft_outlook_mru'
DATA_FORMAT = 'Microsoft Outlook search MRU Registry data'
FILTERS = frozenset([
interface.WindowsRegistryKeyPathFilter(
'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\14.0\\Outlook\\'
'Search'),
interface.WindowsRegistryKeyPathFilter(
'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\15.0\\Outlook\\'
'Search')])
# TODO: The catalog for Office 2013 (15.0) contains binary values not
# dword values. Check if Office 2007 and 2010 have the same. Re-enable the
# plug-ins once confirmed and OutlookSearchMRUPlugin has been extended to
# handle the binary data or create a OutlookSearchCatalogMRUPlugin.
# Registry keys for:
# MS Outlook 2007 Search Catalog:
# 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\12.0\\Outlook\\'
# 'Catalog'
# MS Outlook 2010 Search Catalog:
# 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\14.0\\Outlook\\'
# 'Search\\Catalog'
# MS Outlook 2013 Search Catalog:
# 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\15.0\\Outlook\\'
# 'Search\\Catalog'
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
"""
entries = []
for registry_value in registry_key.GetValues():
# Ignore the default value.
if not registry_value.name:
continue
# Ignore any value that is empty or that does not contain an integer.
if not registry_value.data or not registry_value.DataIsInteger():
continue
# TODO: change this 32-bit integer into something meaningful, for now
# the value name is the most interesting part.
value_integer = registry_value.GetDataAsObject()
value_string = '{0:s}: 0x{1:08x}'.format(
registry_value.name, value_integer)
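# Hypothetical example: a value named 'MRUEntry0' holding the integer 1
# would be rendered as 'MRUEntry0: 0x00000001'.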
entries.append(value_string)
event_data = OutlookSearchMRUEventData()
event_data.entries = ' '.join(entries) or None
event_data.key_path = registry_key.path
event = time_events.DateTimeValuesEvent(
registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
parser_mediator.ProduceEventWithEventData(event, event_data)
winreg.WinRegistryParser.RegisterPlugin(OutlookSearchMRUPlugin)
|
[
"plaso.containers.time_events.DateTimeValuesEvent",
"plaso.parsers.winreg_plugins.interface.WindowsRegistryKeyPathFilter",
"plaso.parsers.winreg.WinRegistryParser.RegisterPlugin"
] |
[((3366, 3429), 'plaso.parsers.winreg.WinRegistryParser.RegisterPlugin', 'winreg.WinRegistryParser.RegisterPlugin', (['OutlookSearchMRUPlugin'], {}), '(OutlookSearchMRUPlugin)\n', (3405, 3429), False, 'from plaso.parsers import winreg\n'), ((3188, 3294), 'plaso.containers.time_events.DateTimeValuesEvent', 'time_events.DateTimeValuesEvent', (['registry_key.last_written_time', 'definitions.TIME_DESCRIPTION_WRITTEN'], {}), '(registry_key.last_written_time, definitions\n .TIME_DESCRIPTION_WRITTEN)\n', (3219, 3294), False, 'from plaso.containers import time_events\n'), ((1048, 1164), 'plaso.parsers.winreg_plugins.interface.WindowsRegistryKeyPathFilter', 'interface.WindowsRegistryKeyPathFilter', (['"""HKEY_CURRENT_USER\\\\Software\\\\Microsoft\\\\Office\\\\14.0\\\\Outlook\\\\Search"""'], {}), "(\n 'HKEY_CURRENT_USER\\\\Software\\\\Microsoft\\\\Office\\\\14.0\\\\Outlook\\\\Search')\n", (1086, 1164), False, 'from plaso.parsers.winreg_plugins import interface\n'), ((1191, 1307), 'plaso.parsers.winreg_plugins.interface.WindowsRegistryKeyPathFilter', 'interface.WindowsRegistryKeyPathFilter', (['"""HKEY_CURRENT_USER\\\\Software\\\\Microsoft\\\\Office\\\\15.0\\\\Outlook\\\\Search"""'], {}), "(\n 'HKEY_CURRENT_USER\\\\Software\\\\Microsoft\\\\Office\\\\15.0\\\\Outlook\\\\Search')\n", (1229, 1307), False, 'from plaso.parsers.winreg_plugins import interface\n')]
|
# get spectral coefficients for omega
# script for plotting directly from data on disk; not intended for use with the bash script
import os
import sys
import glob
import time
import pathlib
import logging
import numpy as np
from mpi4py import MPI
comm = MPI.COMM_WORLD
from scipy.sparse import linalg as spla
from dedalus.tools.config import config
from simple_sphere import SimpleSphere, TensorField, TensorSystem
import equations
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
from dedalus.extras import plot_tools
from matplotlib.animation import FFMpegWriter
logger = logging.getLogger(__name__)
from matplotlib.patches import Rectangle
from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable
#add path to data folder
sim_number = 110
input_folder = "/Volumes/ExtDrive/data/sphere%i" %(sim_number)
output_folder = "videos"
first_frame = 1
last_frame = len(glob.glob1("".join([input_folder,'/']),"*.npz"))
last_frame = 3000
dpi = 300
FPS = 20
fields = ['om']
ell_max = 20 #for plotting
marker_size = 0.5
step = 5 #number of frames to skip
vphlim = 10
axs = [None for i in range(3)]
w, h = 0.4, 0.6
#plotting
#plt.rc('font', size=15)
plt.rc('text', usetex=True)
plt.rc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman'], 'size': 13})
fig = plt.figure(figsize=(8,4.5))
axs[0] = plt.axes((0.1, 0.2, 0.45, h))
axs[1] = plt.axes((0.63, 0.2, 0.33, h))
# Setup output folder
if comm.rank == 0:
if not os.path.exists(output_folder):
os.makedirs(output_folder)
comm.barrier()
max_vals = {key: 0 for key in fields}
clims = {key: 0 for key in fields}
#for field in fields:
# for i in range(first_frame + comm.rank+1, last_frame + 1, comm.size):
# with np.load("".join([input_folder, '/output_%i.npz' %i])) as file:
# fieldval = file[field]
# max_vals[field] = max(max_vals[field], np.max(fieldval))
#for field in fields:
# clims[field] = 0.75*max_vals[field]
clims['om'] = 150.0
metadata = dict(title='Movie', artist='Matplotlib', comment='Movie support!')
writer = FFMpegWriter(fps=FPS, metadata=metadata)
with writer.saving(fig, "%s/sphere%i_om_coeffs.mp4" %(output_folder, sim_number), dpi):
for ind in range(first_frame + comm.rank + 1, last_frame + 1, step):
logger.info("Frame: %i" % ind)
with np.load(os.path.join(input_folder, 'output_%i.npz' %(ind))) as file:
if ind == first_frame + comm.rank +1:
phi = file['phi']
theta = file['theta']
L_max = len(theta)-1
S_max = 4
simplesphere = SimpleSphere(L_max, S_max)
omega = TensorField(simplesphere, rank=0)
om = file['om']
vph = np.mean(file['v_ph'], axis=0)
print(np.max(vph))
time = file['t'][0]
# assign loaded data
omega.component_fields[0]['g'] = om
# spectral transform
omega.forward_phi()
omega.forward_theta()
coeffs = omega.coeffs
#assign coeffs to a numpy array
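# coeffs[m] holds the coefficients for azimuthal order m and degrees ell >= m,
# so row m of the square array is only filled from column m onwards
# (entries with ell < m do not exist on the sphere).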
coeffs_arr = np.zeros([L_max+1, L_max+1], dtype=complex)
for m in range(len(coeffs)):
coeffs_arr[m,m:] = coeffs[m]
mag = np.abs(coeffs_arr)
phase = np.angle(coeffs_arr)
if ind == first_frame + comm.rank +1:
mag_fac = marker_size/np.max(mag)
m = np.arange(0,L_max+1)
ell = np.arange(0,L_max+1)
ellell, mm = np.meshgrid(ell, m)
if ind == first_frame + comm.rank +1:
title = fig.suptitle(r'$t/\tau = %.4f$' %time, usetex=True)
ax = axs[0]
img0 = ax.pcolormesh(phi, np.pi/2-theta, om.T, cmap='RdBu_r', shading='gouraud', rasterized=True)
ax.set_ylabel(r'Latitude $(\pi/2-\theta)$', usetex=True);
ax.set_yticks([-np.pi/2, 0, np.pi/2])
ax.set_yticklabels([r'$-\frac{\pi}{2}$', r'$0$', r'$\frac{\pi}{2}$'])
ax.set_xlabel(r'Longitude $\phi$', usetex=True)
ax.set_xticks([0, np.pi, 2*np.pi])
ax.set_xticklabels([r'$0$', r'$\pi$', r'$2 \pi$'])
img0.set_clim([-clims['om'], clims['om']])
#add colorbar
ax_divider = make_axes_locatable(ax)
cax = ax_divider.append_axes("top", size="7%", pad="4%")
cb = plt.colorbar(img0, cax=cax, orientation="horizontal")
cax.xaxis.set_ticks_position("top")
cb.set_ticks([-clims['om'], 0, clims['om']])
#add axis for v_ph
vph_ax = ax_divider.append_axes("right", size="30%", pad="17%")
line, = vph_ax.plot(vph, np.pi/2-theta, 'k', linewidth=1)
vph_ax.set_yticks([]); vph_ax.set_ylim([-np.pi/2, np.pi/2])
vph_ax.set_xlim([-vphlim, vphlim]);
vph_ax.set_xticks([-vphlim, 0, vphlim]);
vph_ax.axvline(linestyle='--',color='k',linewidth=0.5)
vph_ax.set_xlabel(r'$\langle v_\phi\rangle_\phi/(R/\tau)$')
ax = axs[1]
img1 = ax.scatter(mm.flatten(), ellell.flatten(), mag_fac*mag.flatten(), c=phase.flatten(), \
cmap='hsv', edgecolor='none')
rect = Rectangle((-1, 10.4), ell_max+1, 4, facecolor='k', alpha=0.2)
ax.add_patch(rect)
ax.set_xlim(-1, ell_max), ax.set_ylim(-1, ell_max)
ax.set_xlabel(r'$m$', usetex=True), ax.set_ylabel(r'$\ell$', usetex=True, rotation=0)
img1.set_clim(0, 2*np.pi)
ax_divider = make_axes_locatable(ax)
# add an axes above the main axes.
cax = ax_divider.append_axes("top", size="7%", pad="4%")
cb = plt.colorbar(img1, cax=cax, orientation="horizontal")
cb.set_ticks([0, np.pi, 2*np.pi])
cb.set_ticklabels(['$0$', r'$\pi$', r'$2 \pi$'])
cax.xaxis.set_ticks_position("top")
#fig.tight_layout()
else:
title.set_text(r'$t/\tau = %.4f$' %time)
img0.set_array(om.T.ravel())
img1.set_sizes(mag_fac*mag.flatten())
img1.set_array(phase.flatten())
line.set_xdata(vph)
writer.grab_frame()
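# A hypothetical invocation (script name assumed). Note the frame loop steps by
# `step` rather than comm.size while all ranks write the same mp4, so running
# on a single rank is the safe choice:
#   python plot_omega_coeffs.py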
|
[
"numpy.abs",
"matplotlib.pyplot.axes",
"numpy.angle",
"simple_sphere.SimpleSphere",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.arange",
"os.path.join",
"numpy.meshgrid",
"matplotlib.patches.Rectangle",
"os.path.exists",
"matplotlib.pyplot.colorbar",
"numpy.max",
"matplotlib.pyplot.rc",
"mpl_toolkits.axes_grid1.axes_divider.make_axes_locatable",
"simple_sphere.TensorField",
"matplotlib.use",
"os.makedirs",
"numpy.zeros",
"matplotlib.animation.FFMpegWriter",
"logging.getLogger"
] |
[((449, 470), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (463, 470), False, 'import matplotlib\n'), ((638, 665), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (655, 665), False, 'import logging\n'), ((1218, 1245), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (1224, 1245), True, 'import matplotlib.pyplot as plt\n'), ((1246, 1335), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {}), "('font', **{'family': 'serif', 'serif': ['Computer Modern Roman'],\n 'size': 13})\n", (1252, 1335), True, 'import matplotlib.pyplot as plt\n'), ((1339, 1367), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 4.5)'}), '(figsize=(8, 4.5))\n', (1349, 1367), True, 'import matplotlib.pyplot as plt\n'), ((1376, 1405), 'matplotlib.pyplot.axes', 'plt.axes', (['(0.1, 0.2, 0.45, h)'], {}), '((0.1, 0.2, 0.45, h))\n', (1384, 1405), True, 'import matplotlib.pyplot as plt\n'), ((1415, 1445), 'matplotlib.pyplot.axes', 'plt.axes', (['(0.63, 0.2, 0.33, h)'], {}), '((0.63, 0.2, 0.33, h))\n', (1423, 1445), True, 'import matplotlib.pyplot as plt\n'), ((2107, 2147), 'matplotlib.animation.FFMpegWriter', 'FFMpegWriter', ([], {'fps': 'FPS', 'metadata': 'metadata'}), '(fps=FPS, metadata=metadata)\n', (2119, 2147), False, 'from matplotlib.animation import FFMpegWriter\n'), ((1499, 1528), 'os.path.exists', 'os.path.exists', (['output_folder'], {}), '(output_folder)\n', (1513, 1528), False, 'import os\n'), ((1538, 1564), 'os.makedirs', 'os.makedirs', (['output_folder'], {}), '(output_folder)\n', (1549, 1564), False, 'import os\n'), ((3142, 3189), 'numpy.zeros', 'np.zeros', (['[L_max + 1, L_max + 1]'], {'dtype': 'complex'}), '([L_max + 1, L_max + 1], dtype=complex)\n', (3150, 3189), True, 'import numpy as np\n'), ((3279, 3297), 'numpy.abs', 'np.abs', (['coeffs_arr'], {}), '(coeffs_arr)\n', (3285, 3297), True, 'import numpy as np\n'), ((3314, 3334), 'numpy.angle', 'np.angle', (['coeffs_arr'], {}), '(coeffs_arr)\n', (3322, 3334), True, 'import numpy as np\n'), ((3441, 3464), 'numpy.arange', 'np.arange', (['(0)', '(L_max + 1)'], {}), '(0, L_max + 1)\n', (3450, 3464), True, 'import numpy as np\n'), ((3476, 3499), 'numpy.arange', 'np.arange', (['(0)', '(L_max + 1)'], {}), '(0, L_max + 1)\n', (3485, 3499), True, 'import numpy as np\n'), ((3518, 3537), 'numpy.meshgrid', 'np.meshgrid', (['ell', 'm'], {}), '(ell, m)\n', (3529, 3537), True, 'import numpy as np\n'), ((2795, 2824), 'numpy.mean', 'np.mean', (["file['v_ph']"], {'axis': '(0)'}), "(file['v_ph'], axis=0)\n", (2802, 2824), True, 'import numpy as np\n'), ((4271, 4294), 'mpl_toolkits.axes_grid1.axes_divider.make_axes_locatable', 'make_axes_locatable', (['ax'], {}), '(ax)\n', (4290, 4294), False, 'from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable\n'), ((4381, 4434), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['img0'], {'cax': 'cax', 'orientation': '"""horizontal"""'}), "(img0, cax=cax, orientation='horizontal')\n", (4393, 4434), True, 'import matplotlib.pyplot as plt\n'), ((5230, 5293), 'matplotlib.patches.Rectangle', 'Rectangle', (['(-1, 10.4)', '(ell_max + 1)', '(4)'], {'facecolor': '"""k"""', 'alpha': '(0.2)'}), "((-1, 10.4), ell_max + 1, 4, facecolor='k', alpha=0.2)\n", (5239, 5293), False, 'from matplotlib.patches import Rectangle\n'), ((5545, 5568), 'mpl_toolkits.axes_grid1.axes_divider.make_axes_locatable', 'make_axes_locatable', (['ax'], {}), '(ax)\n', (5564, 5568), False, 'from mpl_toolkits.axes_grid1.axes_divider import 
make_axes_locatable\n'), ((5702, 5755), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['img1'], {'cax': 'cax', 'orientation': '"""horizontal"""'}), "(img1, cax=cax, orientation='horizontal')\n", (5714, 5755), True, 'import matplotlib.pyplot as plt\n'), ((2386, 2435), 'os.path.join', 'os.path.join', (['input_folder', "('output_%i.npz' % ind)"], {}), "(input_folder, 'output_%i.npz' % ind)\n", (2398, 2435), False, 'import os\n'), ((2663, 2689), 'simple_sphere.SimpleSphere', 'SimpleSphere', (['L_max', 'S_max'], {}), '(L_max, S_max)\n', (2675, 2689), False, 'from simple_sphere import SimpleSphere, TensorField, TensorSystem\n'), ((2714, 2747), 'simple_sphere.TensorField', 'TensorField', (['simplesphere'], {'rank': '(0)'}), '(simplesphere, rank=0)\n', (2725, 2747), False, 'from simple_sphere import SimpleSphere, TensorField, TensorSystem\n'), ((2843, 2854), 'numpy.max', 'np.max', (['vph'], {}), '(vph)\n', (2849, 2854), True, 'import numpy as np\n'), ((3416, 3427), 'numpy.max', 'np.max', (['mag'], {}), '(mag)\n', (3422, 3427), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
import unittest
import numpy as np
from datetime import date
from funcat import FOURWEEK, FOURWEEKQTY
from funcat.api import T, S, set_current_freq, symbol, \
CLOSE
from funcat.utils import FuncatTestCase
class TestTurtle(FuncatTestCase):
@classmethod
def setUp(cls) -> None:
T("20210506")
S("000001.XSHG")
set_current_freq("1d")
def test_four_week_qty(self):
n = 20
last_high, last_low = FOURWEEKQTY()
print(last_high, last_low.series[-10:])
print(last_high.series[n - 1:n + 20], last_low.series[:10])
print(last_high, last_low.series[-10:])
print(tuple(zip(last_high.series, last_low.series)))
for h, l in tuple(zip(last_high.series, last_low.series)):
self.assertTrue((h > l) or not (h > 0), f"four-week upper band should exceed the lower band: {h}, {l} ; {type(h)}")
self.assertTrue(len(CLOSE) == len(last_high), f"{len(CLOSE)} == {len(last_high)}")
def test_four_week_qty_weeks(self):
"""周为单位计算四周规则"""
set_current_freq("W")
n = 4
last_high, last_low = FOURWEEKQTY(CLOSE, CLOSE, n, n)
print(last_high, last_low.series[-10:])
print(last_high.series[n - 1:n + 20], last_low.series[:10])
print(last_high, last_low.series[-10:])
print(tuple(zip(last_high.series, last_low.series)))
for h, l in tuple(zip(last_high.series, last_low.series)):
self.assertTrue((h > l) or not (h > 0), f"four-week upper band should exceed the lower band: {h}, {l} ; {type(h)}")
self.assertTrue(len(CLOSE) == len(last_high), f"{len(CLOSE)} == {len(last_high)}")
def test_four_week(self):
n = 20
fakedata = self.fakeMarketData()
hh, ll = FOURWEEK(fakedata, fakedata, n, n)
data = hh + ll
print(data.series[n - 1:n + 20])
last_high, last_low = FOURWEEKQTY(fakedata, fakedata, n, n)
for count, item in enumerate(data.tolist()):
if count >= n - 1:
if data.series[count] > 0:
self.assertTrue(fakedata.series[count] > last_high.series[count - 1],
f"{count}: { data.series[count]} --> {fakedata.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
elif data.series[count] < 0:
self.assertTrue(fakedata.series[count] < last_low.series[count - 1],
f"{count}: { data.series[count]} --> {fakedata.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
def test_four_week2(self):
n = 20
hh, ll = FOURWEEK()
data = hh + ll
print(data.series[n - 1:n + 20])
last_high, last_low = FOURWEEKQTY()
print(f"CLose: {len(CLOSE)}\n", CLOSE.series[n - 1:20])
print(f"high series: {len(last_high)}\n", last_high.series[n - 1:n + 20], "\nlow series:\n", last_low.series[n - 1:n + 20])
print(data.series[-10:])
for count, item in enumerate(data.tolist()):
if count >= n - 1:
if data.series[count]:
self.assertTrue(CLOSE.series[count] > last_high.series[count - 1]
or CLOSE.series[count] < last_low.series[count - 1] ,
f"{count}: { data.series[count]} --> {CLOSE.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
def test_four_week_002124(self):
n = 20
code = "002124"
S(code)
hh, ll = FOURWEEK(high_n=n, low_n=n)
data = hh + ll
print(data.series[n - 1:])
print(f"{symbol(code)}:", data.series[data.series == 1])
last_high, last_low = FOURWEEKQTY(high_n=n, low_n=n)
fakedata, _ = FOURWEEKQTY.__self__.default_quantity()
for count, item in enumerate(data.tolist()):
if count >= n - 1:
if data.series[count] > 0:
self.assertTrue(fakedata.series[count] > last_high.series[count - 1],
f"{count}: { data.series[count]} --> {fakedata.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
# print()
elif data.series[count] < 0:
self.assertTrue(fakedata.series[count] < last_low.series[count - 1],
f"{count}: { data.series[count]} --> {fakedata.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
# print(fakedata.__class__)
self.assertTrue(type(fakedata) == type(CLOSE), f"type mismatch: {type(fakedata)}")
def test_four_week_399673(self):
n = 20
T("20210520")
code = "399673.XSHE"
S(code)
hh, ll = FOURWEEK(high_n=n, low_n=n)
data = hh + ll
print(data.series[n - 1:])
print(f"{symbol(code)}:", data.series[data.series == 1])
# print(data.series[hh.series == 1])
print(data.series[data.series == -1])
last_high, last_low = FOURWEEKQTY(high_n=n, low_n=n)
fakedata, _ = FOURWEEKQTY.__self__.default_quantity()
for count, item in enumerate(data.tolist()):
if count >= n - 1:
if data.series[count] > 0:
self.assertTrue(fakedata.series[count] > last_high.series[count - 1],
f"{count}: { data.series[count]} --> {fakedata.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
# print()
elif data.series[count] < 0:
self.assertTrue(fakedata.series[count] < last_low.series[count - 1],
f"{count}: { data.series[count]} --> {fakedata.series[count]}, {last_high.series[count-1]} --> {last_low.series[count-1]}")
expect_result = [ 0, 0, 0, 0, 0, 0, 1, 0, 1, 1]
self.assertListEqual(data.tolist()[-10:], expect_result, f"differs from expected: {data.tolist()[-10:]}\n{expect_result}")
|
[
"funcat.api.S",
"funcat.api.T",
"funcat.api.symbol",
"funcat.FOURWEEKQTY",
"funcat.FOURWEEK",
"funcat.api.set_current_freq",
"funcat.FOURWEEKQTY.__self__.default_quantity"
] |
[((321, 334), 'funcat.api.T', 'T', (['"""20210506"""'], {}), "('20210506')\n", (322, 334), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((343, 359), 'funcat.api.S', 'S', (['"""000001.XSHG"""'], {}), "('000001.XSHG')\n", (344, 359), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((368, 390), 'funcat.api.set_current_freq', 'set_current_freq', (['"""1d"""'], {}), "('1d')\n", (384, 390), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((479, 492), 'funcat.FOURWEEKQTY', 'FOURWEEKQTY', ([], {}), '()\n', (490, 492), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((1038, 1059), 'funcat.api.set_current_freq', 'set_current_freq', (['"""W"""'], {}), "('W')\n", (1054, 1059), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((1104, 1135), 'funcat.FOURWEEKQTY', 'FOURWEEKQTY', (['CLOSE', 'CLOSE', 'n', 'n'], {}), '(CLOSE, CLOSE, n, n)\n', (1115, 1135), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((1719, 1753), 'funcat.FOURWEEK', 'FOURWEEK', (['fakedata', 'fakedata', 'n', 'n'], {}), '(fakedata, fakedata, n, n)\n', (1727, 1753), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((1848, 1885), 'funcat.FOURWEEKQTY', 'FOURWEEKQTY', (['fakedata', 'fakedata', 'n', 'n'], {}), '(fakedata, fakedata, n, n)\n', (1859, 1885), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((2599, 2609), 'funcat.FOURWEEK', 'FOURWEEK', ([], {}), '()\n', (2607, 2609), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((2704, 2717), 'funcat.FOURWEEKQTY', 'FOURWEEKQTY', ([], {}), '()\n', (2715, 2717), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((3466, 3473), 'funcat.api.S', 'S', (['code'], {}), '(code)\n', (3467, 3473), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((3491, 3518), 'funcat.FOURWEEK', 'FOURWEEK', ([], {'high_n': 'n', 'low_n': 'n'}), '(high_n=n, low_n=n)\n', (3499, 3518), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((3672, 3702), 'funcat.FOURWEEKQTY', 'FOURWEEKQTY', ([], {'high_n': 'n', 'low_n': 'n'}), '(high_n=n, low_n=n)\n', (3683, 3702), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((3725, 3764), 'funcat.FOURWEEKQTY.__self__.default_quantity', 'FOURWEEKQTY.__self__.default_quantity', ([], {}), '()\n', (3762, 3764), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((4622, 4635), 'funcat.api.T', 'T', (['"""20210520"""'], {}), "('20210520')\n", (4623, 4635), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((4673, 4680), 'funcat.api.S', 'S', (['code'], {}), '(code)\n', (4674, 4680), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((4698, 4725), 'funcat.FOURWEEK', 'FOURWEEK', ([], {'high_n': 'n', 'low_n': 'n'}), '(high_n=n, low_n=n)\n', (4706, 4725), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((4970, 5000), 'funcat.FOURWEEKQTY', 'FOURWEEKQTY', ([], {'high_n': 'n', 'low_n': 'n'}), '(high_n=n, low_n=n)\n', (4981, 5000), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((5023, 5062), 'funcat.FOURWEEKQTY.__self__.default_quantity', 'FOURWEEKQTY.__self__.default_quantity', ([], {}), '()\n', (5060, 5062), False, 'from funcat import FOURWEEK, FOURWEEKQTY\n'), ((3594, 3606), 'funcat.api.symbol', 'symbol', (['code'], {}), '(code)\n', (3600, 3606), False, 'from funcat.api import T, S, set_current_freq, symbol, CLOSE\n'), ((4801, 4813), 'funcat.api.symbol', 'symbol', (['code'], {}), '(code)\n', (4807, 4813), False, 'from funcat.api import T, S, set_current_freq, symbol, 
CLOSE\n')]
|
import signal
class McmderError(Exception):
"""Parent of Errors in this module."""
pass
class McmdError(McmderError):
"""Raised when run() is called with check=True and the process.
returns a non-zero exit status.
Attributes:
cmd, returncode, stderr, output
"""
def __init__(self, returncode, cmd, output=None, stderr=None):
self.returncode = returncode
self.cmd = cmd
self.output = output
self.stderr = stderr
def __str__(self):
if self.returncode and self.returncode < 0:
try:
return "Command '%s' died with %r.\n %s" % (
self.cmd, signal.Signals(-self.returncode), self.stderr)
except ValueError:
return "Command '%s' died with unknown signal %d.\n %s" % (
self.cmd, -self.returncode, self.stderr)
else:
return "Command '%s' returned non-zero exit status %d.\n %s" % (
self.cmd, self.returncode, self.stderr)
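# A minimal usage sketch (hypothetical run() wrapper, not part of this module):
#   import subprocess
#   def run(cmd, check=True):
#       proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
#       if check and proc.returncode != 0:
#           raise McmdError(proc.returncode, cmd, output=proc.stdout, stderr=proc.stderr)
#       return proc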
|
[
"signal.Signals"
] |
[((667, 699), 'signal.Signals', 'signal.Signals', (['(-self.returncode)'], {}), '(-self.returncode)\n', (681, 699), False, 'import signal\n')]
|
import random
import pprint
import logging
import config
logger = logging.getLogger('root')
class AbstractStrategy():
def prepare(self, relax_operators, search_operators):
"""
prepares the strategy by providing the (non-empty) lists of relax and search operators.
needs to be called before the strategy is used
"""
if relax_operators is None or len(relax_operators) == 0:
raise ValueError('there has to be at least one relax operator')
if search_operators is None or len(search_operators) == 0:
raise ValueError('there has to be at least one search operator')
self._relax_operators = relax_operators
self._search_operators = search_operators
def get_portfolio(self):
"""
returns a tuple containing the used relax and search operators
"""
return (self._relax_operators, self._search_operators)
def select_operators(self):
"""
returns a pair of relax and search operator
"""
pass
def on_move_finished(self, operators, prev_cost, result, time_used):
"""
called after the finish of a move to allow for statistics and adaptability
"""
pass
def supports_intensification(self):
"""
whether or not the strategy supports intensification, i.e. whether the assumptions+operators are allowed to be kept until no improvement can be achieved
the default is no intensification
"""
return False
class RandomStrategy(AbstractStrategy):
def __init__(self, supports_intensification=False):
self.__supports_intensification = supports_intensification
def prepare(self, relax_operators, search_operators):
super().prepare(relax_operators, search_operators)
relax_operators = []
for op in self._relax_operators:
relax_operators += op.flatten()
self._relax_operators = relax_operators
search_operators = []
for op in self._search_operators:
search_operators += op.flatten()
self._search_operators = search_operators
logger.debug('random strategy selected')
logger.debug('relax operators: ' + str([ o.name() for o in relax_operators ]))
logger.debug('search operators: ' + str([ o.name() for o in search_operators ]))
def select_operators(self):
"""
returns a random pair of relax and search operator
"""
relax_operator = random.choice(self._relax_operators)
search_operator = random.choice(self._search_operators)
return relax_operator, search_operator
def supports_intensification(self):
return self.__supports_intensification
class DynamicStrategy(AbstractStrategy):
def __init__(self, unsat_strike_limit=3, timeout_strike_limit=1):
self.__unsat_strike_limit = unsat_strike_limit
self.__timeout_strike_limit = timeout_strike_limit
def prepare(self, relax_operators, search_operators):
super().prepare(relax_operators, search_operators)
self.__current_relax_operator = random.choice(self._relax_operators)
self.__current_search_operator = random.choice(self._search_operators)
self.__unsat_strikes = 0
self.__timeout_strikes = 0
logger.debug('dynamic strategy selected')
logger.debug('relax operators: ' + str([ o.name() for o in relax_operators ]))
logger.debug('search operators: ' + str([ o.name() for o in search_operators ]))
def select_operators(self):
return self.__current_relax_operator, self.__current_search_operator
def on_move_finished(self, operators, prev_cost, result, time_used):
if not result.sat:
if result.exhausted:
# UNSAT
self.__unsat_strikes += 1
if self.__unsat_strikes >= self.__unsat_strike_limit:
if not self.__current_relax_operator.increase_size():
self.__select_new_pair()
else:
self.__unsat_strikes = 0
logger.debug('increased relax size')
else:
# TIMEOUT
self.__timeout_strikes += 1
if self.__timeout_strikes >= self.__timeout_strike_limit:
if random.random() > 0.5:
# increase search time
if not self.__current_search_operator.increase_size():
self.__select_new_pair()
else:
self.__timeout_strikes = 0
logger.debug('increased search size')
else:
# reset relax size
self.__current_relax_operator.reset_size()
logger.debug('reset relax size')
else:
# IMPROVEMENT
self.__unsat_strikes = 0
self.__timeout_strikes = 0
def supports_intensification(self):
return True
def __select_new_pair(self):
logger.debug('selecting new operators')
if len(self._relax_operators) > 1:
relax_choices = [ o for o in self._relax_operators if o != self.__current_relax_operator ]
self.__current_relax_operator = random.choice(relax_choices)
self.__current_search_operator = random.choice(self._search_operators)
self.__current_relax_operator.reset_size()
self.__current_search_operator.reset_size()
self.__unsat_strikes = 0
self.__timeout_strikes = 0
logger.debug('relax operator: ' + self.__current_relax_operator.name())
logger.debug('search operator: ' + self.__current_search_operator.name())
class RouletteStrategy(AbstractStrategy):
def __init__(self, alpha=0.5, lex_weight=1000):
self.__alpha = alpha
self.__lex_weight = lex_weight
def prepare(self, relax_operators, search_operators):
super().prepare(relax_operators, search_operators)
relax_operators = []
for op in self._relax_operators:
relax_operators += op.flatten()
self._relax_operators = relax_operators
search_operators = []
for op in self._search_operators:
search_operators += op.flatten()
self._search_operators = search_operators
self._weights = {}
for r_op in self._relax_operators:
for s_op in self._search_operators:
self._weights[(r_op, s_op)] = 1
self._to_initialize = True
logger.debug('roulette strategy selected')
logger.debug('relax operators: ' + str([ o.name() for o in relax_operators ]))
logger.debug('search operators: ' + str([ o.name() for o in search_operators ]))
def select_operators(self):
"""
returns a pair of relax and search operators depending on the weights
"""
# logger.debug('weights: ' + str([ ((r.name(), s.name()), self._weights[(r,s)]) for r, s in self._weights ]))
# logger.debug('cummulative sum of weights: ' + str(sum(self._weights.values())))
weights = [float(w)/max(self._weights.values()) for w in self._weights.values()]
relax_operator, search_operator = random.choices(list(self._weights.keys()), weights=weights, k=1)[0]
logger.debug('selected relax operator: ' + relax_operator.name())
logger.debug('selected search operator: ' + search_operator.name())
return relax_operator, search_operator
def on_move_finished(self, operators, prev_cost, result, time_used):
cost = result.cost
if cost is not None:
if type(cost) == list:
cost = self.calculate_weighted_sum(cost)
if type(prev_cost) == list:
prev_cost = self.calculate_weighted_sum(prev_cost)
if self._to_initialize:
for s_r_pair in self._weights:
self._weights[s_r_pair] = cost
self._to_initialize = False
ratio = (cost - prev_cost) / time_used
self.update_weights(operators, ratio)
else:
self.update_weights(operators, 0)
logger.debug('roulette weights: \n' + str([ ((r.name(), s.name()), self._weights[(r,s)]) for r, s in self._weights ]))
def update_weights(self, operators, ratio):
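# ratio is (cost - prev_cost) / time_used, so an improving move (negative
# ratio) raises the weight, while a worsening or neutral move decays it
# towards the floor of 0.001.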
new_weight = (1 - self.__alpha) * self._weights[operators] - self.__alpha * ratio
if new_weight < 0.001:
new_weight = 0.001
logger.debug('updating weight of %s: %f -> %f' % ((operators[0].name(), operators[1].name()), self._weights[operators], new_weight))
self._weights[operators] = new_weight
def calculate_weighted_sum(self, costs):
size = len(costs) - 1
cost = 0
for i in range(len(costs)):
cost += costs[i] * (self.__lex_weight ** (size - i))
return cost
# Strategy Factory
def get_strategy(type, args):
"""
returns a new strategy of the given type
"""
if type == 'random':
return RandomStrategy()
elif type == 'roulette':
alpha = None
if 'alpha' in args:
alpha = args['alpha']
lex_weight = None
if 'lexWeight' in args:
lex_weight = args['lexWeight']
return RouletteStrategy(alpha=alpha, lex_weight=lex_weight)
elif type == 'dynamic':
unsat_strikes = None
if 'unsatStrikes' in args:
unsat_strikes = args['unsatStrikes']
timeout_strikes = None
if 'timeoutStrikes' in args:
timeout_strikes = args['timeoutStrikes']
return DynamicStrategy(unsat_strike_limit=unsat_strikes, timeout_strike_limit=timeout_strikes)
else:
raise ValueError("no strategy '%s'" % type)
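# A minimal usage sketch (relax_ops/search_ops are hypothetical operator lists
# supplied by the surrounding solver):
#   strategy = get_strategy('roulette', {'alpha': 0.8})
#   strategy.prepare(relax_ops, search_ops)
#   relax_op, search_op = strategy.select_operators()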
class InteractiveStrategy(AbstractStrategy):
def __init__(self, supports_intensification=False):
self.__supports_intensification = supports_intensification
def prepare(self, relax_operators, search_operators):
super().prepare(relax_operators, search_operators)
relax_operators = []
for op in self._relax_operators:
relax_operators += op.flatten()
self._relax_operators = relax_operators
search_operators = []
for op in self._search_operators:
search_operators += op.flatten()
self._search_operators = search_operators
self.__search_operator = search_operators[0]
self.__relax_operator = relax_operators[0]
logger.debug('interactive strategy selected (Ctrl-C to interrupt search)')
logger.debug('relax operators: ' + str([o.name() for o in relax_operators]))
logger.debug('search operators: ' + str([o.name() for o in search_operators]))
def select_operators(self):
"""
returns current pair of relax and search operator
"""
return self.__relax_operator, self.__search_operator
def supports_intensification(self):
return self.__supports_intensification
def set_operators(self, relax_operator, search_operator):
self.__relax_operator = relax_operator
self.__search_operator = search_operator
|
[
"random.random",
"random.choice",
"logging.getLogger"
] |
[((69, 94), 'logging.getLogger', 'logging.getLogger', (['"""root"""'], {}), "('root')\n", (86, 94), False, 'import logging\n'), ((2519, 2555), 'random.choice', 'random.choice', (['self._relax_operators'], {}), '(self._relax_operators)\n', (2532, 2555), False, 'import random\n'), ((2582, 2619), 'random.choice', 'random.choice', (['self._search_operators'], {}), '(self._search_operators)\n', (2595, 2619), False, 'import random\n'), ((3155, 3191), 'random.choice', 'random.choice', (['self._relax_operators'], {}), '(self._relax_operators)\n', (3168, 3191), False, 'import random\n'), ((3233, 3270), 'random.choice', 'random.choice', (['self._search_operators'], {}), '(self._search_operators)\n', (3246, 3270), False, 'import random\n'), ((5526, 5563), 'random.choice', 'random.choice', (['self._search_operators'], {}), '(self._search_operators)\n', (5539, 5563), False, 'import random\n'), ((5455, 5483), 'random.choice', 'random.choice', (['relax_choices'], {}), '(relax_choices)\n', (5468, 5483), False, 'import random\n'), ((4420, 4435), 'random.random', 'random.random', ([], {}), '()\n', (4433, 4435), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
import sys,os
import re
import scrapy
import logging
import urllib.parse
from scrapybot.items.freelancer import *
from scrapybot.spiders import *
from scrapybot.util import *
from scrapy.utils.misc import load_object
logger = logging.getLogger(__name__)
class FreelancerSpider(ArgsSupport, scrapy.Spider):
'''
spider for freelancer.com
'''
name = "freelancer"
allowed_domains = ["freelancer.com"]
def __init__(self, *args, **kwargs):
super(FreelancerSpider, self).__init__(*args, **kwargs)
def callback_from_url(self, url):
"""
determine parse method from url
"""
if re.search("https?://(.*).freelancer.com/jobs/.*$", url):
return self.parse_jobs_page
def parse_jobs_page(self, response):
# jobs
for sel in response.css('table.ProjectTable tbody tr'):
tds = sel.css('td')
# skip the empty one
if not tds[0].xpath('text()'):
continue
j = Job()
j['name'] = tds[0].xpath('text()').extract_first().strip()
j['description'] = tds[1].xpath('text()').extract_first()
j['bid_count'] = tds[2].xpath('text()').extract_first()
j['cat_names'] = tds[3].css('a::text').extract()
# budget column is not extracted
j['date_started'] = tds[4].xpath('text()').extract_first()
j['date_end'] = tds[5].xpath('text()').extract_first()
j['bid_avg'] = tds[6].xpath('text()').extract_first()
j['url'] = response.urljoin('/projects/%s/%s' % (j['cat_names'][0], j['name'].replace(' ', '-')))
if self.go('model', 'job', True):
yield j
# next
if self.go('next', 'jlist', True):
url = response.css('.paginate_button.next a::attr("href")').extract_first()
yield scrapy.Request(response.urljoin(url), callback=self.parse_jobs_page)
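# A hypothetical invocation from the scrapybot project root (start URLs are
# assumed to be supplied via the ArgsSupport mixin):
#   scrapy crawl freelancer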
|
[
"re.search",
"logging.getLogger"
] |
[((252, 279), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (269, 279), False, 'import logging\n'), ((667, 722), 're.search', 're.search', (['"""https?://(.*).freelancer.com/jobs/.*$"""', 'url'], {}), "('https?://(.*).freelancer.com/jobs/.*$', url)\n", (676, 722), False, 'import re\n')]
|
from django.contrib.auth.models import AbstractUser
from django.db import models
from dicom_models.staging.models import RadiologyStudy
from prioritizers import registry as prioritizers
from solo.models import SingletonModel
class StudyList(models.Model):
name = models.CharField(max_length=100, blank=True, null=True)
studies = models.ManyToManyField(RadiologyStudy)
def __unicode__(self):
if self.name:
return u'%s' % self.name
return u'Study List Object'
class Reviewer(AbstractUser):
prioritizer = models.CharField(max_length=100, blank=True, null=True,
choices=prioritizers.choices)
study_list = models.ForeignKey(StudyList, null=True, blank=True)
class Config(SingletonModel):
default_prioritizer = models.CharField(max_length=100, blank=True, null=True,
choices=prioritizers.choices)
default_study_list = models.ForeignKey(StudyList, null=True, blank=True)
def __unicode__(self):
return u'App configuration'
class Meta:
verbose_name = "App Configuration"
verbose_name_plural = "App Configuration"
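# Usage sketch: django-solo exposes the singleton via Config.get_solo(), e.g.
#   prioritizer = Config.get_solo().default_prioritizer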
|
[
"django.db.models.CharField",
"django.db.models.ManyToManyField",
"django.db.models.ForeignKey"
] |
[((268, 323), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'null': '(True)'}), '(max_length=100, blank=True, null=True)\n', (284, 323), False, 'from django.db import models\n'), ((338, 376), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['RadiologyStudy'], {}), '(RadiologyStudy)\n', (360, 376), False, 'from django.db import models\n'), ((549, 639), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'null': '(True)', 'choices': 'prioritizers.choices'}), '(max_length=100, blank=True, null=True, choices=\n prioritizers.choices)\n', (565, 639), False, 'from django.db import models\n'), ((660, 711), 'django.db.models.ForeignKey', 'models.ForeignKey', (['StudyList'], {'null': '(True)', 'blank': '(True)'}), '(StudyList, null=True, blank=True)\n', (677, 711), False, 'from django.db import models\n'), ((769, 859), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'null': '(True)', 'choices': 'prioritizers.choices'}), '(max_length=100, blank=True, null=True, choices=\n prioritizers.choices)\n', (785, 859), False, 'from django.db import models\n'), ((888, 939), 'django.db.models.ForeignKey', 'models.ForeignKey', (['StudyList'], {'null': '(True)', 'blank': '(True)'}), '(StudyList, null=True, blank=True)\n', (905, 939), False, 'from django.db import models\n')]
|
# -*- coding: utf-8 -*-
from datetime import datetime
from .media import _media_upload
from .type import RequestType
def complaint_list_query(self, begin_date=None, end_date=None, limit=10, offset=0, complainted_mchid=None):
"""查询投诉单列表
:param begin_date: 开始日期,投诉发生的开始日期,格式为YYYY-MM-DD。注意,查询日期跨度不超过30天,当前查询为实时查询。示例值:'2019-01-01'
:param end_date: 结束日期,投诉发生的结束日期,格式为YYYY-MM-DD。注意,查询日期跨度不超过30天,当前查询为实时查询。示例值:'2019-01-01'
:param limit: 分页大小,设置该次请求返回的最大投诉条数,范围【1,50】,商户自定义字段,不传默认为10。示例值:5
:param offset: 分页开始位置,该次请求的分页开始位置,从0开始计数,例如offset=10,表示从第11条记录开始返回,不传默认为0 。示例值:10
:param complainted_mchid: 被诉商户号,投诉单对应的被诉商户号。示例值:'1900012181'
"""
if not begin_date:
begin_date = datetime.now().strftime("%Y-%m-%d")
if not end_date:
end_date = begin_date
if not complainted_mchid:
complainted_mchid = self._mchid
path = '/v3/merchant-service/complaints-v2?limit=%s&offset=%s&begin_date=%s&end_date=%s&complainted_mchid=%s'
path = path % (limit, offset, begin_date, end_date, complainted_mchid)
return self._core.request(path)
def complaint_detail_query(self, complaint_id):
"""查询投诉单详情
:param complaint_id: 投诉单对应的投诉单号。示例值:'200201820200101080076610000'
"""
if not complaint_id:
raise Exception('complaint_id is not assigned.')
path = '/v3/merchant-service/complaints-v2/%s' % complaint_id
return self._core.request(path)
def complaint_history_query(self, complaint_id, limit=100, offset=0):
"""查询投诉协商历史
:param complaint_id: 投诉单对应的投诉单号。示例值:'200201820200101080076610000'
:param limit: 分页大小,设置该次请求返回的最大协商历史条数,范围[1,300],不传默认为100。。示例值:5
:param offset: 分页开始位置,该次请求的分页开始位置,从0开始计数,例如offset=10,表示从第11条记录开始返回,不传默认为0。示例值:10
"""
if not complaint_id:
raise Exception('complaint_id is not assigned.')
if limit not in range(1, 301):
limit = 100
path = '/v3/merchant-service/complaints-v2/%s/negotiation-historys?limit=%s&offset=%s' % (complaint_id, limit, offset)
return self._core.request(path)
def complaint_notification_create(self, url):
"""创建投诉通知回调地址
:param: url: 通知地址,仅支持https。示例值:'https://www.xxx.com/notify'
"""
params = {}
if url:
params.update({'url': url})
else:
raise Exception('url is not assigned.')
path = '/v3/merchant-service/complaint-notifications'
return self._core.request(path, method=RequestType.POST, data=params)
def complaint_notification_query(self):
"""查询投诉通知回调地址
:param: url: 通知地址,仅支持https。示例值:'https://www.xxx.com/notify'
"""
path = '/v3/merchant-service/complaint-notifications'
return self._core.request(path)
def complaint_notification_update(self, url):
"""更新投诉通知回调地址
:param: url: 通知地址,仅支持https。示例值:'https://www.xxx.com/notify'
"""
params = {}
if url:
params.update({'url': url})
else:
raise Exception('url is not assigned.')
path = '/v3/merchant-service/complaint-notifications'
return self._core.request(path, method=RequestType.PUT, data=params)
def complaint_notification_delete(self):
"""删除投诉通知回调地址
:param: url: 通知地址,仅支持https。示例值:'https://www.xxx.com/notify'
"""
path = '/v3/merchant-service/complaint-notifications'
return self._core.request(path, method=RequestType.DELETE)
def complaint_response(self, complaint_id, response_content, response_images=None, jump_url=None, jump_url_text=None):
"""提交投诉回复
:param complaint_id: 投诉单对应的投诉单号。示例值:'200201820200101080076610000'
:param response_content: 回复内容,具体的投诉处理方案,限制200个字符以内。示例值:'已与用户沟通解决'
:param response_images: 回复图片,传入调用商户上传反馈图片接口返回的media_id,最多上传4张图片凭证。示例值:['file23578_21798531.jpg', 'file23578_21798532.jpg']
:param jump_url: 跳转链接,附加跳转链接,引导用户跳转至商户客诉处理页面,链接需满足https格式。示例值:"https://www.xxx.com/notify"
:param jump_url_text: 转链接文案,展示给用户的文案,附在回复内容之后。用户点击文案,即可进行跳转。示例值:"查看订单详情"
"""
params = {}
if not complaint_id:
raise Exception('complaint_id is not assigned')
if response_content:
params.update({'response_content': response_content})
else:
raise Exception('response_content is not assigned')
params.update({'complainted_mchid': self._core._mchid})
if response_images:
params.update({'response_images': response_images})
if jump_url:
params.update({'jump_url': jump_url})
if jump_url_text:
params.update({'jump_url_text': jump_url_text})
path = '/v3/merchant-service/complaints-v2/%s/response' % complaint_id
return self._core.request(path, method=RequestType.POST, data=params)
def complaint_complete(self, complaint_id):
"""反馈投诉处理完成
:param complaint_id: 投诉单对应的投诉单号。示例值:'200201820200101080076610000'
"""
params = {}
if not complaint_id:
raise Exception('complaint_id is not assigned')
params.update({'complainted_mchid': self._core._mchid})
path = '/v3/merchant-service/complaints-v2/%s/complete' % complaint_id
return self._core.request(path, method=RequestType.POST, data=params)
def complaint_image_upload(self, filepath, filename=None):
"""商户上传投诉反馈图片
:param filepath: 图片文件路径
:param filename: 文件名称,未指定则从filepath参数中截取
"""
return _media_upload(self, filepath, filename, '/v3/merchant-service/images/upload')
def complaint_image_download(self, media_url):
"""下载客户投诉图片
:param media_url: 图片下载地址,示例值:'https://api.mch.weixin.qq.com/v3/merchant-service/images/xxxxx'
"""
path = media_url[len(self._core._gate_way):] if media_url.startswith(self._core._gate_way) else media_url
return self._core.request(path, skip_verify=True)
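# A minimal usage sketch (assuming these functions are bound as methods of a
# client object that provides self._core and self._mchid, as in wechatpayv3):
#   result = client.complaint_list_query(begin_date='2021-01-01', limit=20)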
|
[
"datetime.datetime.now"
] |
[((705, 719), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (717, 719), False, 'from datetime import datetime\n')]
|
import logging
from abc import ABC, abstractmethod
logger = logging.getLogger(__name__)
class PaddownException(Exception):
pass
class Paddown(ABC):
@abstractmethod
def has_valid_padding(self, ciphertext: bytes) -> bool:
"""
Override this method and send off the ciphertext to check for valid padding.
:param bytes ciphertext: The ciphertext to check, send this to your padding oracle.
:return: True for valid padding, False otherwise.
"""
raise PaddownException("Not implemented")
def __init__(self, ciphertext: bytes, blocksize: int = 16):
if not isinstance(ciphertext, bytes):
raise Exception(f"Ciphertext {type(ciphertext)} not an instance of {bytes}")
self.ciphertext = ciphertext
self.blocksize = blocksize
def find_c_prime_at_index(self, ciphertext: bytearray, index: int):
if not isinstance(ciphertext, bytearray):
raise PaddownException(f"ciphertext not an instance of {bytearray}")
# Replace ciphertext at index with a guessed byte
ciphertext_temp = ciphertext
for c_prime in range(256):
ciphertext_temp[index] = c_prime
if self.has_valid_padding(ciphertext_temp):
return c_prime
raise PaddownException(f"No valid padding found, is .has_valid_padding(...) implemented correctly?")
def decrypt_block(self, c_i):
if not isinstance(c_i, bytearray):
raise PaddownException(f"block c_i not an instance of {bytearray}")
c_previous = bytearray(b"\x00" * self.blocksize)
intermediate = bytearray(b"\x00" * self.blocksize)
for i in range(self.blocksize):
for j in range(i):
c_previous[(self.blocksize - 1) - j] = intermediate[(self.blocksize - 1) - j] ^ (i + 1)
c_prime = self.find_c_prime_at_index(c_previous + c_i, (self.blocksize - 1) - i)
intermediate[(self.blocksize - 1) - i] = c_prime ^ (i + 1)
logger.debug(f"intermediate: {[hex(x)[2:] for x in intermediate]}")
return intermediate
def get_intermediate(self, ciphertext) -> bytes:
key = b""
blocks = len(ciphertext) // self.blocksize
# Iterate blocks last to first
for i in range(blocks):
block_start = len(ciphertext) - (i + 1) * self.blocksize
block_end = len(ciphertext) - (i * self.blocksize)
key = self.decrypt_block(ciphertext[block_start:block_end]) + key
return key
def decrypt(self) -> bytes:
logger.debug(f"Ciphertext length: {len(self.ciphertext)}")
logger.debug(f"Blocks to decrypt: {len(self.ciphertext) // self.blocksize}")
# Convert self.ciphertext to mutable bytearray
self.ciphertext = bytearray(self.ciphertext)
key = self.get_intermediate(self.ciphertext)
plaintext = bytearray()
for i in range(len(self.ciphertext) - self.blocksize):
b = self.ciphertext[i] ^ key[i + self.blocksize]
plaintext += (b).to_bytes(1, byteorder="big")
return plaintext
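

# --- Usage sketch (illustrative; assumes the third-party `pycryptodome`
# package). A local AES-CBC padding oracle shows how Paddown is meant to be
# subclassed; the key, IV and plaintext below are made up for the demo, and
# the classic caveat applies: roughly 1 in 256 guesses can hit a false
# positive on the last padding byte.
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad


class LocalOracle(Paddown):
    KEY = b"0123456789abcdef"

    def has_valid_padding(self, ciphertext: bytes) -> bool:
        # Treat the first block as the IV, matching decrypt() above.
        iv, body = bytes(ciphertext[:16]), bytes(ciphertext[16:])
        try:
            unpad(AES.new(self.KEY, AES.MODE_CBC, iv=iv).decrypt(body), 16)
            return True
        except ValueError:
            return False


if __name__ == "__main__":
    iv = b"\x00" * 16
    ct = iv + AES.new(LocalOracle.KEY, AES.MODE_CBC, iv=iv).encrypt(pad(b"attack at dawn", 16))
    print(LocalOracle(ct).decrypt())  # PKCS#7 padding bytes still attached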
|
[
"logging.getLogger"
] |
[((61, 88), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (78, 88), False, 'import logging\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 25 13:53:31 2018
@author: alechat
"""
import os, sys
import warnings
if os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) not in sys.path:
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
import numpy as np
import datetime as dt
from keras.callbacks import ModelCheckpoint, Callback
from keras import backend as K
from keras.models import load_model
from keras.utils.generic_utils import get_custom_objects
from keras import metrics
from DeepDeconv.utils.batch_utils import get_batch_from_fits, dynamic_batches, npy_batches
import tensorflow as tf
#Used for loading the model
import json
import h5py
import keras.optimizers as optimizers
from keras.layers import Input
from keras.utils.io_utils import H5Dict
# Write to a file
def write_log(s, filetxt):
with open(filetxt, 'a') as f:
f.write(s)
f.write("\n")
class LoggingCallback(Callback):
"""Callback that logs message at end of epoch."""
def __init__(self, filetxt='log.txt', log=write_log):
Callback.__init__(self)
self.log = log
self.filetxt = filetxt
    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
msg = dt.datetime.now().strftime('%Y-%m-%d_%H:%M:%S - ') + str("Epoch: %i, "%(epoch+1)) + str(", ".join("%s: %f" % (k, v) for k, v in logs.items()))
self.log(msg, self.filetxt)
print(msg)
class ModelCheckpointExtraSave(ModelCheckpoint):
"""ModelCheckpoint wiht extra information."""
def __init__(self, filepath, monitor='val_loss', verbose=0,
save_best_only=False, save_weights_only=False,
mode='auto',nepochs=1, period=1,best_epoch=None,best_val=None):
super(ModelCheckpointExtraSave, self).__init__(filepath, monitor=monitor, verbose=verbose,
save_best_only=save_best_only, save_weights_only=save_weights_only,
mode=mode, period=period)
self.nepochs=nepochs
        if (best_epoch is not None) and (best_val is not None):
self.best=best_val
self.best_epoch=best_epoch
def on_epoch_end(self,epoch,logs=None):
"""This is essentially the same as ModelCheckpoint, except for the 2 np.savetxt"""
self.epochs_since_last_save += 1
if self.epochs_since_last_save >= self.period:
self.epochs_since_last_save = 0
filepath = self.filepath.format(epoch=epoch + 1, **logs)
if self.save_best_only:
current = logs.get(self.monitor)
if current is None:
warnings.warn('Can save best model only with %s available, '
'skipping.' % (self.monitor), RuntimeWarning)
else:
if self.monitor_op(current, self.best):
if self.verbose > 0:
print('\nEpoch %05d: %s improved from %0.5f to %0.5f,'
' saving model to %s'
% (epoch + 1, self.monitor, self.best,
current, filepath))
self.best = current
self.best_epoch=epoch + 1
if self.save_weights_only:
self.model.save_weights(filepath, overwrite=True)
else:
self.model.save(filepath, overwrite=True)
np.savetxt(filepath+".best_params",np.asarray([self.best_epoch,self.best,self.nepochs]))
else:
if self.verbose > 0:
print('\nEpoch %05d: %s did not improve from %0.5f' %
(epoch + 1, self.monitor, self.best))
else:
if self.verbose > 0:
print('\nEpoch %05d: saving model to %s' % (epoch + 1, filepath))
if self.save_weights_only:
self.model.save_weights(filepath, overwrite=True)
else:
self.model.save(filepath, overwrite=True)
self.best_epoch=epoch + 1
np.savetxt(filepath+".best_params",np.asarray([self.best_epoch,self.best,nmax_epoch,self.nepochs]))
#%%DATA INITIALIZATION
import numpy as np
from AlphaTransform import AlphaShearletTransform as AST
import shape_constraint.cadmos_lib as cl
import os
row,column = np.array([96,96])
U = cl.makeUi(row,column)
# Get shearlet elements
#Step 1 : create a shearlet transform instance
trafo = AST(column, row, [0.5]*3,real=True,parseval=True,verbose=False)
#Step 2 : get shearlets filters
shearlets = trafo.shearlets
#Step 3 : get the adjoints
adjoints = cl.get_adjoint_coeff(trafo)
#Normalize shearlets filter banks
#/!\ The order is important/!\
adjoints = cl.shear_norm(adjoints,shearlets)
shearlets = cl.shear_norm(shearlets,shearlets)
#Compute moments constraint normalization coefficients
#the $\Psi^*_j$ are noted adj_U
adj_U = cl.comp_adj(U,adjoints).reshape(6,27,1,96,96,1)
mu = cl.comp_mu(adj_U)
def custom_loss(y_true, y_pred):
weights = y_true[:,:,:,1]
y_true = y_true[:,:,:,0]
return K.mean(K.tf.multiply(weights, K.square(y_pred - y_true)), axis=-1)
def swish(x):
return (K.sigmoid(x) * x)
get_custom_objects().update({'swish': swish})
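# With 'swish' registered above it can be referenced by name like a built-in
# activation; a hypothetical layer for illustration:
#   from keras.layers import Dense
#   hidden = Dense(64, activation='swish')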
def get_model_memory_usage(batch_size, model):
'''Compute memory usage for the model and one batch of data'''
shapes_mem_count = 0
for l in model.layers:
single_layer_mem = 1
for s in l.output_shape:
if s is None:
continue
single_layer_mem *= s
shapes_mem_count += single_layer_mem
trainable_count = np.sum([K.count_params(p) for p in set(model.trainable_weights)])
non_trainable_count = np.sum([K.count_params(p) for p in set(model.non_trainable_weights)])
total_memory = 4.0*batch_size*(shapes_mem_count + trainable_count + non_trainable_count)
gbytes = np.round(total_memory / (1024.0 ** 3), 3)
return gbytes
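# Worked example (hypothetical numbers): the 4.0 factor assumes float32
# (4 bytes per value); note the implementation scales *both* activation and
# weight counts by batch_size. For 250e6 total values at batch_size=32:
#   4.0 * 32 * 250e6 / 1024**3  ->  ~29.8 GB
#   gb = get_model_memory_usage(32, net.model)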
def makeU1(n,m):
"""Create a n x m numpy array with (i)_{i,j} entries where i is the ith
line and j is the jth column
INPUT: n positive integer (number of lines),
m positive (integer number of columns)
OUTPUT: n x m numpy array"""
U1 = np.tile(np.arange(n),(m,1)).T
return U1
def makeU3(n,m):
"""Create a n x m numpy array with (1)_{i,j} entries where i is the ith
line and j is the jth column
INPUT: n positive integer (number of lines),
m positive (integer number of columns)
OUTPUT: n x m numpy array"""
U3 = np.ones((n,m))
U3=add_extra_dimension(U3)
return U3
def makeU6(n,m):
"""Create a n x m numpy array with (i*j)_{i,j} entries where i is the ith
line and j is the jth column
INPUT: n positive integer (number of lines),
m positive (integer number of columns)
OUTPUT: n x m numpy array"""
U6 = np.outer(np.arange(n),np.arange(m))
U6=add_extra_dimension(U6)
return U6
def add_extra_dimension(U1):
lns=tuple(list(np.shape(U1))+[1])
return np.reshape(U1,lns)
def makeUi(n,m):
"""Create a 6 x n x m numpy array containing U1, U2, U3, U4, U5 and U6
INPUT: n positive integer (number of lines),
m positive (integer number of columns)
OUTPUT: 6 x n x m numpy array"""
U1 = makeU1(n,m)
Ul = U1**2
Uc = Ul.T
U1T=U1.T
U1=add_extra_dimension(U1)
U1T=add_extra_dimension(U1T)
Uc=add_extra_dimension(Uc)
Ul=add_extra_dimension(Ul)
return np.array([U1,U1T,makeU3(n,m),Ul+Uc,Ul-Uc,makeU6(n,m)])
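# Quick sanity check of the construction above (exact values; shapes carry
# the trailing channel axis added by add_extra_dimension):
#   makeUi(2, 2).shape        == (6, 2, 2, 1)
#   makeUi(2, 2)[0][..., 0]   == [[0, 0], [1, 1]]   # U1: the row index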
class DeepNet(object):
def __init__(self, network_name = 'DNN', img_rows = 96, img_cols = 96, model_file='', verbose=False,shape_constraint=False, gamma=0,shearlet=False):
self.network_name = network_name
self.img_rows = img_rows
self.img_cols = img_cols
self.U=makeUi(img_rows,img_cols)
self.shape_constraint=shape_constraint
self.gamma=gamma
self.model_file=model_file
self.shearlet=shearlet
self.build_model(model_file, verbose)
def build_model(self, model_file = '', verbose = False):
if model_file == '':
raise ValueError('No model provided')
else:
print('Loading model...')
print(model_file)
print('Renaming as...')
if self.network_name=="DNN":
new_name=model_file.rsplit(".hdf5")[0]
self.network_name = new_name
print(self.network_name)
if self.shearlet:
#Load the structure of the model
custom_objects={'shearlet_loss': self.shearlet_loss,'shearlet_metric':self.shearlet_metric}
self.model = load_model(model_file, custom_objects=custom_objects,compile=True)
if not self.shearlet and not self.shape_constraint:
self.model = load_model(model_file,compile=True)
if self.shape_constraint:
#Load the structure of the model
custom_objects={'shape_loss': self.shape_loss,'shape_metric':self.shape_metric}
self.model = load_model(model_file, custom_objects=custom_objects,compile=False)
                #Input placeholder layers that are not connected to the output are not saved in the model file. Need to add them back and register them
#START with window
window_layer=Input(shape=(self.img_rows, self.img_cols,1),name='window')
self.model.inputs.append(window_layer)
self.model.input_names.append("window")
self.model._feed_inputs.append(window_layer)
self.model._feed_input_names.append("window")
self.model._feed_input_shapes.append(K.int_shape(window_layer))
#Then with norm
norm_layer=Input(shape=(6, 1,1),name='norm')
self.model.inputs.append(norm_layer)
self.model.input_names.append("norm")
self.model._feed_inputs.append(norm_layer)
self.model._feed_input_names.append("norm")
self.model._feed_input_shapes.append(K.int_shape(norm_layer))
#Now we need to compile the model
def convert_custom_objects(obj):
"""Handles custom object lookup.
# Arguments
obj: object, dict, or list.
# Returns
The same structure, where occurrences
of a custom object name have been replaced
with the custom object.
"""
if isinstance(obj, list):
deserialized = []
for value in obj:
deserialized.append(convert_custom_objects(value))
return deserialized
if isinstance(obj, dict):
deserialized = {}
for key, value in obj.items():
deserialized[key] = convert_custom_objects(value)
return deserialized
if obj in custom_objects:
return custom_objects[obj]
return obj
#Now we update all optimization parameters (compile=True)
h5dict=H5Dict(model_file)
training_config = h5dict.get('training_config')
if training_config is None:
warnings.warn('No training configuration found in save file: '
'the model was *not* compiled. '
'Compile it manually.')
else:
training_config = json.loads(training_config.decode('utf-8'))
optimizer_config = training_config['optimizer_config']
optimizer = optimizers.deserialize(optimizer_config,
custom_objects=custom_objects)
# Recover loss functions and metrics.
loss = convert_custom_objects(training_config['loss'])
net_metrics = convert_custom_objects(training_config['metrics'])
if len(net_metrics)==0:
net_metrics=[metrics.mse,self.shape_metric]
sample_weight_mode = training_config['sample_weight_mode']
loss_weights = training_config['loss_weights']
# Compile model.
self.model.compile(optimizer=optimizer,
loss=loss,
weighted_metrics=net_metrics,
loss_weights=loss_weights,
sample_weight_mode=sample_weight_mode)
# Set optimizer weights.
if 'optimizer_weights' in h5dict:
# Build train function (to get weight updates).
self.model._make_train_function()
optimizer_weights_group = h5dict['optimizer_weights']
optimizer_weight_names = [
n.decode('utf8') for n in
optimizer_weights_group['weight_names']]
optimizer_weight_values = [optimizer_weights_group[n] for n in
optimizer_weight_names]
try:
self.model.optimizer.set_weights(optimizer_weight_values)
except ValueError:
warnings.warn('Error in loading the saved optimizer '
'state. As a result, your model is '
'starting with a freshly initialized '
'optimizer.')
if verbose:
print(self.model.summary())
def train(self, train_data, model_file = '', epochs=20, batch_size=32, validation_split=0.1, logfile='log.txt'):
#TO BE UPDATED SOME TIMES FOR SHAPE CONSTRAINT
if self.model is None:
raise Exception("No model found, please use build_model()")
if model_file == '':
model_file = self.network_name + '.hdf5'
print('Model will be saved at %s/%s'%(os.getcwd(), model_file))
model_checkpoint = ModelCheckpoint(model_file, monitor='val_loss',verbose=1, save_best_only=True)
print('Fitting model...')
self.model.fit(train_data[0], train_data[1], batch_size=batch_size, epochs=epochs, verbose=1,
validation_split=validation_split, shuffle=True,
callbacks=[model_checkpoint, LoggingCallback(filetxt=logfile, log=write_log)])
def train_generator(self, train_files, validation_file, epochs=20, batch_size=32, model_file = '',
nb_img_per_file=10000, validation_set_size=10000,
noise_std=None, SNR=None,
noiseless_img_hdu=0, targets_hdu=2, psf_hdu=1,
image_dim=96, image_per_row=100,
deconv_mode=None, rho_fista=1e-3,
risktype="GCV",reg="Dirac",reg_frac=1.0,tol=1e-12,
win_filename=None, win_hdu=0, mom_hdu=1,
logfile='log.txt',win_validation_filename=None,initial_epoch=0,keep_best_loss=False):
if self.model is None:
raise Exception("No model found, please use build_model()")
if model_file == '':
model_file = self.network_name + '.hdf5'
print('Model will be saved at %s/%s'%(os.getcwd(), model_file))
print('Memory usage for the model + one batch (GB): %f'%(get_model_memory_usage(batch_size, self.model)))
with open(logfile, 'a') as f:
f.write(self.network_name)
f.write("\n")
validation_data = get_batch_from_fits(validation_file,
idx_list=np.arange(validation_set_size),
noise_std=noise_std, SNR=SNR,
noiseless_img_hdu=noiseless_img_hdu,
targets_hdu=targets_hdu, psf_hdu=psf_hdu,
image_dim=image_dim, image_per_row=image_per_row,
deconv_mode=deconv_mode, rho_fista=rho_fista,
risktype=risktype,reg=reg,tol=tol,shape_constraint=self.shape_constraint,
win_filename=win_validation_filename, win_hdu=0,mom_hdu=1)
samples_per_epoch = int(len(train_files)*np.ceil(nb_img_per_file/batch_size))
if keep_best_loss:
best_params_file=self.model_file.replace(".hdf5",".hdf5.best_params")
if os.path.isfile(best_params_file):
best_epoch,best_val,nepoch=np.loadtxt(best_params_file)
print("Current best_parameters:",int(best_epoch),best_val)
model_checkpoint = ModelCheckpointExtraSave(model_file, monitor='val_loss', verbose=1, save_best_only=True,nepochs=epochs,best_epoch=int(best_epoch),best_val=best_val)
else:
print("Cannot have access to best parameters for monitor for checkpoint")
model_checkpoint = ModelCheckpointExtraSave(model_file, monitor='val_loss', verbose=1, save_best_only=True,nepochs=epochs)
else:
print("Not using any previous monitored value for checkpoint")
model_checkpoint = ModelCheckpointExtraSave(model_file, monitor='val_loss', verbose=1, save_best_only=True,nepochs=epochs)
gen = dynamic_batches(train_files, batch_size=batch_size, nb_img_per_file=nb_img_per_file,
noise_std=noise_std, SNR=SNR, noiseless_img_hdu=noiseless_img_hdu,
targets_hdu=targets_hdu, psf_hdu=psf_hdu,
image_dim=image_dim, image_per_row=image_per_row,
deconv_mode=deconv_mode, rho_fista=rho_fista,
risktype=risktype,reg=reg,tol=tol,reg_frac=reg_frac,
shape_constraint = self.shape_constraint,
win_filename=win_filename, win_hdu=0,mom_hdu=1)
history = self.model.fit_generator(gen, samples_per_epoch=samples_per_epoch, epochs=epochs,
validation_data=validation_data, verbose=1,
callbacks=[model_checkpoint, LoggingCallback(filetxt=logfile, log=write_log)],
initial_epoch=initial_epoch)
return history
def train_generator_npy(self, train_files, validation_file, epochs=20, batch_size=32,
nb_img_per_file=10000, model_file = '', logfile='log.txt'):
#TO BE UPDATED SOME TIMES FOR SHAPE CONSTRAINT
if self.model is None:
raise Exception("No model found, please use build_model()")
if model_file == '':
model_file = self.network_name + '.hdf5'
print('Model will be saved at %s/%s'%(os.getcwd(), model_file))
print('Memory usage for the model + one batch (GB): %f'%(get_model_memory_usage(batch_size, self.model)))
with open(logfile, 'a') as f:
f.write(self.network_name)
f.write("\n")
validation_data = np.load(validation_file)
samples_per_epoch = int(len(train_files)*np.ceil(nb_img_per_file/batch_size))
model_checkpoint = ModelCheckpoint(model_file, monitor='val_loss', verbose=1, save_best_only=True)
gen = npy_batches(train_files, batch_size=batch_size, nb_img_per_file=nb_img_per_file)
history = self.model.fit_generator(gen, samples_per_epoch=samples_per_epoch, epochs=epochs, validation_data=validation_data, verbose=1, callbacks=[model_checkpoint, LoggingCallback(write_log)])
return history
def predict(self, test_data, verbose=1):
if self.model is None:
raise Exception("No model found, please use build_model()")
output_test = self.model.predict(test_data, batch_size=1, verbose=verbose)
return output_test
def get_layer_output(self, test_data, layer_idx):
if self.model is None:
raise Exception("No model found, please use build_model()")
get_output = K.function([self.model.layers[0].input], [self.model.layers[layer_idx].output])
return get_output([test_data])[0]
def shape_loss(self,y_true,y_pred):
#window = y_true[1]
#mu=y_true[2]
#print(K.int_shape(y_true),K.int_shape(y_pred))
residual=y_true-y_pred
M=K.mean(K.square(y_true-y_pred), axis=-1)
window=self.model.input[1]
mu=self.model.input[2]
print(M[0],window,mu,residual)
#print(K.eval(mu[0,0,:,:]))
#M1=K.eval(M[0])
#print("MSE=",K.int_shape(M))
#print(self.model.input[1])
#print(self.model.input[2])
#print('WIN=',K.int_shape(window),'\n','RES=',K.int_shape(residual),'\n','U=',np.shape((self.U)[0]),'\n','MU=',K.int_shape(mu[:,0,:,:]))
#print(K.sum(y_true * y_pred, axis=-1))
#for i in range(6):
#M=M+self.gamma*mu[:,i,:,:]*K.square(K.sum(residual*window*self.U[i],axis=-1))/2.0
temp=0
for i in range(6):
temp+=self.gamma*mu[:,i,0,0]*(K.square((K.sum((residual)*window*self.U[i],axis=(1,2,3)))))/2
#print("MSE+SHAPE",K.int_shape(K.expand_dims(temp, axis=-1)))
temp=temp/(self.img_rows*self.img_cols)
temp=K.expand_dims((K.expand_dims(temp, axis=-1)),axis=-1)
#print(M1,'\n',"MSE+SHAPE",K.int_shape(M1))
return M+temp
# np.array(
# [m*((residual*window*u).sum())**2
# for m,u in zip(mu,self.U)])/2.).sum()
def shape_metric(self,y_true,y_pred):
        residual=y_true-y_pred
        window=self.model.input[1]
        mu=self.model.input[2]
        temp=0
for i in range(6):
temp=temp+self.gamma*mu[:,i,0,0]*(K.square((K.sum(residual*window*self.U[i],axis=(1,2,3)))))/2
temp=temp/(self.img_rows*self.img_cols)
temp=K.expand_dims((K.expand_dims(temp, axis=-1)),axis=-1)
return temp
def shearlet_loss(self,ytrue,ypred):
@tf.custom_gradient
def closs(ypred):
residual=ypred-ytrue
temp=0
temp_grad=0
loss=K.mean(K.square(ytrue-ypred),axis=-1)
print('loss',K.int_shape(loss))
for i in range(6):
for j in range(27):
temp+=mu[i,j]*K.square(K.sum(residual*adj_U[i,j],axis=(1,2,3)))
temp_grad+=mu[i,j]*K.sum(residual*adj_U[i,j],axis=(1,2,3))*adj_U[i,j]
temp=temp*self.gamma/(self.img_rows*self.img_cols)
temp_grad=temp_grad*self.gamma/(self.img_rows*self.img_cols)
temp=K.expand_dims((K.expand_dims(temp, axis=-1)),axis=-1)
temp_grad=K.permute_dimensions(temp_grad,(3,1,2,0))
print('temp',K.int_shape(temp))
loss+=temp
def grad(dy):
return (2*(ypred-ytrue)+temp_grad)*K.expand_dims(dy,axis=-1)
return loss,grad
        loss=closs(ypred)
        return loss
def shearlet_metric(self, ytrue,ypred):
residual=ypred-ytrue
temp=0
for i in range(6):
for j in range(27):
temp+=mu[i,j]*K.square(K.sum(residual*adj_U[i,j],axis=(1,2,3)))
temp=temp*self.gamma/(self.img_rows*self.img_cols)
temp=K.expand_dims((K.expand_dims(temp, axis=-1)),axis=-1)
return temp
def custom_mse_3(self,y_true,y_pred):
print(K.int_shape(y_true),K.int_shape(y_pred))
@tf.custom_gradient
def closs(y_pred):
loss=K.square(y_true-y_pred)
def grad(dy):
print(K.int_shape(dy))
return 2*dy*(y_pred-y_true)
print(K.int_shape(loss))
return loss,grad
return closs(y_pred)
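# --- Usage sketch (illustrative; the file names and hyper-parameters below
# are made up, and a previously saved .hdf5 model is required since
# build_model() refuses an empty model_file) ---
if __name__ == '__main__':
    net = DeepNet(network_name='DNN', model_file='unet.hdf5', verbose=True)
    net.train_generator(train_files=['train_000.fits'],
                        validation_file='val.fits',
                        epochs=5, batch_size=32,
                        model_file='unet_finetuned.hdf5')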
|
[
"keras.models.load_model",
"keras.utils.io_utils.H5Dict",
"numpy.load",
"numpy.ones",
"keras.utils.generic_utils.get_custom_objects",
"numpy.shape",
"os.path.isfile",
"numpy.arange",
"shape_constraint.cadmos_lib.shear_norm",
"keras.layers.Input",
"numpy.round",
"AlphaTransform.AlphaShearletTransform",
"keras.backend.permute_dimensions",
"shape_constraint.cadmos_lib.comp_mu",
"numpy.reshape",
"DeepDeconv.utils.batch_utils.dynamic_batches",
"numpy.loadtxt",
"keras.optimizers.deserialize",
"datetime.datetime.now",
"shape_constraint.cadmos_lib.comp_adj",
"DeepDeconv.utils.batch_utils.npy_batches",
"numpy.ceil",
"keras.callbacks.ModelCheckpoint",
"keras.backend.expand_dims",
"os.path.realpath",
"keras.backend.function",
"shape_constraint.cadmos_lib.makeUi",
"numpy.asarray",
"keras.callbacks.Callback.__init__",
"keras.backend.sigmoid",
"keras.backend.int_shape",
"shape_constraint.cadmos_lib.get_adjoint_coeff",
"os.getcwd",
"keras.backend.sum",
"keras.backend.count_params",
"numpy.array",
"keras.backend.square"
] |
[((4534, 4552), 'numpy.array', 'np.array', (['[96, 96]'], {}), '([96, 96])\n', (4542, 4552), True, 'import numpy as np\n'), ((4556, 4578), 'shape_constraint.cadmos_lib.makeUi', 'cl.makeUi', (['row', 'column'], {}), '(row, column)\n', (4565, 4578), True, 'import shape_constraint.cadmos_lib as cl\n'), ((4658, 4726), 'AlphaTransform.AlphaShearletTransform', 'AST', (['column', 'row', '([0.5] * 3)'], {'real': '(True)', 'parseval': '(True)', 'verbose': '(False)'}), '(column, row, [0.5] * 3, real=True, parseval=True, verbose=False)\n', (4661, 4726), True, 'from AlphaTransform import AlphaShearletTransform as AST\n'), ((4820, 4847), 'shape_constraint.cadmos_lib.get_adjoint_coeff', 'cl.get_adjoint_coeff', (['trafo'], {}), '(trafo)\n', (4840, 4847), True, 'import shape_constraint.cadmos_lib as cl\n'), ((4925, 4959), 'shape_constraint.cadmos_lib.shear_norm', 'cl.shear_norm', (['adjoints', 'shearlets'], {}), '(adjoints, shearlets)\n', (4938, 4959), True, 'import shape_constraint.cadmos_lib as cl\n'), ((4971, 5006), 'shape_constraint.cadmos_lib.shear_norm', 'cl.shear_norm', (['shearlets', 'shearlets'], {}), '(shearlets, shearlets)\n', (4984, 5006), True, 'import shape_constraint.cadmos_lib as cl\n'), ((5155, 5172), 'shape_constraint.cadmos_lib.comp_mu', 'cl.comp_mu', (['adj_U'], {}), '(adj_U)\n', (5165, 5172), True, 'import shape_constraint.cadmos_lib as cl\n'), ((6087, 6126), 'numpy.round', 'np.round', (['(total_memory / 1024.0 ** 3)', '(3)'], {}), '(total_memory / 1024.0 ** 3, 3)\n', (6095, 6126), True, 'import numpy as np\n'), ((6728, 6743), 'numpy.ones', 'np.ones', (['(n, m)'], {}), '((n, m))\n', (6735, 6743), True, 'import numpy as np\n'), ((7218, 7237), 'numpy.reshape', 'np.reshape', (['U1', 'lns'], {}), '(U1, lns)\n', (7228, 7237), True, 'import numpy as np\n'), ((1126, 1149), 'keras.callbacks.Callback.__init__', 'Callback.__init__', (['self'], {}), '(self)\n', (1143, 1149), False, 'from keras.callbacks import ModelCheckpoint, Callback\n'), ((5102, 5126), 'shape_constraint.cadmos_lib.comp_adj', 'cl.comp_adj', (['U', 'adjoints'], {}), '(U, adjoints)\n', (5113, 5126), True, 'import shape_constraint.cadmos_lib as cl\n'), ((5371, 5383), 'keras.backend.sigmoid', 'K.sigmoid', (['x'], {}), '(x)\n', (5380, 5383), True, 'from keras import backend as K\n'), ((5390, 5410), 'keras.utils.generic_utils.get_custom_objects', 'get_custom_objects', ([], {}), '()\n', (5408, 5410), False, 'from keras.utils.generic_utils import get_custom_objects\n'), ((7067, 7079), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (7076, 7079), True, 'import numpy as np\n'), ((7080, 7092), 'numpy.arange', 'np.arange', (['m'], {}), '(m)\n', (7089, 7092), True, 'import numpy as np\n'), ((14544, 14623), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['model_file'], {'monitor': '"""val_loss"""', 'verbose': '(1)', 'save_best_only': '(True)'}), "(model_file, monitor='val_loss', verbose=1, save_best_only=True)\n", (14559, 14623), False, 'from keras.callbacks import ModelCheckpoint, Callback\n'), ((17948, 18411), 'DeepDeconv.utils.batch_utils.dynamic_batches', 'dynamic_batches', (['train_files'], {'batch_size': 'batch_size', 'nb_img_per_file': 'nb_img_per_file', 'noise_std': 'noise_std', 'SNR': 'SNR', 'noiseless_img_hdu': 'noiseless_img_hdu', 'targets_hdu': 'targets_hdu', 'psf_hdu': 'psf_hdu', 'image_dim': 'image_dim', 'image_per_row': 'image_per_row', 'deconv_mode': 'deconv_mode', 'rho_fista': 'rho_fista', 'risktype': 'risktype', 'reg': 'reg', 'tol': 'tol', 'reg_frac': 'reg_frac', 'shape_constraint': 'self.shape_constraint', 
'win_filename': 'win_filename', 'win_hdu': '(0)', 'mom_hdu': '(1)'}), '(train_files, batch_size=batch_size, nb_img_per_file=\n nb_img_per_file, noise_std=noise_std, SNR=SNR, noiseless_img_hdu=\n noiseless_img_hdu, targets_hdu=targets_hdu, psf_hdu=psf_hdu, image_dim=\n image_dim, image_per_row=image_per_row, deconv_mode=deconv_mode,\n rho_fista=rho_fista, risktype=risktype, reg=reg, tol=tol, reg_frac=\n reg_frac, shape_constraint=self.shape_constraint, win_filename=\n win_filename, win_hdu=0, mom_hdu=1)\n', (17963, 18411), False, 'from DeepDeconv.utils.batch_utils import get_batch_from_fits, dynamic_batches, npy_batches\n'), ((19705, 19729), 'numpy.load', 'np.load', (['validation_file'], {}), '(validation_file)\n', (19712, 19729), True, 'import numpy as np\n'), ((19843, 19922), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['model_file'], {'monitor': '"""val_loss"""', 'verbose': '(1)', 'save_best_only': '(True)'}), "(model_file, monitor='val_loss', verbose=1, save_best_only=True)\n", (19858, 19922), False, 'from keras.callbacks import ModelCheckpoint, Callback\n'), ((19937, 20022), 'DeepDeconv.utils.batch_utils.npy_batches', 'npy_batches', (['train_files'], {'batch_size': 'batch_size', 'nb_img_per_file': 'nb_img_per_file'}), '(train_files, batch_size=batch_size, nb_img_per_file=nb_img_per_file\n )\n', (19948, 20022), False, 'from DeepDeconv.utils.batch_utils import get_batch_from_fits, dynamic_batches, npy_batches\n'), ((20681, 20760), 'keras.backend.function', 'K.function', (['[self.model.layers[0].input]', '[self.model.layers[layer_idx].output]'], {}), '([self.model.layers[0].input], [self.model.layers[layer_idx].output])\n', (20691, 20760), True, 'from keras import backend as K\n'), ((5307, 5332), 'keras.backend.square', 'K.square', (['(y_pred - y_true)'], {}), '(y_pred - y_true)\n', (5315, 5332), True, 'from keras import backend as K\n'), ((5826, 5843), 'keras.backend.count_params', 'K.count_params', (['p'], {}), '(p)\n', (5840, 5843), True, 'from keras import backend as K\n'), ((5918, 5935), 'keras.backend.count_params', 'K.count_params', (['p'], {}), '(p)\n', (5932, 5935), True, 'from keras import backend as K\n'), ((6423, 6435), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (6432, 6435), True, 'import numpy as np\n'), ((17096, 17128), 'os.path.isfile', 'os.path.isfile', (['best_params_file'], {}), '(best_params_file)\n', (17110, 17128), False, 'import os\n'), ((21019, 21044), 'keras.backend.square', 'K.square', (['(y_true - y_pred)'], {}), '(y_true - y_pred)\n', (21027, 21044), True, 'from keras import backend as K\n'), ((21938, 21966), 'keras.backend.expand_dims', 'K.expand_dims', (['temp'], {'axis': '(-1)'}), '(temp, axis=-1)\n', (21951, 21966), True, 'from keras import backend as K\n'), ((22574, 22602), 'keras.backend.expand_dims', 'K.expand_dims', (['temp'], {'axis': '(-1)'}), '(temp, axis=-1)\n', (22587, 22602), True, 'from keras import backend as K\n'), ((23378, 23423), 'keras.backend.permute_dimensions', 'K.permute_dimensions', (['temp_grad', '(3, 1, 2, 0)'], {}), '(temp_grad, (3, 1, 2, 0))\n', (23398, 23423), True, 'from keras import backend as K\n'), ((24020, 24048), 'keras.backend.expand_dims', 'K.expand_dims', (['temp'], {'axis': '(-1)'}), '(temp, axis=-1)\n', (24033, 24048), True, 'from keras import backend as K\n'), ((24144, 24163), 'keras.backend.int_shape', 'K.int_shape', (['y_true'], {}), '(y_true)\n', (24155, 24163), True, 'from keras import backend as K\n'), ((24164, 24183), 'keras.backend.int_shape', 'K.int_shape', (['y_pred'], {}), '(y_pred)\n', 
(24175, 24183), True, 'from keras import backend as K\n'), ((24257, 24282), 'keras.backend.square', 'K.square', (['(y_true - y_pred)'], {}), '(y_true - y_pred)\n', (24265, 24282), True, 'from keras import backend as K\n'), ((175, 201), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (191, 201), False, 'import os\n'), ((7188, 7200), 'numpy.shape', 'np.shape', (['U1'], {}), '(U1)\n', (7196, 7200), True, 'import numpy as np\n'), ((8913, 8980), 'keras.models.load_model', 'load_model', (['model_file'], {'custom_objects': 'custom_objects', 'compile': '(True)'}), '(model_file, custom_objects=custom_objects, compile=True)\n', (8923, 8980), False, 'from keras.models import load_model\n'), ((9073, 9109), 'keras.models.load_model', 'load_model', (['model_file'], {'compile': '(True)'}), '(model_file, compile=True)\n', (9083, 9109), False, 'from keras.models import load_model\n'), ((9321, 9389), 'keras.models.load_model', 'load_model', (['model_file'], {'custom_objects': 'custom_objects', 'compile': '(False)'}), '(model_file, custom_objects=custom_objects, compile=False)\n', (9331, 9389), False, 'from keras.models import load_model\n'), ((9576, 9637), 'keras.layers.Input', 'Input', ([], {'shape': '(self.img_rows, self.img_cols, 1)', 'name': '"""window"""'}), "(shape=(self.img_rows, self.img_cols, 1), name='window')\n", (9581, 9637), False, 'from keras.layers import Input\n'), ((10019, 10054), 'keras.layers.Input', 'Input', ([], {'shape': '(6, 1, 1)', 'name': '"""norm"""'}), "(shape=(6, 1, 1), name='norm')\n", (10024, 10054), False, 'from keras.layers import Input\n'), ((11587, 11605), 'keras.utils.io_utils.H5Dict', 'H5Dict', (['model_file'], {}), '(model_file)\n', (11593, 11605), False, 'from keras.utils.io_utils import H5Dict\n'), ((16191, 16221), 'numpy.arange', 'np.arange', (['validation_set_size'], {}), '(validation_set_size)\n', (16200, 16221), True, 'import numpy as np\n'), ((16935, 16972), 'numpy.ceil', 'np.ceil', (['(nb_img_per_file / batch_size)'], {}), '(nb_img_per_file / batch_size)\n', (16942, 16972), True, 'import numpy as np\n'), ((17174, 17202), 'numpy.loadtxt', 'np.loadtxt', (['best_params_file'], {}), '(best_params_file)\n', (17184, 17202), True, 'import numpy as np\n'), ((19779, 19816), 'numpy.ceil', 'np.ceil', (['(nb_img_per_file / batch_size)'], {}), '(nb_img_per_file / batch_size)\n', (19786, 19816), True, 'import numpy as np\n'), ((22833, 22856), 'keras.backend.square', 'K.square', (['(ytrue - ypred)'], {}), '(ytrue - ypred)\n', (22841, 22856), True, 'from keras import backend as K\n'), ((22889, 22906), 'keras.backend.int_shape', 'K.int_shape', (['loss'], {}), '(loss)\n', (22900, 22906), True, 'from keras import backend as K\n'), ((23317, 23345), 'keras.backend.expand_dims', 'K.expand_dims', (['temp'], {'axis': '(-1)'}), '(temp, axis=-1)\n', (23330, 23345), True, 'from keras import backend as K\n'), ((23445, 23462), 'keras.backend.int_shape', 'K.int_shape', (['temp'], {}), '(temp)\n', (23456, 23462), True, 'from keras import backend as K\n'), ((24408, 24425), 'keras.backend.int_shape', 'K.int_shape', (['loss'], {}), '(loss)\n', (24419, 24425), True, 'from keras import backend as K\n'), ((290, 316), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (306, 316), False, 'import os\n'), ((9933, 9958), 'keras.backend.int_shape', 'K.int_shape', (['window_layer'], {}), '(window_layer)\n', (9944, 9958), True, 'from keras import backend as K\n'), ((10343, 10366), 'keras.backend.int_shape', 'K.int_shape', (['norm_layer'], {}), 
'(norm_layer)\n', (10354, 10366), True, 'from keras import backend as K\n'), ((12125, 12196), 'keras.optimizers.deserialize', 'optimizers.deserialize', (['optimizer_config'], {'custom_objects': 'custom_objects'}), '(optimizer_config, custom_objects=custom_objects)\n', (12147, 12196), True, 'import keras.optimizers as optimizers\n'), ((15830, 15841), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (15839, 15841), False, 'import os\n'), ((23566, 23592), 'keras.backend.expand_dims', 'K.expand_dims', (['dy'], {'axis': '(-1)'}), '(dy, axis=-1)\n', (23579, 23592), True, 'from keras import backend as K\n'), ((24329, 24344), 'keras.backend.int_shape', 'K.int_shape', (['dy'], {}), '(dy)\n', (24340, 24344), True, 'from keras import backend as K\n'), ((1263, 1280), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (1278, 1280), True, 'import datetime as dt\n'), ((4302, 4368), 'numpy.asarray', 'np.asarray', (['[self.best_epoch, self.best, nmax_epoch, self.nepochs]'], {}), '([self.best_epoch, self.best, nmax_epoch, self.nepochs])\n', (4312, 4368), True, 'import numpy as np\n'), ((14491, 14502), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (14500, 14502), False, 'import os\n'), ((19436, 19447), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (19445, 19447), False, 'import os\n'), ((21739, 21791), 'keras.backend.sum', 'K.sum', (['(residual * window * self.U[i])'], {'axis': '(1, 2, 3)'}), '(residual * window * self.U[i], axis=(1, 2, 3))\n', (21744, 21791), True, 'from keras import backend as K\n'), ((23892, 23937), 'keras.backend.sum', 'K.sum', (['(residual * adj_U[i, j])'], {'axis': '(1, 2, 3)'}), '(residual * adj_U[i, j], axis=(1, 2, 3))\n', (23897, 23937), True, 'from keras import backend as K\n'), ((22446, 22498), 'keras.backend.sum', 'K.sum', (['(residual * window * self.U[i])'], {'axis': '(1, 2, 3)'}), '(residual * window * self.U[i], axis=(1, 2, 3))\n', (22451, 22498), True, 'from keras import backend as K\n'), ((23018, 23063), 'keras.backend.sum', 'K.sum', (['(residual * adj_U[i, j])'], {'axis': '(1, 2, 3)'}), '(residual * adj_U[i, j], axis=(1, 2, 3))\n', (23023, 23063), True, 'from keras import backend as K\n'), ((23098, 23143), 'keras.backend.sum', 'K.sum', (['(residual * adj_U[i, j])'], {'axis': '(1, 2, 3)'}), '(residual * adj_U[i, j], axis=(1, 2, 3))\n', (23103, 23143), True, 'from keras import backend as K\n'), ((3584, 3638), 'numpy.asarray', 'np.asarray', (['[self.best_epoch, self.best, self.nepochs]'], {}), '([self.best_epoch, self.best, self.nepochs])\n', (3594, 3638), True, 'import numpy as np\n')]
|
import sqlite3
from sqlite3 import Error
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
class SqliteHelper:
def __init__(self, file):
self.__file = file
self.__conn = None
def connect(self):
try:
self.__conn = sqlite3.connect(self.__file)
self.__conn.row_factory = dict_factory
print("Connection is established: Sqlite Database is created ")
return self.__conn
        except Error as e:
            print("Connection failed:", e)
def execute(self, sql):
cursorobj = self.__conn.cursor()
cursorobj.execute(sql)
self.__conn.commit()
cursorobj.close()
def fetch(self, sql):
cursorobj = self.__conn.cursor()
cursorobj.execute(sql)
rows = cursorobj.fetchall()
cursorobj.close()
return rows
def close(self):
if self.__conn is not None:
self.__conn.close()
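# --- Usage sketch ---
# An in-memory database keeps the example self-contained; dict_factory makes
# fetch() return a list of dicts keyed by column name.
if __name__ == '__main__':
    db = SqliteHelper(':memory:')
    db.connect()
    db.execute('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)')
    db.execute("INSERT INTO users (name) VALUES ('alice')")
    print(db.fetch('SELECT * FROM users'))  # [{'id': 1, 'name': 'alice'}]
    db.close()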
|
[
"sqlite3.connect"
] |
[((349, 377), 'sqlite3.connect', 'sqlite3.connect', (['self.__file'], {}), '(self.__file)\n', (364, 377), False, 'import sqlite3\n')]
|
import torch
import torch.nn.utils.prune as prune
import numpy as np
from copy import deepcopy
import sys
sys.path.append('../src')
from train import train
from evaluate import test
class PruneModel:
def __init__(self, network, batch_size, train_loader, val_loader, test_loader, optimizer, epochs, scheduler, device, pruning_rounds=1):
"""
Class for pruning a model.
Args:
network (nn.Module): the network/model to be pruned
pruning_rounds (int): the number of rounds in iterative pruning (1 if One Shot pruning)
"""
self.network = network
self.original_network = deepcopy(network)
print('ORIGINAL NETWORK:')
for name, param in self.original_network.named_parameters():
print(name, param)
break
self.batch_size = batch_size
self.train_loader = train_loader
self.val_loader = val_loader
self.test_loader = test_loader
self.optimizer = optimizer
self.epochs = epochs
self.scheduler = scheduler
self.device = device
self.pruning_rounds = pruning_rounds
self.layers = []
self.masks = {}
self.p = 0.002 # p as in the paper
self.pruning_rate = self.p ** (1/self.pruning_rounds)
self.percent_remaining_weights_list = []
# "Connections to outputs are pruned at half the rate of the rest of the network"
# todo: not sure if this is done the same as the paper
# should it be self.pruning_rate_output_layer = self.pruning_rate*2.0?
self.p_output_layer = self.p*2.0
self.pruning_rate_output_layer = self.p_output_layer ** (1/self.pruning_rounds)
print(self.pruning_rate_output_layer)
self.percent_remaining_weights_output_layer_list = []
# predetermine % of weights at each pruning iteration
for i in range(self.pruning_rounds+1):
self.percent_remaining_weights_list.append(self.pruning_rate ** i)
self.percent_remaining_weights_output_layer_list.append(self.pruning_rate_output_layer ** i)
# print('\nRemaining weights: {}'.format(self.percent_remaining_weights_list))
# print('Remaining weights output layer: {}'.format(self.percent_remaining_weights_output_layer_list))
def prune(self):
"""
Prune a network for pruning_rounds # of iterations. This function is the main driver of pruning, calling other
functions such as _compute_masks, _apply_masks, and _retrain.
"""
for pruning_iteration in range(self.pruning_rounds):
print('-'*30)
print('Pruning iteration:', pruning_iteration)
print('-' * 30)
print()
# print('Percent Remain:', self.percent_remaining_weights_list[pruning_iteration])
# print('Percent Remain:', self.percent_remaining_weights_output_layer_list[pruning_iteration])
# compute masks
self.masks = self._compute_masks()
# reinit
self.network = self._reinitialize(random=False)
# apply the masks
self._apply_masks()
# verifying correct amount of parameters were pruned and correct amount is remaining
self._test_pruning(pruning_iteration)
# retrain after prune
self._retrain()
def _compute_masks(self):
"""
Computes masks on self.network for a given iteration
Returns:
masks (Dict[str, torch.Tensor]: the masks for each layer
a tensor of 0s and 1s having the same dimension as the parameter
"""
masks = {}
for idx, (name, param) in enumerate(self.network.named_parameters()):
# todo: check linear, conv, etc. (isinstance())
# todo: random sparse networks, so prune randomly, not based on magnitude
if 'weight' in name:
# get unpruned weights (nonzero)
unpruned_weights = param[param != 0]
# not output layer
if idx < len(list(self.network.named_parameters())) - 1:
num_to_keep = int(self.pruning_rate * len(unpruned_weights))
# output layer
else:
num_to_keep = int(self.pruning_rate_output_layer * len(unpruned_weights))
# find largest magnitude weights
topk = torch.topk(torch.abs(param).view(-1), k=num_to_keep, largest=True)
# create mask, keep largest magnitude weights by setting them to 1
# remove smallest magnitude weights by setting them to 0
mask = torch.zeros_like(param)
mask.view(-1)[topk.indices] = 1
masks[name] = mask
return masks
def _reinitialize(self, random=False):
"""
Reinitialize the parameters. If random=True, reinitialize the parameters randomly
Else: reinitialize parameters to original parameters (theta_0 in the paper)
"""
if random:
# create another instance of the neural network model class (randomly reinit)
network_class = self.network.__class__
new_random_network = network_class().to(self.device)
return new_random_network
else:
# reinit to original weights
return deepcopy(self.original_network)
def _apply_masks(self):
"""
Applies masks to self.network parameters.
        e.g. if this is a parameter [.1, -.2, .3, -.15, .05] and its mask is [0, 1, 0, 1, 1],
        the result is [0, -.2, 0, -.15, .05]
"""
for name, param in self.network.named_parameters():
if name in self.masks.keys():
param.requires_grad_(requires_grad=False)
param.mul_(self.masks[name])
param.requires_grad_(requires_grad=True)
# print(name)
# print(param)
# print(self.masks[name])
# print()
def _test_pruning(self, pruning_iteration):
"""
Verify correct amount of weights have been pruned
"""
for idx, (name, param) in enumerate(self.network.named_parameters()):
if name in self.masks.keys():
# not output layer
if idx < len(list(self.network.named_parameters())) - 1:
theoretical_unpruned = int(
(self.pruning_rate ** (pruning_iteration + 1) * len(param.view(-1)))
)
# output layer
else:
theoretical_unpruned = int(
(self.pruning_rate_output_layer ** (pruning_iteration + 1) * len(param.view(-1)))
)
actual_unpruned_param = len(param[param != 0])
actual_nonzero_mask = torch.sum(self.masks[name])
# all these should tell us how many weights/params still remain at a given pruning iteration
diff = (theoretical_unpruned - actual_unpruned_param)
assert (abs(diff) < 3)
diff2 = (actual_unpruned_param - actual_nonzero_mask)
assert (abs(diff2) < 3)
def _retrain(self):
"""
Retrains the network after pruning and weight reinitialization.
"""
# run the training loop
for epoch in range(1, self.epochs + 1):
stop, stopping_iteration = train(
self.network, self.device, self.train_loader, self.val_loader, self.test_loader, self.optimizer, epoch
)
self.scheduler.step()
# test after each epoch
test(self.network, self.device, self.test_loader)
if stop:
print('Stopped at overall iteration {}\n'.format(
stopping_iteration + ((len(self.train_loader.dataset) / self.batch_size) * (epoch - 1))))
break
# if save_model:
# torch.save(model.state_dict(), model.__class__.__name__ + '_' + dataset + ".pt")
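# --- Usage sketch (illustrative; the model, loaders, optimizer and scheduler
# are assumed to come from the surrounding project, e.g. the ../src modules
# imported above) ---
# pruner = PruneModel(network=model, batch_size=64,
#                     train_loader=train_loader, val_loader=val_loader,
#                     test_loader=test_loader, optimizer=optimizer,
#                     epochs=10, scheduler=scheduler, device='cuda',
#                     pruning_rounds=5)   # 5 iterative rounds; 1 = one-shot
# pruner.prune()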
|
[
"sys.path.append",
"copy.deepcopy",
"torch.zeros_like",
"evaluate.test",
"torch.abs",
"train.train",
"torch.sum"
] |
[((107, 132), 'sys.path.append', 'sys.path.append', (['"""../src"""'], {}), "('../src')\n", (122, 132), False, 'import sys\n'), ((644, 661), 'copy.deepcopy', 'deepcopy', (['network'], {}), '(network)\n', (652, 661), False, 'from copy import deepcopy\n'), ((5405, 5436), 'copy.deepcopy', 'deepcopy', (['self.original_network'], {}), '(self.original_network)\n', (5413, 5436), False, 'from copy import deepcopy\n'), ((7528, 7642), 'train.train', 'train', (['self.network', 'self.device', 'self.train_loader', 'self.val_loader', 'self.test_loader', 'self.optimizer', 'epoch'], {}), '(self.network, self.device, self.train_loader, self.val_loader, self.\n test_loader, self.optimizer, epoch)\n', (7533, 7642), False, 'from train import train\n'), ((7752, 7801), 'evaluate.test', 'test', (['self.network', 'self.device', 'self.test_loader'], {}), '(self.network, self.device, self.test_loader)\n', (7756, 7801), False, 'from evaluate import test\n'), ((4692, 4715), 'torch.zeros_like', 'torch.zeros_like', (['param'], {}), '(param)\n', (4708, 4715), False, 'import torch\n'), ((6931, 6958), 'torch.sum', 'torch.sum', (['self.masks[name]'], {}), '(self.masks[name])\n', (6940, 6958), False, 'import torch\n'), ((4456, 4472), 'torch.abs', 'torch.abs', (['param'], {}), '(param)\n', (4465, 4472), False, 'import torch\n')]
|
#File Read Write Functions
#Python v3.7.1
#Date: 12/09/18
#Importing the operating system for file handling functions
import os
#Checks to see if a file exists
def fileExists(filePath):
return os.path.exists(filePath)
#Reads from a specified file
def readFile(filePath):
data = ''
if not fileExists(filePath):
print('The file,', filePath, 'does not exist - cannot read it.')
else:
fileHandle = open(filePath, 'r')
data = fileHandle.read()
fileHandle.close()
return data
#Write to a specified file
def writeFile(filePath, textToWrite):
fileHandle = open(filePath, 'w')
fileHandle.write(textToWrite)
fileHandle.close()
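#Example round trip (names are illustrative)
#   writeFile('demo.txt', 'hello')
#   print(readFile('demo.txt'))  #prints 'hello'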
|
[
"os.path.exists"
] |
[((199, 223), 'os.path.exists', 'os.path.exists', (['filePath'], {}), '(filePath)\n', (213, 223), False, 'import os\n')]
|
"""
Models for test execution (runs, results).
"""
import datetime
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.db import connection, transaction, models
from django.db.models import Q, Count, Max
from model_utils import Choices
from ..mtmodel import MTModel, TeamModel, DraftStatusModel
from ..core.auth import User
from ..core.models import ProductVersion
from ..environments.models import Environment, HasEnvironmentsModel
from ..library.models import CaseVersion, Suite, CaseStep
class Run(MTModel, TeamModel, DraftStatusModel, HasEnvironmentsModel):
"""A test run."""
productversion = models.ForeignKey(ProductVersion, related_name="runs")
name = models.CharField(max_length=200)
description = models.TextField(blank=True)
start = models.DateField(default=datetime.date.today)
end = models.DateField(blank=True, null=True)
build = models.TextField(null=True, blank=True)
is_series = models.BooleanField(default=False)
series = models.ForeignKey("self", null=True, blank=True)
caseversions = models.ManyToManyField(
CaseVersion, through="RunCaseVersion", related_name="runs")
suites = models.ManyToManyField(
Suite, through="RunSuite", related_name="runs")
def __unicode__(self):
"""Return unicode representation."""
return self.name
def clean(self):
"""Validate instance field values."""
if self.end is not None and self.start > self.end:
raise ValidationError("Start date must be prior to end date.")
class Meta:
permissions = [("manage_runs", "Can add/edit/delete test runs.")]
@property
def parent(self):
return self.productversion
@classmethod
def cascade_envs_to(cls, objs, adding):
if adding:
return {}
return {RunCaseVersion: RunCaseVersion.objects.filter(run__in=objs)}
def clone(self, *args, **kwargs):
"""Clone this Run with default cascade behavior."""
kwargs.setdefault(
"cascade", ["runsuites", "environments", "team"])
overrides = kwargs.setdefault("overrides", {})
overrides["status"] = self.STATUS.draft
overrides.setdefault("name", "Cloned: {0}".format(self.name))
return super(Run, self).clone(*args, **kwargs)
def clone_for_series(self, *args, **kwargs):
"""Clone this Run to create a new series item."""
build = kwargs.pop("build", None)
kwargs.setdefault(
"cascade", ["runsuites", "environments", "team"])
overrides = kwargs.setdefault("overrides", {})
overrides.setdefault("name", "{0} - Build: {1}".format(
self.name, build))
overrides["status"] = self.STATUS.draft
overrides.setdefault("is_series", False)
overrides.setdefault("build", build)
overrides.setdefault("series", self)
overrides.setdefault(
"start",
datetime.date.today().strftime("%Y-%m-%d"),
)
return super(Run, self).clone(*args, **kwargs)
def activate(self, *args, **kwargs):
"""Make run active, locking in runcaseversions for all suites."""
if self.status == self.STATUS.draft:
self.update_case_versions()
super(Run, self).activate(*args, **kwargs)
def refresh(self, *args, **kwargs):
"""Update all the runcaseversions while the run is active."""
if self.status == self.STATUS.active:
self.update_case_versions()
def update_case_versions(self):
"""
Update the runcaseversions with any changes to suites.
This can happen while the run is still active.
"""
# we don't need all the runcaseversions for a series. It is the
# series member runs that will use them. So only lock the caseversions
# if this is NOT a series.
if not self.is_series:
self._lock_case_versions()
@transaction.commit_on_success
def _lock_case_versions(self):
"""
Select caseversions from suites, create runcaseversions.
WARNING: Testing this code in the PyCharm debugger will give an
incorrect number of queries, because for the debugger to show all the
information it wants, it must do queries itself. When testing with
assertNumQueries, don't use the PyCharm debugger.
"""
# get the list of environments for this run
run_env_ids = self.environments.values_list("id", flat=True)
# make a list of cvs in order by RunSuite, then SuiteCase.
# This list is built from the run / suite / env combination and has
# no knowledge of any possibly existing runcaseversions yet.
if len(run_env_ids):
cursor = connection.cursor()
sql = """SELECT DISTINCT cv.id as id
FROM execution_run as r
INNER JOIN execution_runsuite as rs
ON rs.run_id = r.id
INNER JOIN library_suitecase as sc
ON rs.suite_id = sc.suite_id
INNER JOIN library_suite as s
ON sc.suite_id = s.id
INNER JOIN library_caseversion as cv
ON cv.case_id = sc.case_id
AND cv.productversion_id = r.productversion_id
INNER JOIN library_caseversion_environments as cve
ON cv.id = cve.caseversion_id
WHERE cv.status = 'active'
AND cv.deleted_on IS NULL
AND s.status = 'active'
AND rs.run_id = {0}
AND cve.environment_id IN ({1})
ORDER BY rs.order, sc.order
""".format(self.id, ",".join(map(str, run_env_ids)))
cursor.execute(sql)
cv_list = [x[0] for x in cursor.fetchall()]
# @@@ do we need to check for duplicates?
# use itertools.unique_everseen
#if len(set(cv_list)) != len(cv_list):
# cv_list = itertools.unique_everseen(cv_list)
else:
cv_list = []
# delete rcvs that we won't be needing anymore
self._delete_runcaseversions(cv_list)
# audit for duplicate rcvs for the same cv.id
dups = self.runcaseversions.values("caseversion_id").annotate(
num_records=Count("caseversion")).filter(num_records__gt=1)
if len(dups) > 0:
for dup in dups:
# get the runcaseversions, and sort descending by the id
# of the results. So the first one is the one with the latest
# result. We keep that one and delete the rest.
rcv_to_save = self.runcaseversions.annotate(
latest_result=Max("results__id")).filter(
caseversion=dup["caseversion_id"]).order_by(
"-latest_result")[0]
self.runcaseversions.filter(
caseversion=dup["caseversion_id"]).exclude(
id=rcv_to_save.id).delete()
# remaining rcvs should be ones we want to keep, and we need to inject
# those ids into the insert/update list for bulk_insert. So create
# a dict mapping cv_id: rcv_id. If one exists, its order field will
# be updated in the build_update cmd.
existing_rcv_map = {}
for map_item in self.runcaseversions.values("id", "caseversion_id"):
existing_rcv_map[map_item["caseversion_id"]] = map_item["id"]
# build the list of rcvs that we DO need. Be sure to include the ids
# for rcvs that already exist so that we will just be updating the
# order and not replacing it. We will use a special manager that does
# an update on insert error.
# runcaseversion objects we will use to bulk create
rcv_to_update = []
rcv_proxies_to_create = []
order = 1
for cv in cv_list:
if cv in existing_rcv_map:
# we will just update the order value
rcv_to_update.append({"caseversion_id": cv, "order": order})
else:
# we need to create a new one
kwargs = {
"run_id": self.id,
"caseversion_id": cv,
"order": order
}
rcv_proxies_to_create.append(RunCaseVersion(**kwargs))
order += 1
# update existing rcvs
for rcv in rcv_to_update:
self.runcaseversions.filter(
caseversion=rcv["caseversion_id"]).update(order=rcv["order"])
# insert these rcvs in bulk
self._bulk_insert_new_runcaseversions(rcv_proxies_to_create)
self._bulk_update_runcaseversion_environments_for_lock()
self._lock_caseversions_complete()
def _delete_runcaseversions(self, cv_list):
"""Hook to delete runcaseversions we know we don't need anymore."""
self.runcaseversions.exclude(caseversion__in=cv_list).delete(
permanent=True)
def _bulk_insert_new_runcaseversions(self, rcv_proxies):
"""Hook to bulk-insert runcaseversions we know we DO need."""
self.runcaseversions.bulk_create(rcv_proxies)
def _bulk_update_runcaseversion_environments_for_lock(self):
"""
update runcaseversion_environment records with latest state.
Approach:
do another raw sql query to get all existing_rcv_envs for this run
existing_rcv_envs - needed_rcv_envs = list to delete (no longer needed)
needed_rcv_envs - existing_rcv_envs = list to create
build a list of RunCaseVersion_environment objects
and use bulk_create.
"""
# re-query all the rcvs (including newly created) for this run
final_rcvs = RunCaseVersion.objects.filter(run=self).select_related(
"caseversion").prefetch_related("caseversion__environments")
final_rcv_ids = [x.id for x in final_rcvs]
# runcaseversion_environments that were there prior to our changes
prev_rcv_envs_set = set(RunCaseVersion.environments.through.objects.filter(
runcaseversion_id__in=final_rcv_ids).values_list(
"runcaseversion_id", "environment_id"))
# runcaseversion_environment objects we will use to bulk create
# loop through all cvs and fetch the env intersection with this run
needed_rcv_envs_tuples = []
run_env_ids = set(
self.environments.values_list("id", flat=True))
for rcv in final_rcvs:
case_env_ids = set([x.id for x in rcv.caseversion.environments.all()])
for env in run_env_ids.intersection(case_env_ids):
needed_rcv_envs_tuples.append((rcv.id, env))
needed_rcv_envs_set = set(needed_rcv_envs_tuples)
# get the set of rcv_envs we need to delete because they don't belong
# to the needed set.
delete_rcv_envs = prev_rcv_envs_set - needed_rcv_envs_set
if len(delete_rcv_envs):
delquery = Q()
for combo in delete_rcv_envs:
delquery = delquery | Q(
**{"runcaseversion_id": combo[0],
"environment_id": combo[1]})
RunCaseVersion.environments.through.objects.filter(delquery).delete()
# get the set of rcv_envs we need to create that don't already exist
needed_rcv_envs_set = needed_rcv_envs_set - prev_rcv_envs_set
# build all the objects to pass to bulk_create
needed_rcv_envs = [RunCaseVersion.environments.through(
runcaseversion_id=needed[0],
environment_id=needed[1]) for needed in needed_rcv_envs_set]
RunCaseVersion.environments.through.objects.bulk_create(needed_rcv_envs)
def _lock_caseversions_complete(self):
"""Hook for doing any post-processing after doing the rcv lock."""
pass
def result_summary(self):
"""Return a dict summarizing status of results."""
return result_summary(Result.objects.filter(runcaseversion__run=self))
def completion(self):
"""
Return fraction of case/env combos that have a completed result.
Have to specify deleted_on=None for the through, because the
default manager doesn't go through our MT model manager.
"""
total = RunCaseVersion.environments.through.objects.filter(
runcaseversion__run=self,
runcaseversion__deleted_on=None,
).count()
skipped = Result.objects.filter(
runcaseversion__run=self,
is_latest=True,
status=Result.STATUS.skipped).count()
completed = Result.objects.filter(
status__in=Result.COMPLETED_STATES,
is_latest=True,
runcaseversion__run=self).values(
"runcaseversion", "environment").distinct().count()
try:
return float(completed) / (total - skipped)
except ZeroDivisionError:
return 0
def completion_single_env(self, env_id):
"""Return fraction of cases that have a completed result for an env."""
total = RunCaseVersion.objects.filter(
environments=env_id,
run=self).count()
skipped = Result.objects.filter(
runcaseversion__run=self,
environment=env_id,
is_latest=True,
status=Result.STATUS.skipped).count()
completed = Result.objects.filter(
status__in=Result.COMPLETED_STATES,
is_latest=True,
environment=env_id,
runcaseversion__run=self).values(
"runcaseversion", "environment").distinct().count()
try:
return float(completed) / (total - skipped)
except ZeroDivisionError:
return 0.0
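# Worked example for the two completion fractions above (numbers made up):
# with 10 case/env combos of which 2 are skipped and 6 completed, the result
# is 6 / (10 - 2) = 0.75; if every remaining combo were skipped the division
# would hit zero, hence the ZeroDivisionError guards.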
def _environment_intersection(run, caseversion):
"""Intersection of run/caseversion environment IDs."""
run_env_ids = set(
run.environments.values_list("id", flat=True))
case_env_ids = set(
caseversion.environments.values_list("id", flat=True))
return run_env_ids.intersection(case_env_ids)
class RunCaseVersion(HasEnvironmentsModel, MTModel):
"""
An ordered association between a Run and a CaseVersion.
RunCaseVersion objects are created to lock in the specific case-versions in
a run when the run is activated.
"""
run = models.ForeignKey(Run, related_name="runcaseversions")
caseversion = models.ForeignKey(CaseVersion, related_name="runcaseversions")
order = models.IntegerField(default=0, db_index=True)
def __unicode__(self):
"""Return unicode representation."""
return "Case '%s' included in run '%s'" % (self.caseversion, self.run)
def bug_urls(self):
"""Returns set of bug URLs associated with this run/caseversion."""
return set(
StepResult.objects.filter(
result__runcaseversion=self).exclude(
bug_url="").values_list("bug_url", flat=True).distinct()
)
class Meta:
ordering = ["order"]
permissions = [
("execute", "Can run tests and report results."),
]
def save(self, *args, **kwargs):
"""
Save instance; new instances get intersection of run/case environments.
"""
adding = False
if self.id is None:
adding = True
inherit_envs = kwargs.pop("inherit_envs", True)
ret = super(RunCaseVersion, self).save(*args, **kwargs)
if adding and inherit_envs:
self.environments.add(
*_environment_intersection(self.run, self.caseversion))
return ret
def result_summary(self):
"""Return a dict summarizing status of results."""
return result_summary(self.results.all())
def completion(self):
"""Return fraction of environments that have a completed result."""
total = self.environments.count()
skipped = self.results.filter(
is_latest=True,
status=Result.STATUS.skipped).count()
completed = self.results.filter(
is_latest=True,
status__in=Result.COMPLETED_STATES).values(
"environment").distinct().count()
try:
return float(completed) / (total - skipped)
except ZeroDivisionError:
return 0.0
def testers(self):
"""Return list of testers with assigned / executed results."""
return User.objects.filter(
pk__in=self.results.values_list("tester", flat=True).distinct())
def start(self, environment=None, user=None):
"""Mark this result started."""
# if we are restarting a case that was skipped, we want to restart
# for ALL envs, not just this one.
envs = [environment]
try:
latest = self.results.get(
is_latest=True,
tester=user,
environment=environment,
)
if latest.status == Result.STATUS.skipped:
envs = self.environments.all()
except ObjectDoesNotExist:
pass
for env in envs:
Result.objects.create(
runcaseversion=self,
tester=user,
environment=env,
status=Result.STATUS.started,
user=user
)
def get_result_method(self, status):
"""Find the appropriate result generator for the given status."""
status_methods = {
"passed": self.result_pass,
"failed": self.result_fail,
"invalidated": self.result_invalid,
"blocked": self.result_block,
"skipped": self.result_skip,
}
return status_methods[status]
def result_pass(self, environment=None, user=None):
"""Create a passed result for this case."""
Result.objects.create(
runcaseversion=self,
tester=user,
environment=environment,
status=Result.STATUS.passed,
user=user
)
def result_skip(self, environment=None, user=None):
"""
Create a skipped result for this case.
If no environment is specified, then skip for all envs.
"""
envs = self.environments.all()
for env in envs:
Result.objects.create(
runcaseversion=self,
tester=user,
environment=env,
status=Result.STATUS.skipped,
user=user
)
def result_invalid(self, environment=None, comment="", user=None):
"""Create an invalidated result for this case."""
Result.objects.create(
runcaseversion=self,
tester=user,
environment=environment,
status=Result.STATUS.invalidated,
comment=comment,
user=user,
)
def result_block(self, environment=None, comment="", user=None):
"""Create an invalidated result for this case."""
Result.objects.create(
runcaseversion=self,
tester=user,
environment=environment,
status=Result.STATUS.blocked,
comment=comment,
user=user,
)
def result_fail(self, environment=None, comment="", stepnumber=None, bug="", user=None):
"""Create a failed result for this case."""
result = Result.objects.create(
runcaseversion=self,
tester=user,
environment=environment,
status=Result.STATUS.failed,
comment=comment,
user=user,
)
if stepnumber is not None:
try:
step = self.caseversion.steps.get(
number=stepnumber)
except CaseStep.DoesNotExist:
pass
else:
stepresult = StepResult(result=result, step=step)
stepresult.status = StepResult.STATUS.failed
stepresult.bug_url = bug
stepresult.save(user=user)
self.save(force_update=True, user=user)
class RunSuite(MTModel):
"""
An ordered association between a Run and a Suite.
The only direct impact of RunSuite instances is that they determine which
RunCaseVersions (and in what order) are created when the run is activated.
"""
run = models.ForeignKey(Run, related_name="runsuites")
suite = models.ForeignKey(Suite, related_name="runsuites")
order = models.IntegerField(default=0, db_index=True)
def __unicode__(self):
"""Return unicode representation."""
return "Suite '%s' included in run '%s'" % (self.suite, self.run)
class Meta:
ordering = ["order"]
class Result(MTModel):
"""A result of a User running a RunCaseVersion in an Environment."""
STATUS = Choices("assigned", "started", "passed", "failed", "invalidated",
"blocked", "skipped")
REVIEW = Choices("pending", "reviewed")
ALL_STATES = STATUS._full
PENDING_STATES = [STATUS.assigned, STATUS.started]
COMPLETED_STATES = [STATUS.passed, STATUS.failed, STATUS.invalidated,
STATUS.blocked]
FAILED_STATES = [STATUS.failed, STATUS.blocked]
tester = models.ForeignKey(User, related_name="results")
runcaseversion = models.ForeignKey(
RunCaseVersion, related_name="results")
environment = models.ForeignKey(Environment, related_name="results")
status = models.CharField(
max_length=50, db_index=True, choices=STATUS, default=STATUS.assigned)
comment = models.TextField(blank=True)
is_latest = models.BooleanField(default=True)
review = models.CharField(
max_length=50, db_index=True, choices=REVIEW, default=REVIEW.pending)
reviewed_by = models.ForeignKey(
User, related_name="reviews", blank=True, null=True)
def __unicode__(self):
"""Return unicode representation."""
return "%s, run by %s in %s: %s" % (
self.runcaseversion, self.tester, self.environment, self.status)
class Meta:
permissions = [("review_results", "Can review/edit test results.")]
def bug_urls(self):
"""Returns set of bug URLs associated with this result."""
return set(
self.stepresults.exclude(
bug_url="").values_list("bug_url", flat=True).distinct()
)
def save(self, *args, **kwargs):
if self.pk is None:
self.set_latest()
super(Result, self).save(*args, **kwargs)
def set_latest(self):
"""
Set this result to latest, and unset all others with this env/user/rcv
"""
Result.objects.filter(
tester=self.tester,
runcaseversion=self.runcaseversion,
environment=self.environment,
is_latest=True,
).exclude(pk=self.pk).update(is_latest=False)
self.is_latest = True
class StepResult(MTModel):
"""A result of a particular step in a test case."""
STATUS = Choices("passed", "failed", "invalidated", "skipped", "blocked")
result = models.ForeignKey(Result, related_name="stepresults")
step = models.ForeignKey(CaseStep, related_name="stepresults")
status = models.CharField(
max_length=50, db_index=True, choices=STATUS, default=STATUS.passed)
bug_url = models.URLField(blank=True)
def __unicode__(self):
"""Return unicode representation."""
return "%s (%s: %s)" % (self.result, self.step, self.status)
def result_summary(results):
"""
Given a queryset of results, return a dict summarizing their states.
"""
states = Result.COMPLETED_STATES
result_ids = results.filter(is_latest=True).values_list("id", flat=True)
if not result_ids:
return dict((s, 0) for s in states)
cols = ["COUNT(CASE WHEN status=%s THEN 1 ELSE NULL END)"] * len(states)
sql = "SELECT {0} FROM {1} WHERE id IN ({2})".format(
",".join(cols), Result._meta.db_table, ",".join(map(str, result_ids))
)
cursor = connection.cursor()
cursor.execute(sql, states)
return dict(zip(states, cursor.fetchone()))
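# Illustrative sketch (assumption, not part of the original module): the raw
# SQL above is a one-row pivot counting the latest results per completed
# status. A pure-Python equivalent over an iterable of status strings:
#
#     from collections import Counter
#
#     def _summarize(statuses, states):
#         counts = Counter(statuses)
#         return dict((s, counts.get(s, 0)) for s in states)
#
#     >>> summary = _summarize(
#     ...     ["passed", "passed", "failed"],
#     ...     states=["passed", "failed", "invalidated", "blocked"])
#     >>> sorted(summary.items())
#     [('blocked', 0), ('failed', 1), ('invalidated', 0), ('passed', 2)]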
|
[
"django.db.models.Max",
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.models.ManyToManyField",
"django.core.exceptions.ValidationError",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.Q",
"django.db.models.BooleanField",
"django.db.connection.cursor",
"datetime.date.today",
"django.db.models.IntegerField",
"model_utils.Choices",
"django.db.models.DateField",
"django.db.models.Count"
] |
[((643, 697), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ProductVersion'], {'related_name': '"""runs"""'}), "(ProductVersion, related_name='runs')\n", (660, 697), False, 'from django.db import connection, transaction, models\n'), ((709, 741), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (725, 741), False, 'from django.db import connection, transaction, models\n'), ((760, 788), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (776, 788), False, 'from django.db import connection, transaction, models\n'), ((801, 846), 'django.db.models.DateField', 'models.DateField', ([], {'default': 'datetime.date.today'}), '(default=datetime.date.today)\n', (817, 846), False, 'from django.db import connection, transaction, models\n'), ((857, 896), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (873, 896), False, 'from django.db import connection, transaction, models\n'), ((909, 948), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (925, 948), False, 'from django.db import connection, transaction, models\n'), ((965, 999), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (984, 999), False, 'from django.db import connection, transaction, models\n'), ((1013, 1061), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'null': '(True)', 'blank': '(True)'}), "('self', null=True, blank=True)\n", (1030, 1061), False, 'from django.db import connection, transaction, models\n'), ((1082, 1169), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['CaseVersion'], {'through': '"""RunCaseVersion"""', 'related_name': '"""runs"""'}), "(CaseVersion, through='RunCaseVersion', related_name=\n 'runs')\n", (1104, 1169), False, 'from django.db import connection, transaction, models\n'), ((1187, 1257), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Suite'], {'through': '"""RunSuite"""', 'related_name': '"""runs"""'}), "(Suite, through='RunSuite', related_name='runs')\n", (1209, 1257), False, 'from django.db import connection, transaction, models\n'), ((14609, 14663), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Run'], {'related_name': '"""runcaseversions"""'}), "(Run, related_name='runcaseversions')\n", (14626, 14663), False, 'from django.db import connection, transaction, models\n'), ((14682, 14744), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CaseVersion'], {'related_name': '"""runcaseversions"""'}), "(CaseVersion, related_name='runcaseversions')\n", (14699, 14744), False, 'from django.db import connection, transaction, models\n'), ((14757, 14802), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'db_index': '(True)'}), '(default=0, db_index=True)\n', (14776, 14802), False, 'from django.db import connection, transaction, models\n'), ((20693, 20741), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Run'], {'related_name': '"""runsuites"""'}), "(Run, related_name='runsuites')\n", (20710, 20741), False, 'from django.db import connection, transaction, models\n'), ((20754, 20804), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Suite'], {'related_name': '"""runsuites"""'}), "(Suite, related_name='runsuites')\n", (20771, 20804), False, 'from django.db import connection, transaction, models\n'), ((20817, 
20862), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'db_index': '(True)'}), '(default=0, db_index=True)\n', (20836, 20862), False, 'from django.db import connection, transaction, models\n'), ((21170, 21261), 'model_utils.Choices', 'Choices', (['"""assigned"""', '"""started"""', '"""passed"""', '"""failed"""', '"""invalidated"""', '"""blocked"""', '"""skipped"""'], {}), "('assigned', 'started', 'passed', 'failed', 'invalidated', 'blocked',\n 'skipped')\n", (21177, 21261), False, 'from model_utils import Choices\n'), ((21292, 21322), 'model_utils.Choices', 'Choices', (['"""pending"""', '"""reviewed"""'], {}), "('pending', 'reviewed')\n", (21299, 21322), False, 'from model_utils import Choices\n'), ((21589, 21636), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""results"""'}), "(User, related_name='results')\n", (21606, 21636), False, 'from django.db import connection, transaction, models\n'), ((21658, 21715), 'django.db.models.ForeignKey', 'models.ForeignKey', (['RunCaseVersion'], {'related_name': '"""results"""'}), "(RunCaseVersion, related_name='results')\n", (21675, 21715), False, 'from django.db import connection, transaction, models\n'), ((21743, 21797), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Environment'], {'related_name': '"""results"""'}), "(Environment, related_name='results')\n", (21760, 21797), False, 'from django.db import connection, transaction, models\n'), ((21811, 21903), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'db_index': '(True)', 'choices': 'STATUS', 'default': 'STATUS.assigned'}), '(max_length=50, db_index=True, choices=STATUS, default=\n STATUS.assigned)\n', (21827, 21903), False, 'from django.db import connection, transaction, models\n'), ((21922, 21950), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (21938, 21950), False, 'from django.db import connection, transaction, models\n'), ((21967, 22000), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (21986, 22000), False, 'from django.db import connection, transaction, models\n'), ((22015, 22106), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'db_index': '(True)', 'choices': 'REVIEW', 'default': 'REVIEW.pending'}), '(max_length=50, db_index=True, choices=REVIEW, default=\n REVIEW.pending)\n', (22031, 22106), False, 'from django.db import connection, transaction, models\n'), ((22129, 22199), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""reviews"""', 'blank': '(True)', 'null': '(True)'}), "(User, related_name='reviews', blank=True, null=True)\n", (22146, 22199), False, 'from django.db import connection, transaction, models\n'), ((23386, 23450), 'model_utils.Choices', 'Choices', (['"""passed"""', '"""failed"""', '"""invalidated"""', '"""skipped"""', '"""blocked"""'], {}), "('passed', 'failed', 'invalidated', 'skipped', 'blocked')\n", (23393, 23450), False, 'from model_utils import Choices\n'), ((23465, 23518), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Result'], {'related_name': '"""stepresults"""'}), "(Result, related_name='stepresults')\n", (23482, 23518), False, 'from django.db import connection, transaction, models\n'), ((23530, 23585), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CaseStep'], {'related_name': '"""stepresults"""'}), "(CaseStep, related_name='stepresults')\n", (23547, 23585), False, 'from 
django.db import connection, transaction, models\n'), ((23599, 23689), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'db_index': '(True)', 'choices': 'STATUS', 'default': 'STATUS.passed'}), '(max_length=50, db_index=True, choices=STATUS, default=\n STATUS.passed)\n', (23615, 23689), False, 'from django.db import connection, transaction, models\n'), ((23708, 23735), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)'}), '(blank=True)\n', (23723, 23735), False, 'from django.db import connection, transaction, models\n'), ((24422, 24441), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (24439, 24441), False, 'from django.db import connection, transaction, models\n'), ((1512, 1568), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Start date must be prior to end date."""'], {}), "('Start date must be prior to end date.')\n", (1527, 1568), False, 'from django.core.exceptions import ValidationError, ObjectDoesNotExist\n'), ((4807, 4826), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (4824, 4826), False, 'from django.db import connection, transaction, models\n'), ((11221, 11224), 'django.db.models.Q', 'Q', ([], {}), '()\n', (11222, 11224), False, 'from django.db.models import Q, Count, Max\n'), ((2972, 2993), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2991, 2993), False, 'import datetime\n'), ((11305, 11369), 'django.db.models.Q', 'Q', ([], {}), "(**{'runcaseversion_id': combo[0], 'environment_id': combo[1]})\n", (11306, 11369), False, 'from django.db.models import Q, Count, Max\n'), ((6455, 6475), 'django.db.models.Count', 'Count', (['"""caseversion"""'], {}), "('caseversion')\n", (6460, 6475), False, 'from django.db.models import Q, Count, Max\n'), ((6870, 6888), 'django.db.models.Max', 'Max', (['"""results__id"""'], {}), "('results__id')\n", (6873, 6888), False, 'from django.db.models import Q, Count, Max\n')]
|
# Generated by Django 3.1.3 on 2022-04-04 10:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('onlinecourse', '0007_auto_20220216_1913'),
]
operations = [
migrations.RemoveField(
model_name='instructor',
name='total_learners',
),
]
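# Illustrative note (assumption): Django generated this operation from the
# model diff, and reversing it requires the removed field's original
# definition. If 'total_learners' had been an IntegerField, the hypothetical
# reverse operation would be (assumes `from django.db import models`):
#
#     migrations.AddField(
#         model_name='instructor',
#         name='total_learners',
#         field=models.IntegerField(default=0),
#     )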
|
[
"django.db.migrations.RemoveField"
] |
[((232, 302), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""instructor"""', 'name': '"""total_learners"""'}), "(model_name='instructor', name='total_learners')\n", (254, 302), False, 'from django.db import migrations\n')]
|
"""Verifies the 'common.py' module"""
from argparse import Namespace
import pkg_resources
from nose.tools import eq_, ok_, raises
from pronto import Term
from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute
from termlink.models import Coding, Relationship
@raises(ValueError)
def test_uri_scheme():
"""An unsupported URI scheme throws a ValueError"""
execute(Namespace(uri='foo://bar'))
def test_obo_format():
"""Tests the conversion of an .obo file"""
path = pkg_resources.resource_filename(__name__, "resources/ontology.obo")
uri = f"file://{path}"
system = 'https://lifeomic.github.io/termlink/'
output = execute(Namespace(uri=uri, system=system))
ok_(len(output) == 4)
def test_owl_format():
"""Tests the conversion of an .owl file"""
path = pkg_resources.resource_filename(__name__, "resources/ontology.owl")
uri = f"file://{path}"
system = 'https://lifeomic.github.io/termlink/'
output = execute(Namespace(uri=uri, system=system))
ok_(len(output) == 2)
def test_to_coding():
"""Checks that a term is properly converted"""
system = "http://snomed.info/sct"
term = Term(id='SNOMEDCT_US:25064002', name='Headache')
res = _to_coding(term, system)
exp = Coding(
system=system,
code='25064002',
display='Headache'
)
eq_(exp, res)
def test_to_coding_without_colon():
"""Checks that a term without a ':' is properly converted"""
system = "http://snomed.info/sct"
term = Term(id='25064002', name='Headache')
res = _to_coding(term, system)
exp = Coding(
system=system,
code='25064002',
display='Headache'
)
eq_(exp, res)
def test_to_coding_with_system_and_code():
"""Checks that a term with a system and code identifier is properly converted"""
system = "http://snomed.info/sct"
term = Term(id='http://snomed.info/sct:25064002', name='Headache')
res = _to_coding(term, system)
exp = Coding(
system=system,
code='25064002',
display='Headache'
)
eq_(exp, res)
def test_to_coding_with_system():
"""Checks that a term with a system identifier is properly converted"""
system = "http://snomed.info/sct"
term = Term(id='http://snomed.info/sct:', name='SNOMED-CT')
res = _to_coding(term, system)
exp = Coding(
system=system,
display='SNOMED-CT'
)
eq_(exp, res)
def test_to_json():
"""Checks that a source, equivalence and target and properly converted"""
system = "http://snomed.info/sct"
source = Term(id='SNOMEDCT_US:735938006', name='Acute headache')
equivalence = 'subsumes'
target = Term(id='SNOMEDCT_US:25064002', name='Headache')
res = _to_relationship(source, equivalence, target, system)
exp = Relationship(
equivalence='subsumes',
source=Coding(
system=system,
code='735938006',
display='Acute headache'
),
target=Coding(
system=system,
code='25064002',
display='Headache'
)
)
eq_(exp, res)
@raises(RuntimeError)
def test_to_equivalence_from_scope():
"""A RuntimeError is thrown for an unknown scope"""
_to_equivalence_from_scope("foobar")
|
[
"argparse.Namespace",
"termlink.common._to_relationship",
"termlink.models.Coding",
"termlink.common._to_equivalence_from_scope",
"pkg_resources.resource_filename",
"nose.tools.eq_",
"pronto.Term",
"nose.tools.raises",
"termlink.common._to_coding"
] |
[((305, 323), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (311, 323), False, 'from nose.tools import eq_, ok_, raises\n'), ((3176, 3196), 'nose.tools.raises', 'raises', (['RuntimeError'], {}), '(RuntimeError)\n', (3182, 3196), False, 'from nose.tools import eq_, ok_, raises\n'), ((526, 593), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""resources/ontology.obo"""'], {}), "(__name__, 'resources/ontology.obo')\n", (557, 593), False, 'import pkg_resources\n'), ((838, 905), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""resources/ontology.owl"""'], {}), "(__name__, 'resources/ontology.owl')\n", (869, 905), False, 'import pkg_resources\n'), ((1192, 1240), 'pronto.Term', 'Term', ([], {'id': '"""SNOMEDCT_US:25064002"""', 'name': '"""Headache"""'}), "(id='SNOMEDCT_US:25064002', name='Headache')\n", (1196, 1240), False, 'from pronto import Term\n'), ((1252, 1276), 'termlink.common._to_coding', '_to_coding', (['term', 'system'], {}), '(term, system)\n', (1262, 1276), False, 'from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute\n'), ((1288, 1346), 'termlink.models.Coding', 'Coding', ([], {'system': 'system', 'code': '"""25064002"""', 'display': '"""Headache"""'}), "(system=system, code='25064002', display='Headache')\n", (1294, 1346), False, 'from termlink.models import Coding, Relationship\n'), ((1382, 1395), 'nose.tools.eq_', 'eq_', (['exp', 'res'], {}), '(exp, res)\n', (1385, 1395), False, 'from nose.tools import eq_, ok_, raises\n'), ((1549, 1585), 'pronto.Term', 'Term', ([], {'id': '"""25064002"""', 'name': '"""Headache"""'}), "(id='25064002', name='Headache')\n", (1553, 1585), False, 'from pronto import Term\n'), ((1597, 1621), 'termlink.common._to_coding', '_to_coding', (['term', 'system'], {}), '(term, system)\n', (1607, 1621), False, 'from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute\n'), ((1633, 1691), 'termlink.models.Coding', 'Coding', ([], {'system': 'system', 'code': '"""25064002"""', 'display': '"""Headache"""'}), "(system=system, code='25064002', display='Headache')\n", (1639, 1691), False, 'from termlink.models import Coding, Relationship\n'), ((1727, 1740), 'nose.tools.eq_', 'eq_', (['exp', 'res'], {}), '(exp, res)\n', (1730, 1740), False, 'from nose.tools import eq_, ok_, raises\n'), ((1920, 1979), 'pronto.Term', 'Term', ([], {'id': '"""http://snomed.info/sct:25064002"""', 'name': '"""Headache"""'}), "(id='http://snomed.info/sct:25064002', name='Headache')\n", (1924, 1979), False, 'from pronto import Term\n'), ((1991, 2015), 'termlink.common._to_coding', '_to_coding', (['term', 'system'], {}), '(term, system)\n', (2001, 2015), False, 'from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute\n'), ((2027, 2085), 'termlink.models.Coding', 'Coding', ([], {'system': 'system', 'code': '"""25064002"""', 'display': '"""Headache"""'}), "(system=system, code='25064002', display='Headache')\n", (2033, 2085), False, 'from termlink.models import Coding, Relationship\n'), ((2121, 2134), 'nose.tools.eq_', 'eq_', (['exp', 'res'], {}), '(exp, res)\n', (2124, 2134), False, 'from nose.tools import eq_, ok_, raises\n'), ((2296, 2348), 'pronto.Term', 'Term', ([], {'id': '"""http://snomed.info/sct:"""', 'name': '"""SNOMED-CT"""'}), "(id='http://snomed.info/sct:', name='SNOMED-CT')\n", (2300, 2348), False, 'from pronto import Term\n'), ((2360, 2384), 
'termlink.common._to_coding', '_to_coding', (['term', 'system'], {}), '(term, system)\n', (2370, 2384), False, 'from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute\n'), ((2396, 2438), 'termlink.models.Coding', 'Coding', ([], {'system': 'system', 'display': '"""SNOMED-CT"""'}), "(system=system, display='SNOMED-CT')\n", (2402, 2438), False, 'from termlink.models import Coding, Relationship\n'), ((2466, 2479), 'nose.tools.eq_', 'eq_', (['exp', 'res'], {}), '(exp, res)\n', (2469, 2479), False, 'from nose.tools import eq_, ok_, raises\n'), ((2632, 2687), 'pronto.Term', 'Term', ([], {'id': '"""SNOMEDCT_US:735938006"""', 'name': '"""Acute headache"""'}), "(id='SNOMEDCT_US:735938006', name='Acute headache')\n", (2636, 2687), False, 'from pronto import Term\n'), ((2730, 2778), 'pronto.Term', 'Term', ([], {'id': '"""SNOMEDCT_US:25064002"""', 'name': '"""Headache"""'}), "(id='SNOMEDCT_US:25064002', name='Headache')\n", (2734, 2778), False, 'from pronto import Term\n'), ((2790, 2843), 'termlink.common._to_relationship', '_to_relationship', (['source', 'equivalence', 'target', 'system'], {}), '(source, equivalence, target, system)\n', (2806, 2843), False, 'from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute\n'), ((3160, 3173), 'nose.tools.eq_', 'eq_', (['exp', 'res'], {}), '(exp, res)\n', (3163, 3173), False, 'from nose.tools import eq_, ok_, raises\n'), ((3295, 3331), 'termlink.common._to_equivalence_from_scope', '_to_equivalence_from_scope', (['"""foobar"""'], {}), "('foobar')\n", (3321, 3331), False, 'from termlink.common import _to_coding, _to_equivalence_from_scope, _to_relationship, execute\n'), ((415, 441), 'argparse.Namespace', 'Namespace', ([], {'uri': '"""foo://bar"""'}), "(uri='foo://bar')\n", (424, 441), False, 'from argparse import Namespace\n'), ((694, 727), 'argparse.Namespace', 'Namespace', ([], {'uri': 'uri', 'system': 'system'}), '(uri=uri, system=system)\n', (703, 727), False, 'from argparse import Namespace\n'), ((1006, 1039), 'argparse.Namespace', 'Namespace', ([], {'uri': 'uri', 'system': 'system'}), '(uri=uri, system=system)\n', (1015, 1039), False, 'from argparse import Namespace\n'), ((2916, 2981), 'termlink.models.Coding', 'Coding', ([], {'system': 'system', 'code': '"""735938006"""', 'display': '"""Acute headache"""'}), "(system=system, code='735938006', display='Acute headache')\n", (2922, 2981), False, 'from termlink.models import Coding, Relationship\n'), ((3044, 3102), 'termlink.models.Coding', 'Coding', ([], {'system': 'system', 'code': '"""25064002"""', 'display': '"""Headache"""'}), "(system=system, code='25064002', display='Headache')\n", (3050, 3102), False, 'from termlink.models import Coding, Relationship\n')]
|
from conans import ConanFile, CMake, tools
class SimperiumcConan(ConanFile):
name = "simperium-c"
version = "0.1.0"
license = "MIT"
url = "https://github.com/franc0is/simperium-c"
description = "A C library to integrate Simperium's service"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
generators = "cmake"
requires = (("jansson/2.11@franc0is/stable"),
("jsondiff-c/0.1.0@franc0is/stable"),
("argtable3/3.0.3@franc0is/stable"),
("libcurl/7.56.1@bincrafters/stable"),
("libwebsockets/2.4.0@bincrafters/stable"),
("doctest/1.2.6@bincrafters/stable"))
exports_sources = "*"
def configure(self):
self.settings.compiler = 'gcc'
self.settings.compiler.version = 7
self.settings.compiler.libcxx = 'libstdc++11'
def build(self):
cmake = CMake(self)
cmake.configure(source_folder=".")
cmake.build()
def package(self):
self.copy("*.h", dst="include", src=".")
self.copy("*simperium.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.dylib", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["simperium"]
|
[
"conans.CMake"
] |
[((967, 978), 'conans.CMake', 'CMake', (['self'], {}), '(self)\n', (972, 978), False, 'from conans import ConanFile, CMake, tools\n')]
|
from datetime import datetime, timedelta, tzinfo
import socket
from django.conf import settings
from django.db import models
from django.utils.translation import gettext_lazy as _
try:
import pytz
except ImportError:
pytz = None
ZERO = timedelta(0)
class UTC(tzinfo):
"""
UTC implementation taken from Python's docs.
Used only when pytz isn't available.
"""
def __repr__(self):
return "<UTC>"
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
def now():
if getattr(settings, 'USE_TZ', False):
if pytz:
utc = pytz.utc
else:
utc = UTC()
return datetime.utcnow().replace(tzinfo=utc)
else:
return datetime.now()
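# Illustrative behaviour of the two branches (sketch, assumes configured
# Django settings):
#
#     >>> from django.test import override_settings
#     >>> with override_settings(USE_TZ=True):
#     ...     now().tzinfo is not None
#     True
#     >>> with override_settings(USE_TZ=False):
#     ...     now().tzinfo is None
#     True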
class LogRecord(models.Model):
"""
A simple bucket for the usual logging info (logger, level, message) plus a
unique request ID for tracking all the log records made in a given request,
the server logging the request, the client's IP address, the name of the
application, and for requests with errors, a stack trace and a copy of
Django's server error page.
"""
timestamp = models.DateTimeField(db_index=True, default=now)
application = models.CharField(
max_length=256,
default=getattr(
settings,
'PEAVY_APP_NAME',
settings.ROOT_URLCONF.split('.')[0]
),
help_text=_("The application logging this record."),
db_index=True
)
origin_server = models.CharField(
max_length=256,
help_text=_("The server logging this record."),
default=socket.gethostname,
db_index=True
)
client_ip = models.CharField(
max_length=128,
help_text=_("The IP address of the client making the request."),
blank=True,
db_index=True
)
user_pk = models.IntegerField(
blank=True,
null=True,
db_index=True,
help_text=_("The primary key of the user making the request in which this record was logged."),
)
username = models.CharField(
max_length=256,
help_text=_("The username of the user making the request in which this record was logged."),
blank=True,
db_index=True
)
uuid = models.CharField(
max_length=256,
help_text=_("The UUID of the Django request in which this record was logged."),
blank=True,
db_index=True
)
logger = models.CharField(
max_length=1024,
help_text=_("The name of the logger of the record."),
db_index=True
)
level = models.CharField(
max_length=32,
help_text=_("The level of the log record (DEBUG, INFO...)"),
db_index=True
)
message = models.TextField()
stack_trace = models.TextField(blank=True)
debug_page = models.TextField(blank=True)
class Meta:
ordering = ('-timestamp',)
permissions = (
("view_logs", "Can view log records"),
)
def __unicode__(self):
return unicode(self.pk)
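# Illustrative sketch (assumption): a logging handler feeding this model
# would create one row per emitted record, roughly:
#
#     LogRecord.objects.create(
#         application='myapp',          # illustrative values throughout
#         client_ip='127.0.0.1',
#         uuid=request_uuid,            # hypothetical per-request ID
#         logger='myapp.views',
#         level='ERROR',
#         message='Something broke',
#     )
#
# timestamp, origin_server and application fall back to their declared
# defaults when omitted.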
|
[
"django.db.models.TextField",
"django.utils.translation.gettext_lazy",
"django.conf.settings.ROOT_URLCONF.split",
"datetime.datetime.utcnow",
"datetime.timedelta",
"django.db.models.DateTimeField",
"datetime.datetime.now"
] |
[((247, 259), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (256, 259), False, 'from datetime import datetime, timedelta, tzinfo\n'), ((1217, 1265), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'db_index': '(True)', 'default': 'now'}), '(db_index=True, default=now)\n', (1237, 1265), False, 'from django.db import models\n'), ((2833, 2851), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2849, 2851), False, 'from django.db import models\n'), ((2870, 2898), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (2886, 2898), False, 'from django.db import models\n'), ((2916, 2944), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (2932, 2944), False, 'from django.db import models\n'), ((794, 808), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (806, 808), False, 'from datetime import datetime, timedelta, tzinfo\n'), ((1481, 1522), 'django.utils.translation.gettext_lazy', '_', (['"""The application logging this record."""'], {}), "('The application logging this record.')\n", (1482, 1522), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1633, 1669), 'django.utils.translation.gettext_lazy', '_', (['"""The server logging this record."""'], {}), "('The server logging this record.')\n", (1634, 1669), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1812, 1865), 'django.utils.translation.gettext_lazy', '_', (['"""The IP address of the client making the request."""'], {}), "('The IP address of the client making the request.')\n", (1813, 1865), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2031, 2120), 'django.utils.translation.gettext_lazy', '_', (['"""The primary key of the user making the request in which this record was logged."""'], {}), "('The primary key of the user making the request in which this record was logged.'\n )\n", (2032, 2120), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2199, 2285), 'django.utils.translation.gettext_lazy', '_', (['"""The username of the user making the request in which this record was logged."""'], {}), "('The username of the user making the request in which this record was logged.'\n )\n", (2200, 2285), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2402, 2470), 'django.utils.translation.gettext_lazy', '_', (['"""The UUID of the Django request in which this record was logged."""'], {}), "('The UUID of the Django request in which this record was logged.')\n", (2403, 2470), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2595, 2637), 'django.utils.translation.gettext_lazy', '_', (['"""The name of the logger of the record."""'], {}), "('The name of the logger of the record.')\n", (2596, 2637), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2739, 2788), 'django.utils.translation.gettext_lazy', '_', (['"""The level of the log record (DEBUG, INFO...)"""'], {}), "('The level of the log record (DEBUG, INFO...)')\n", (2740, 2788), True, 'from django.utils.translation import gettext_lazy as _\n'), ((731, 748), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (746, 748), False, 'from datetime import datetime, timedelta, tzinfo\n'), ((1416, 1448), 'django.conf.settings.ROOT_URLCONF.split', 'settings.ROOT_URLCONF.split', (['"""."""'], {}), "('.')\n", (1443, 1448), False, 'from django.conf import settings\n')]
|
# Copyright 2016 ASLP@NPU. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <EMAIL> (zhangyuchao)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import math
import sonnet as snt
import tensorflow as tf
sys.path.append(os.path.dirname(sys.path[0]))
from io_funcs.tfrecords_io import get_padded_batch, get_seq2seq_batch
class SequenceDataset(snt.AbstractModule):
"""Sequence dataset provider."""
TRAIN = "train"
VALID = "valid"
TEST = "test"
def __init__(self, subset, config_dir, data_dir, batch_size,
input_size, output_size, num_enqueuing_threads=8,
num_epochs=None, infer=False, name="sequence_dataset"):
if subset not in [self.TRAIN, self.VALID, self.TEST]:
raise ValueError("subset should be %s, %s, or %s. Received %s instead."
% (self.TRAIN, self.VALID, self.TEST, subset))
super(SequenceDataset, self).__init__(name=name)
self._config_dir = config_dir
self._data_dir = data_dir
self._batch_size = batch_size
self._input_size = input_size
self._output_size = output_size
self._num_enqueuing_threads = num_enqueuing_threads
self._num_epochs = num_epochs
self._infer = infer
self._tfrecords_lst = self.read_config_file(subset)
self._num_batches = int(math.ceil(len(self._tfrecords_lst) / float(self._batch_size)))
def _build(self):
if not self._infer:
input_sequence, target_sequence, length = get_padded_batch(
file_list=self._tfrecords_lst,
batch_size=self._batch_size,
input_size=self._input_size,
output_size=self._output_size,
num_enqueuing_threads=self._num_enqueuing_threads,
num_epochs=self._num_epochs,
infer=self._infer)
return input_sequence, target_sequence, length
else:
input_sequence, length = get_padded_batch(
file_list=self._tfrecords_lst,
batch_size=self._batch_size,
input_size=self._input_size,
output_size=self._output_size,
num_enqueuing_threads=self._num_enqueuing_threads,
num_epochs=self._num_epochs,
infer=self._infer)
return input_sequence, length
def read_config_file(self, name):
file_name = os.path.join(self._config_dir, name + ".lst")
if not tf.gfile.Exists(file_name):
tf.logging.fatal('File does not exist %s', file_name)
sys.exit(-1)
config_file = open(file_name)
tfrecords_lst = []
for line in config_file:
utt_id = line.strip().split()[0]
tfrecords_name = os.path.join(
self._data_dir, name, utt_id + ".tfrecords")
if not tf.gfile.Exists(tfrecords_name):
tf.logging.fatal('TFrecords does not exist %s', tfrecords_name)
sys.exit(-1)
tfrecords_lst.append(tfrecords_name)
return tfrecords_lst
@property
def num_batches(self):
return self._num_batches
@property
def tfrecords_lst(self):
return self._tfrecords_lst
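# Usage sketch (assumption, sizes illustrative): building the training
# provider and wiring its batches into a graph would look like:
#
#     dataset = SequenceDataset(
#         subset=SequenceDataset.TRAIN,
#         config_dir='config', data_dir='data',
#         batch_size=32, input_size=145, output_size=75)
#     inputs, targets, lengths = dataset()  # snt.AbstractModule is callable
#
# With infer=True the call returns only (inputs, lengths), matching the two
# branches of _build() above.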
|
[
"tensorflow.gfile.Exists",
"tensorflow.logging.fatal",
"os.path.dirname",
"os.path.join",
"sys.exit",
"io_funcs.tfrecords_io.get_padded_batch"
] |
[((835, 863), 'os.path.dirname', 'os.path.dirname', (['sys.path[0]'], {}), '(sys.path[0])\n', (850, 863), False, 'import os\n'), ((3046, 3091), 'os.path.join', 'os.path.join', (['self._config_dir', "(name + '.lst')"], {}), "(self._config_dir, name + '.lst')\n", (3058, 3091), False, 'import os\n'), ((2137, 2385), 'io_funcs.tfrecords_io.get_padded_batch', 'get_padded_batch', ([], {'file_list': 'self._tfrecords_lst', 'batch_size': 'self._batch_size', 'input_size': 'self._input_size', 'output_size': 'self._output_size', 'num_enqueuing_threads': 'self._num_enqueuing_threads', 'num_epochs': 'self._num_epochs', 'infer': 'self._infer'}), '(file_list=self._tfrecords_lst, batch_size=self._batch_size,\n input_size=self._input_size, output_size=self._output_size,\n num_enqueuing_threads=self._num_enqueuing_threads, num_epochs=self.\n _num_epochs, infer=self._infer)\n', (2153, 2385), False, 'from io_funcs.tfrecords_io import get_padded_batch, get_seq2seq_batch\n'), ((2596, 2844), 'io_funcs.tfrecords_io.get_padded_batch', 'get_padded_batch', ([], {'file_list': 'self._tfrecords_lst', 'batch_size': 'self._batch_size', 'input_size': 'self._input_size', 'output_size': 'self._output_size', 'num_enqueuing_threads': 'self._num_enqueuing_threads', 'num_epochs': 'self._num_epochs', 'infer': 'self._infer'}), '(file_list=self._tfrecords_lst, batch_size=self._batch_size,\n input_size=self._input_size, output_size=self._output_size,\n num_enqueuing_threads=self._num_enqueuing_threads, num_epochs=self.\n _num_epochs, infer=self._infer)\n', (2612, 2844), False, 'from io_funcs.tfrecords_io import get_padded_batch, get_seq2seq_batch\n'), ((3107, 3133), 'tensorflow.gfile.Exists', 'tf.gfile.Exists', (['file_name'], {}), '(file_name)\n', (3122, 3133), True, 'import tensorflow as tf\n'), ((3147, 3200), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""File does not exist %s"""', 'file_name'], {}), "('File does not exist %s', file_name)\n", (3163, 3200), True, 'import tensorflow as tf\n'), ((3213, 3225), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3221, 3225), False, 'import sys\n'), ((3398, 3455), 'os.path.join', 'os.path.join', (['self._data_dir', 'name', "(utt_id + '.tfrecords')"], {}), "(self._data_dir, name, utt_id + '.tfrecords')\n", (3410, 3455), False, 'import os\n'), ((3492, 3523), 'tensorflow.gfile.Exists', 'tf.gfile.Exists', (['tfrecords_name'], {}), '(tfrecords_name)\n', (3507, 3523), True, 'import tensorflow as tf\n'), ((3541, 3604), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""TFrecords does not exist %s"""', 'tfrecords_name'], {}), "('TFrecords does not exist %s', tfrecords_name)\n", (3557, 3604), True, 'import tensorflow as tf\n'), ((3621, 3633), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3629, 3633), False, 'import sys\n')]
|
from pyvista import examples
grid = examples.load_explicit_structured() # doctest:+SKIP
grid.compute_connections() # doctest:+SKIP
grid.plot(show_edges=True) # doctest:+SKIP
|
[
"pyvista.examples.load_explicit_structured"
] |
[((36, 71), 'pyvista.examples.load_explicit_structured', 'examples.load_explicit_structured', ([], {}), '()\n', (69, 71), False, 'from pyvista import examples\n')]
|
#
# Copyright 2015 Cisco Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for ceilometer/publisher/kafka_broker.py
"""
import datetime
import uuid
import mock
from oslo_utils import netutils
from ceilometer.event.storage import models as event
from ceilometer.publisher.kafka_broker import KafkaBrokerPublisher
from ceilometer import sample
from ceilometer.tests import base as tests_base
class TestKafkaPublisher(tests_base.BaseTestCase):
test_event_data = [
event.Event(message_id=uuid.uuid4(),
event_type='event_%d' % i,
generated=datetime.datetime.utcnow(),
traits=[], raw={})
for i in range(0, 5)
]
test_data = [
sample.Sample(
name='test',
type=sample.TYPE_CUMULATIVE,
unit='',
volume=1,
user_id='test',
project_id='test',
resource_id='test_run_tasks',
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'name': 'TestPublish'},
),
sample.Sample(
name='test',
type=sample.TYPE_CUMULATIVE,
unit='',
volume=1,
user_id='test',
project_id='test',
resource_id='test_run_tasks',
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'name': 'TestPublish'},
),
sample.Sample(
name='test2',
type=sample.TYPE_CUMULATIVE,
unit='',
volume=1,
user_id='test',
project_id='test',
resource_id='test_run_tasks',
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'name': 'TestPublish'},
),
sample.Sample(
name='test2',
type=sample.TYPE_CUMULATIVE,
unit='',
volume=1,
user_id='test',
project_id='test',
resource_id='test_run_tasks',
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'name': 'TestPublish'},
),
sample.Sample(
name='test3',
type=sample.TYPE_CUMULATIVE,
unit='',
volume=1,
user_id='test',
project_id='test',
resource_id='test_run_tasks',
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'name': 'TestPublish'},
),
]
def setUp(self):
super(TestKafkaPublisher, self).setUp()
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer'))
with mock.patch.object(publisher, '_send') as fake_send:
fake_send.side_effect = mock.Mock()
publisher.publish_samples(mock.MagicMock(), self.test_data)
self.assertEqual(1, len(fake_send.mock_calls))
self.assertEqual(0, len(publisher.local_queue))
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_without_options(self, mock_method):
publisher = KafkaBrokerPublisher(
netutils.urlsplit('kafka://127.0.0.1:9092'))
with mock.patch.object(publisher, '_send') as fake_send:
fake_send.side_effect = mock.Mock()
publisher.publish_samples(mock.MagicMock(), self.test_data)
self.assertEqual(1, len(fake_send.mock_calls))
self.assertEqual(0, len(publisher.local_queue))
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_to_host_without_policy(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer'))
self.assertEqual('default', publisher.policy)
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer&policy=test'))
self.assertEqual('default', publisher.policy)
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_to_host_with_default_policy(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer&policy=default'))
with mock.patch.object(publisher, '_send') as fake_send:
fake_send.side_effect = TypeError
self.assertRaises(TypeError, publisher.publish_samples,
mock.MagicMock(), self.test_data)
self.assertEqual(100, len(fake_send.mock_calls))
self.assertEqual(0, len(publisher.local_queue))
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_to_host_with_drop_policy(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer&policy=drop'))
with mock.patch.object(publisher, '_send') as fake_send:
fake_send.side_effect = Exception("test")
publisher.publish_samples(mock.MagicMock(), self.test_data)
self.assertEqual(1, len(fake_send.mock_calls))
self.assertEqual(0, len(publisher.local_queue))
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_to_host_with_queue_policy(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))
with mock.patch.object(publisher, '_send') as fake_send:
fake_send.side_effect = Exception("test")
publisher.publish_samples(mock.MagicMock(), self.test_data)
self.assertEqual(1, len(fake_send.mock_calls))
self.assertEqual(1, len(publisher.local_queue))
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_to_down_host_with_default_queue_size(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))
for i in range(0, 2000):
for s in self.test_data:
s.name = 'test-%d' % i
publisher.publish_samples(mock.MagicMock(),
self.test_data)
self.assertEqual(1024, len(publisher.local_queue))
self.assertEqual(
'test-976',
publisher.local_queue[0][0]['counter_name']
)
self.assertEqual(
'test-1999',
publisher.local_queue[1023][0]['counter_name']
)
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_to_host_from_down_to_up_with_queue(self, mock_method):
publisher = KafkaBrokerPublisher(netutils.urlsplit(
'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))
for i in range(0, 16):
for s in self.test_data:
s.name = 'test-%d' % i
publisher.publish_samples(mock.MagicMock(), self.test_data)
self.assertEqual(16, len(publisher.local_queue))
with mock.patch.object(publisher, '_send') as fake_send:
fake_send.return_value = mock.Mock()
for s in self.test_data:
s.name = 'test-%d' % 16
publisher.publish_samples(mock.MagicMock(), self.test_data)
self.assertEqual(0, len(publisher.local_queue))
@mock.patch.object(KafkaBrokerPublisher, '_get_client')
def test_publish_event_with_default_policy(self, mock_method):
publisher = KafkaBrokerPublisher(
netutils.urlsplit('kafka://127.0.0.1:9092?topic=ceilometer'))
with mock.patch.object(KafkaBrokerPublisher, '_send') as fake_send:
publisher.publish_events(mock.MagicMock(), self.test_event_data)
self.assertEqual(1, len(fake_send.mock_calls))
with mock.patch.object(KafkaBrokerPublisher, '_send') as fake_send:
fake_send.side_effect = TypeError
self.assertRaises(TypeError, publisher.publish_events,
mock.MagicMock(), self.test_event_data)
self.assertEqual(100, len(fake_send.mock_calls))
self.assertEqual(0, len(publisher.local_queue))
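# Configuration note (assumption): outside the tests, such a publisher is
# selected through the publishers list of a ceilometer pipeline definition,
# e.g. in pipeline.yaml:
#
#     publishers:
#         - kafka://127.0.0.1:9092?topic=ceilometer&policy=queue
#
# where policy is 'default' (retry the send, then re-raise; 100 attempts in
# the tests above), 'drop' (discard on failure) or 'queue' (buffer locally,
# bounded at 1024 samples as exercised above).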
|
[
"mock.patch.object",
"uuid.uuid4",
"datetime.datetime.utcnow",
"mock.Mock",
"mock.MagicMock",
"oslo_utils.netutils.urlsplit"
] |
[((3113, 3167), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (3130, 3167), False, 'import mock\n'), ((3636, 3690), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (3653, 3690), False, 'import mock\n'), ((4158, 4212), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (4175, 4212), False, 'import mock\n'), ((4636, 4690), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (4653, 4690), False, 'import mock\n'), ((5262, 5316), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (5279, 5316), False, 'import mock\n'), ((5828, 5882), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (5845, 5882), False, 'import mock\n'), ((6396, 6450), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (6413, 6450), False, 'import mock\n'), ((7180, 7234), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (7197, 7234), False, 'import mock\n'), ((8008, 8062), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_get_client"""'], {}), "(KafkaBrokerPublisher, '_get_client')\n", (8025, 8062), False, 'import mock\n'), ((3250, 3310), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer')\n", (3267, 3310), False, 'from oslo_utils import netutils\n'), ((3339, 3376), 'mock.patch.object', 'mock.patch.object', (['publisher', '"""_send"""'], {}), "(publisher, '_send')\n", (3356, 3376), False, 'import mock\n'), ((3427, 3438), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3436, 3438), False, 'import mock\n'), ((3802, 3845), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092"""'], {}), "('kafka://127.0.0.1:9092')\n", (3819, 3845), False, 'from oslo_utils import netutils\n'), ((3861, 3898), 'mock.patch.object', 'mock.patch.object', (['publisher', '"""_send"""'], {}), "(publisher, '_send')\n", (3878, 3898), False, 'import mock\n'), ((3949, 3960), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3958, 3960), False, 'import mock\n'), ((4318, 4378), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer')\n", (4335, 4378), False, 'from oslo_utils import netutils\n'), ((4489, 4561), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer&policy=test"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer&policy=test')\n", (4506, 4561), False, 'from oslo_utils import netutils\n'), ((4801, 4876), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer&policy=default"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer&policy=default')\n", (4818, 4876), False, 'from oslo_utils import netutils\n'), ((4905, 4942), 'mock.patch.object', 'mock.patch.object', (['publisher', '"""_send"""'], {}), "(publisher, '_send')\n", (4922, 
4942), False, 'import mock\n'), ((5424, 5496), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer&policy=drop"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer&policy=drop')\n", (5441, 5496), False, 'from oslo_utils import netutils\n'), ((5525, 5562), 'mock.patch.object', 'mock.patch.object', (['publisher', '"""_send"""'], {}), "(publisher, '_send')\n", (5542, 5562), False, 'import mock\n'), ((5991, 6064), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer&policy=queue"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer&policy=queue')\n", (6008, 6064), False, 'from oslo_utils import netutils\n'), ((6093, 6130), 'mock.patch.object', 'mock.patch.object', (['publisher', '"""_send"""'], {}), "(publisher, '_send')\n", (6110, 6130), False, 'import mock\n'), ((6570, 6643), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer&policy=queue"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer&policy=queue')\n", (6587, 6643), False, 'from oslo_utils import netutils\n'), ((7352, 7425), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer&policy=queue"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer&policy=queue')\n", (7369, 7425), False, 'from oslo_utils import netutils\n'), ((7692, 7729), 'mock.patch.object', 'mock.patch.object', (['publisher', '"""_send"""'], {}), "(publisher, '_send')\n", (7709, 7729), False, 'import mock\n'), ((7781, 7792), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (7790, 7792), False, 'import mock\n'), ((8184, 8244), 'oslo_utils.netutils.urlsplit', 'netutils.urlsplit', (['"""kafka://127.0.0.1:9092?topic=ceilometer"""'], {}), "('kafka://127.0.0.1:9092?topic=ceilometer')\n", (8201, 8244), False, 'from oslo_utils import netutils\n'), ((8260, 8308), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_send"""'], {}), "(KafkaBrokerPublisher, '_send')\n", (8277, 8308), False, 'import mock\n'), ((8473, 8521), 'mock.patch.object', 'mock.patch.object', (['KafkaBrokerPublisher', '"""_send"""'], {}), "(KafkaBrokerPublisher, '_send')\n", (8490, 8521), False, 'import mock\n'), ((1011, 1023), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1021, 1023), False, 'import uuid\n'), ((1102, 1128), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1126, 1128), False, 'import datetime\n'), ((3477, 3493), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3491, 3493), False, 'import mock\n'), ((3999, 4015), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (4013, 4015), False, 'import mock\n'), ((5101, 5117), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5115, 5117), False, 'import mock\n'), ((5669, 5685), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5683, 5685), False, 'import mock\n'), ((6237, 6253), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6251, 6253), False, 'import mock\n'), ((6806, 6822), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6820, 6822), False, 'import mock\n'), ((7586, 7602), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (7600, 7602), False, 'import mock\n'), ((7908, 7924), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (7922, 7924), False, 'import mock\n'), ((8360, 8376), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (8374, 8376), False, 'import mock\n'), ((8679, 8695), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (8693, 8695), False, 'import mock\n'), ((1478, 
1504), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1502, 1504), False, 'import datetime\n'), ((1839, 1865), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1863, 1865), False, 'import datetime\n'), ((2201, 2227), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2225, 2227), False, 'import datetime\n'), ((2563, 2589), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2587, 2589), False, 'import datetime\n'), ((2925, 2951), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2949, 2951), False, 'import datetime\n')]
|
# proc: Simple interface to Linux process information.
#
# Author: <NAME> <<EMAIL>>
# Last Change: June 1, 2016
# URL: https://proc.readthedocs.io
"""
The :mod:`proc.unix` module manipulates UNIX processes using signals.
This module contains no Linux-specific details; instead it relies only on
process IDs and common UNIX signal semantics to:
1. Determine whether a given process ID is still alive (signal 0);
2. gracefully (SIGTERM_) and forcefully (SIGKILL_) terminate processes;
3. suspend (SIGSTOP_) and resume (SIGCONT_) processes.
.. _SIGTERM: http://en.wikipedia.org/wiki/Unix_signal#SIGTERM
.. _SIGKILL: http://en.wikipedia.org/wiki/Unix_signal#SIGKILL
.. _SIGSTOP: http://en.wikipedia.org/wiki/Unix_signal#SIGSTOP
.. _SIGCONT: http://en.wikipedia.org/wiki/Unix_signal#SIGCONT
"""
# Standard library modules.
import errno
import logging
import os
import signal
# External dependencies.
from executor.process import ControllableProcess
from property_manager import required_property
# Initialize a logger.
logger = logging.getLogger(__name__)
class UnixProcess(ControllableProcess):
"""
Integration between :class:`executor.process.ControllableProcess` and common UNIX signals.
:class:`UnixProcess` extends :class:`~executor.process.ControllableProcess` which means
all of the process manipulation supported by :class:`~executor.process.ControllableProcess`
is also supported by :class:`UnixProcess` objects.
"""
@required_property
def pid(self):
"""The process ID of the process (an integer)."""
@property
def is_running(self):
"""
:data:`True` if the process is currently running, :data:`False` otherwise.
This implementation sends the signal number zero to :attr:`pid` and
uses the result to infer whether the process is alive or not (this
technique is documented in `man kill`_):
- If the sending of the signal doesn't raise an exception the process
received the signal just fine and so it must exist.
- If an :exc:`~exceptions.OSError` exception with error number
:data:`~errno.EPERM` is raised we don't have permission to signal the
process, which implies that the process is alive.
- If an :exc:`~exceptions.OSError` exception with error number
:data:`~errno.ESRCH` is raised we know that no process with the given
id exists.
An advantage of this approach (on UNIX systems) is that you don't need
to be a parent of the process in question. A disadvantage of this
approach is that it is never going to work on Windows (if you're
serious about portability consider using a package like psutil_).
.. warning:: After a process has been terminated but before the parent
process has reclaimed its child process this property
returns :data:`True`. Usually this is a small time window,
but when it isn't it can be really confusing.
.. _man kill: http://linux.die.net/man/2/kill
.. _psutil: https://pypi.python.org/pypi/psutil
"""
# Querying in-use process IDs is a platform specific operation that
# Python doesn't provide, however sending the signal number zero is
# a platform specific trick that works on most UNIX systems.
logger.debug("Polling process status using signal 0: %s", self)
try:
os.kill(self.pid, 0)
# If no exception is raised we successfully sent a NOOP signal
# to the process so we know the process is (still) alive.
logger.debug("Successfully sent signal 0, process must be alive.")
return True
except OSError as e:
if e.errno == errno.EPERM:
# If we don't have permission this confirms that the
# process ID is in use.
logger.debug("Got EPERM, process must be alive.")
return True
elif e.errno == errno.ESRCH:
# If we get this error we know the process doesn't exist.
logger.debug("Got ESRCH, process can't be alive.")
return False
else:
# Don't swallow exceptions we can't handle.
raise
def terminate_helper(self):
"""
Gracefully terminate the process (by sending it a SIGTERM_ signal).
:raises: :exc:`~exceptions.OSError` when the signal can't be delivered.
Processes can choose to intercept SIGTERM_ to allow for graceful
termination (many daemon processes work like this) however the default
action is to simply exit immediately.
"""
if self.is_running:
logger.debug("Terminating process with SIGTERM: %s", self)
os.kill(self.pid, signal.SIGTERM)
def kill_helper(self):
"""
Forcefully kill the process (by sending it a SIGKILL_ signal).
:raises: :exc:`~exceptions.OSError` when the signal can't be delivered.
The SIGKILL_ signal cannot be intercepted or ignored and causes the
immediate termination of the process (under regular circumstances).
Non-regular circumstances are things like blocking I/O calls on an NFS
share while your file server is down (fun times!).
"""
if self.is_running:
logger.debug("Killing process with SIGKILL: %s", self)
os.kill(self.pid, signal.SIGKILL)
def suspend(self):
"""
Suspend the process so that its execution can be resumed later.
:raises: :exc:`~exceptions.OSError` when the signal can't be delivered.
The :func:`suspend()` method sends a SIGSTOP_ signal to the process.
This signal cannot be intercepted or ignored and has the effect of
completely pausing the process until you call :func:`resume()`.
.. _SIGSTOP: http://en.wikipedia.org/wiki/Unix_signal#SIGSTOP
"""
if self.is_running:
logger.info("Suspending process %s using SIGSTOP ..", self)
os.kill(self.pid, signal.SIGSTOP)
def resume(self):
"""
Resume a process that was previously paused using :func:`suspend()`.
:raises: :exc:`~exceptions.OSError` when the signal can't be delivered.
The :func:`resume()` method sends a SIGCONT_ signal to the process.
This signal resumes a process that was previously paused using SIGSTOP_
(e.g. using :func:`suspend()`).
.. _SIGCONT: http://en.wikipedia.org/wiki/Unix_signal#SIGCONT
"""
if self.is_running:
logger.info("Resuming process %s using SIGCONT ..", self)
os.kill(self.pid, signal.SIGCONT)
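# Hedged usage sketch (added for illustration; not part of the published
# module). It assumes the property_manager base class accepts ``pid`` as a
# constructor keyword, which is how UnixProcess is normally instantiated.
if __name__ == '__main__':
    import subprocess
    child = subprocess.Popen(['sleep', '60'])
    process = UnixProcess(pid=child.pid)
    assert process.is_running           # signal 0 reaches the child
    process.suspend()                   # SIGSTOP: execution pauses
    process.resume()                    # SIGCONT: execution continues
    process.terminate_helper()          # SIGTERM: graceful shutdown
    child.wait()                        # reap the child so the PID is released
    assert not process.is_running       # signal 0 now raises ESRCH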
|
[
"os.kill",
"logging.getLogger"
] |
[((1029, 1056), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1046, 1056), False, 'import logging\n'), ((3463, 3483), 'os.kill', 'os.kill', (['self.pid', '(0)'], {}), '(self.pid, 0)\n', (3470, 3483), False, 'import os\n'), ((4838, 4871), 'os.kill', 'os.kill', (['self.pid', 'signal.SIGTERM'], {}), '(self.pid, signal.SIGTERM)\n', (4845, 4871), False, 'import os\n'), ((5474, 5507), 'os.kill', 'os.kill', (['self.pid', 'signal.SIGKILL'], {}), '(self.pid, signal.SIGKILL)\n', (5481, 5507), False, 'import os\n'), ((6117, 6150), 'os.kill', 'os.kill', (['self.pid', 'signal.SIGSTOP'], {}), '(self.pid, signal.SIGSTOP)\n', (6124, 6150), False, 'import os\n'), ((6734, 6767), 'os.kill', 'os.kill', (['self.pid', 'signal.SIGCONT'], {}), '(self.pid, signal.SIGCONT)\n', (6741, 6767), False, 'import os\n')]
|
import os
import pytest
from scrapy.settings import Settings
@pytest.fixture()
def settings(request):
""" Default scrapy-prerender settings """
s = dict(
# collect scraped items to .collected_items attribute
ITEM_PIPELINES={
'tests.utils.CollectorPipeline': 100,
},
# scrapy-prerender settings
PRERENDER_URL=os.environ.get('PRERENDER_URL'),
DOWNLOADER_MIDDLEWARES={
# Engine side
'scrapy_prerender.PrerenderCookiesMiddleware': 723,
'scrapy_prerender.PrerenderMiddleware': 725,
'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810,
# Downloader side
},
SPIDER_MIDDLEWARES={
'scrapy_prerender.PrerenderDeduplicateArgsMiddleware': 100,
},
DUPEFILTER_CLASS='scrapy_prerender.PrerenderAwareDupeFilter',
HTTPCACHE_STORAGE='scrapy_prerender.PrerenderAwareFSCacheStorage',
)
return Settings(s)
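# Illustrative usage (an assumption, not part of the original file): pytest
# injects this fixture into any test that names it as an argument, e.g.
#
#   def test_middleware_order(settings):
#       assert 'scrapy_prerender.PrerenderMiddleware' in settings['DOWNLOADER_MIDDLEWARES']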
|
[
"os.environ.get",
"scrapy.settings.Settings",
"pytest.fixture"
] |
[((65, 81), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (79, 81), False, 'import pytest\n'), ((991, 1002), 'scrapy.settings.Settings', 'Settings', (['s'], {}), '(s)\n', (999, 1002), False, 'from scrapy.settings import Settings\n'), ((372, 403), 'os.environ.get', 'os.environ.get', (['"""PRERENDER_URL"""'], {}), "('PRERENDER_URL')\n", (386, 403), False, 'import os\n')]
|
# coding=utf-8
# Author: <NAME>
# Date: Sept 11, 2019
#
# Description: Displays statistics about the calculated pipelines.
#
#
import math
import numpy as np
import pandas as pd
pd.set_option('display.max_rows', 100)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000)
pd.set_option('display.precision', 4)
from tabulate import tabulate
def df2md(df, y_index=False, *args, **kwargs):
    blob = tabulate(df, *args, headers='keys', tablefmt='pipe', **kwargs)
if not y_index:
return '\n'.join(['| {}'.format(row.split('|', 2)[-1]) for row in blob.split('\n')])
return blob
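# Example (illustrative only): df2md(pd.DataFrame({'Species': ['HS'], 'Genes': [7]}))
# renders a pipe-delimited Markdown table; with y_index=False (the default) the
# index column that tabulate prepends is stripped from every row.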
if __name__ == '__main__':
#
    # Mammals
#
pipeline = 'mammals'
print('# Pipeline: {pipeline:s}\n'.format(pipeline=pipeline))
df_M = pd.read_csv('results/pipeline-{pipeline:s}/meta_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
df_HS = pd.read_csv('results/pipeline-{pipeline:s}/HS_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
df_MM = pd.read_csv('results/pipeline-{pipeline:s}/MM_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
n_m = df_M.shape[0]
n_hs = df_HS.shape[0]
n_mm = df_MM.shape[0]
df_stat = pd.DataFrame.from_records([
('Meta', n_m),
('HS', n_hs),
('MM', n_mm),
], columns=['Species', 'Genes'])
print(df2md(df_stat, floatfmt='.4f'))
print('\n')
#
# Core
#
pipeline = 'core'
print('# Pipeline: {pipeline:s}\n'.format(pipeline=pipeline))
df_M = pd.read_csv('results/pipeline-{pipeline:s}/meta_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
df_HS = pd.read_csv('results/pipeline-{pipeline:s}/HS_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
df_MM = pd.read_csv('results/pipeline-{pipeline:s}/MM_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
df_DM = pd.read_csv('results/pipeline-{pipeline:s}/DM_meiotic_genes.csv'.format(pipeline=pipeline), index_col=0)
n_m = df_M.shape[0]
n_hs = df_HS.shape[0]
n_mm = df_MM.shape[0]
n_dm = df_DM.shape[0]
df_stat = pd.DataFrame.from_records([
('Meta', n_m),
('HS', n_hs),
('MM', n_mm),
('DM', n_dm),
], columns=['Species', 'Genes'])
print(df2md(df_stat, floatfmt='.4f'))
print('\n')
|
[
"tabulate.tabulate",
"pandas.set_option",
"pandas.DataFrame.from_records"
] |
[((178, 216), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', '(100)'], {}), "('display.max_rows', 100)\n", (191, 216), True, 'import pandas as pd\n'), ((217, 258), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', '(500)'], {}), "('display.max_columns', 500)\n", (230, 258), True, 'import pandas as pd\n'), ((259, 295), 'pandas.set_option', 'pd.set_option', (['"""display.width"""', '(1000)'], {}), "('display.width', 1000)\n", (272, 295), True, 'import pandas as pd\n'), ((296, 333), 'pandas.set_option', 'pd.set_option', (['"""display.precision"""', '(4)'], {}), "('display.precision', 4)\n", (309, 333), True, 'import pandas as pd\n'), ((424, 486), 'tabulate.tabulate', 'tabulate', (['df', '*args'], {'headers': '"""keys"""', 'tablefmt': '"""pipe"""'}), "(df, *args, headers='keys', tablefmt='pipe', **kwargs)\n", (432, 486), False, 'from tabulate import tabulate\n'), ((1207, 1311), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (["[('Meta', n_m), ('HS', n_hs), ('MM', n_mm)]"], {'columns': "['Species', 'Genes']"}), "([('Meta', n_m), ('HS', n_hs), ('MM', n_mm)],\n columns=['Species', 'Genes'])\n", (1232, 1311), True, 'import pandas as pd\n'), ((2097, 2215), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (["[('Meta', n_m), ('HS', n_hs), ('MM', n_mm), ('DM', n_dm)]"], {'columns': "['Species', 'Genes']"}), "([('Meta', n_m), ('HS', n_hs), ('MM', n_mm), ('DM',\n n_dm)], columns=['Species', 'Genes'])\n", (2122, 2215), True, 'import pandas as pd\n')]
|
from enum import unique, Enum
from random import shuffle, choice, uniform
from insomniac.actions_types import LikeAction, FollowAction, GetProfileAction, StoryWatchAction, CommentAction
from insomniac.device_facade import DeviceFacade
from insomniac.navigation import switch_to_english, search_for, LanguageChangedException
from insomniac.scroll_end_detector import ScrollEndDetector
from insomniac.sleeper import sleeper
from insomniac.softban_indicator import softban_indicator
from insomniac.tools.spintax import spin
from insomniac.utils import *
from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView
FOLLOWERS_BUTTON_ID_REGEX = '{0}:id/row_profile_header_followers_container' \
'|{1}:id/row_profile_header_container_followers'
TEXTVIEW_OR_BUTTON_REGEX = 'android.widget.TextView|android.widget.Button'
FOLLOW_REGEX = 'Follow|Follow Back'
ALREADY_FOLLOWING_REGEX = 'Following|Requested'
SHOP_REGEX = 'Add Shop|View Shop'
USER_AVATAR_VIEW_ID = '{0}:id/circular_image|^$'
LISTVIEW_OR_RECYCLERVIEW_REGEX = 'android.widget.ListView|androidx.recyclerview.widget.RecyclerView'
liked_count = 0
is_followed = False
is_scrolled_down = False
is_commented = False
class InteractionStrategy(object):
def __init__(self, do_like=False, do_follow=False, do_story_watch=False, do_comment=False,
likes_count=2, like_percentage=100, follow_percentage=0, stories_count=2, comment_percentage=0,
comments_list=None):
self.do_like = do_like
self.do_follow = do_follow
self.do_story_watch = do_story_watch
self.do_comment = do_comment
self.likes_count = likes_count
self.follow_percentage = follow_percentage
self.like_percentage = like_percentage
self.stories_count = stories_count
self.comment_percentage = comment_percentage
self.comments_list = comments_list
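# Illustrative example (not part of the original module): a strategy that likes
# up to two photos every time, follows roughly 20% of profiles, watches one
# story and never comments.
#
#   strategy = InteractionStrategy(do_like=True, do_follow=True, do_story_watch=True,
#                                  likes_count=2, like_percentage=100,
#                                  follow_percentage=20, stories_count=1)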
def scroll_to_bottom(device):
print("Scroll to bottom")
def is_end_reached():
see_all_button = device.find(resourceId=f'{device.app_id}:id/see_all_button',
className='android.widget.TextView')
return see_all_button.exists()
list_view = device.find(resourceId='android:id/list',
className='android.widget.ListView')
while not is_end_reached():
list_view.swipe(DeviceFacade.Direction.BOTTOM)
print("Scroll back to the first follower")
def is_at_least_one_follower():
follower = device.find(resourceId=f'{device.app_id}:id/follow_list_container',
className='android.widget.LinearLayout')
return follower.exists()
while not is_at_least_one_follower():
list_view.scroll(DeviceFacade.Direction.TOP)
def is_private_account(device):
recycler_view = device.find(resourceId='android:id/list')
return not recycler_view.exists(quick=True)
def open_user(device, username, refresh=False, deep_link_usage_percentage=0, on_action=None):
return _open_user(device, username, False, False, refresh, deep_link_usage_percentage, on_action)
def open_user_followers(device, username, refresh=False, deep_link_usage_percentage=0, on_action=None):
return _open_user(device, username, True, False, refresh, deep_link_usage_percentage, on_action)
def open_user_followings(device, username, refresh=False, deep_link_usage_percentage=0, on_action=None):
return _open_user(device, username, False, True, refresh, deep_link_usage_percentage, on_action)
def iterate_over_followers(device, is_myself, iteration_callback, iteration_callback_pre_conditions,
iterate_without_sleep=False, check_item_was_removed=False):
# Wait until list is rendered
device.find(resourceId=f'{device.app_id}:id/follow_list_container',
className='android.widget.LinearLayout').wait()
def scrolled_to_top():
row_search = device.find(resourceId=f'{device.app_id}:id/row_search_edit_text',
className='android.widget.EditText')
return row_search.exists()
prev_screen_iterated_followers = []
scroll_end_detector = ScrollEndDetector()
while True:
try:
print("Iterate over visible followers")
if not iterate_without_sleep:
sleeper.random_sleep()
screen_iterated_followers = []
screen_skipped_followers_count = 0
scroll_end_detector.notify_new_page()
try:
for item in device.find(resourceId=f'{device.app_id}:id/follow_list_container',
className='android.widget.LinearLayout'):
user_info_view = item.child(index=1)
user_name_view = user_info_view.child(index=0).child()
if not user_name_view.exists(quick=True):
print(COLOR_OKGREEN + "Next item not found: probably reached end of the screen." + COLOR_ENDC)
break
username = user_name_view.get_text()
screen_iterated_followers.append(username)
scroll_end_detector.notify_username_iterated(username)
if not iteration_callback_pre_conditions(username, user_name_view):
screen_skipped_followers_count += 1
continue
to_continue = iteration_callback(username, user_name_view)
if not to_continue:
print(COLOR_OKBLUE + "Stopping followers iteration" + COLOR_ENDC)
return
if check_item_was_removed and \
(not user_name_view.exists()
or username != user_name_view.get_text()):
raise StopIteration("item was removed")
except IndexError:
print(COLOR_FAIL + "Cannot get next item: probably reached end of the screen." + COLOR_ENDC)
if is_myself and scrolled_to_top():
print(COLOR_OKGREEN + "Scrolled to top, finish." + COLOR_ENDC)
return
elif len(screen_iterated_followers) > 0:
load_more_button = device.find(resourceId=f'{device.app_id}:id/row_load_more_button')
load_more_button_exists = load_more_button.exists(quick=True)
if scroll_end_detector.is_the_end():
return
need_swipe = screen_skipped_followers_count == len(screen_iterated_followers)
list_view = device.find(resourceId='android:id/list',
className='android.widget.ListView')
if not list_view.exists():
print(COLOR_FAIL + "Cannot find the list of followers. Trying to press back again." + COLOR_ENDC)
device.back()
list_view = device.find(resourceId='android:id/list',
className='android.widget.ListView')
if is_myself:
print(COLOR_OKGREEN + "Need to scroll now" + COLOR_ENDC)
list_view.scroll(DeviceFacade.Direction.TOP)
else:
pressed_retry = False
if load_more_button_exists:
retry_button = load_more_button.child(className='android.widget.ImageView')
if retry_button.exists():
print("Press \"Load\" button")
retry_button.click()
sleeper.random_sleep()
pressed_retry = True
if need_swipe and not pressed_retry:
print(COLOR_OKGREEN + "All followers skipped, let's do a swipe" + COLOR_ENDC)
list_view.swipe(DeviceFacade.Direction.BOTTOM)
else:
print(COLOR_OKGREEN + "Need to scroll now" + COLOR_ENDC)
list_view.scroll(DeviceFacade.Direction.BOTTOM)
prev_screen_iterated_followers.clear()
prev_screen_iterated_followers += screen_iterated_followers
else:
print(COLOR_OKGREEN + "No followers were iterated, finish." + COLOR_ENDC)
return
except StopIteration as e:
print(COLOR_OKGREEN + f"Starting the screen from the beginning because {e}" + COLOR_ENDC)
def iterate_over_likers(device, iteration_callback, iteration_callback_pre_conditions):
likes_list_view = device.find(resourceId='android:id/list',
classNameMatches=LISTVIEW_OR_RECYCLERVIEW_REGEX)
prev_screen_iterated_likers = []
while True:
print("Iterate over visible likers.")
screen_iterated_likers = []
try:
for item in device.find(resourceId=f'{device.app_id}:id/row_user_container_base',
className='android.widget.LinearLayout'):
user_name_view = item.child(resourceId=f'{device.app_id}:id/row_user_primary_name',
className='android.widget.TextView')
if not user_name_view.exists(quick=True):
print(COLOR_OKGREEN + "Next item not found: probably reached end of the screen." + COLOR_ENDC)
break
username = user_name_view.get_text()
screen_iterated_likers.append(username)
if not iteration_callback_pre_conditions(username, user_name_view):
continue
to_continue = iteration_callback(username, user_name_view)
if not to_continue:
print(COLOR_OKBLUE + "Stopping hashtag-likers iteration" + COLOR_ENDC)
print(f"Going back")
device.back()
return False
except IndexError:
print(COLOR_FAIL + "Cannot get next item: probably reached end of the screen." + COLOR_ENDC)
if screen_iterated_likers == prev_screen_iterated_likers:
print(COLOR_OKGREEN + "Iterated exactly the same likers twice, finish." + COLOR_ENDC)
print(f"Going back")
device.back()
break
prev_screen_iterated_likers.clear()
prev_screen_iterated_likers += screen_iterated_likers
print(COLOR_OKGREEN + "Need to scroll now" + COLOR_ENDC)
likes_list_view.scroll(DeviceFacade.Direction.BOTTOM)
return True
def interact_with_user(device,
user_source,
source_type,
username,
my_username,
interaction_strategy: InteractionStrategy,
on_action) -> (bool, bool):
"""
    :return: (whether some photos were liked, whether @username was followed during the interaction,
    whether stories were watched, whether a comment was left)
"""
global liked_count, is_followed, is_scrolled_down, is_commented
liked_count = 0
is_followed = False
is_watched = False
is_scrolled_down = False
is_commented = False
if username == my_username:
print("It's you, skip.")
return liked_count == interaction_strategy.likes_count, is_followed, is_watched, is_commented
if interaction_strategy.do_story_watch:
is_watched = _watch_stories(device, user_source, source_type, username,
interaction_strategy.stories_count, on_action)
def do_like_actions():
global is_scrolled_down
if interaction_strategy.do_like or interaction_strategy.do_comment:
# Close suggestions if they are opened (hack to fix a bug with opening menu while scrolling)
suggestions_container = device.find(resourceId=f'{device.app_id}:id/similar_accounts_container',
className='android.widget.LinearLayout')
if suggestions_container.exists(quick=True):
print("Close suggestions to avoid bugs while scrolling")
arrow_button = device.find(resourceId=f'{device.app_id}:id/row_profile_header_button_chaining',
className='android.widget.Button')
arrow_button.click(ignore_if_missing=True)
sleeper.random_sleep()
coordinator_layout = device.find(resourceId=f'{device.app_id}:id/coordinator_root_layout')
if coordinator_layout.exists():
print("Scroll down to see more photos.")
coordinator_layout.scroll(DeviceFacade.Direction.BOTTOM)
is_scrolled_down = True
number_of_rows_to_use = min((interaction_strategy.likes_count * 2) // 3 + 1, 4)
photos_indices = list(range(0, number_of_rows_to_use * 3))
shuffle(photos_indices)
photos_indices = photos_indices[:interaction_strategy.likes_count]
photos_indices = sorted(photos_indices)
def on_like():
global liked_count
liked_count += 1
print(COLOR_OKGREEN + "@{} - photo been liked.".format(username) + COLOR_ENDC)
on_action(LikeAction(source_name=user_source, source_type=source_type, user=username))
def on_comment(comment):
global is_commented
is_commented = True
print(COLOR_OKGREEN + "@{} - photo been commented.".format(username) + COLOR_ENDC)
on_action(CommentAction(source_name=user_source, source_type=source_type, user=username, comment=comment))
for i in range(0, interaction_strategy.likes_count):
photo_index = photos_indices[i]
row = photo_index // 3
column = photo_index - row * 3
sleeper.random_sleep()
print("Open and like photo #" + str(i + 1) + " (" + str(row + 1) + " row, " + str(
column + 1) + " column)")
if not _open_photo_and_like_and_comment(device, row, column,
interaction_strategy.do_like, interaction_strategy.do_comment,
interaction_strategy.like_percentage, on_like,
interaction_strategy.comment_percentage,
interaction_strategy.comments_list, my_username, on_comment):
print(COLOR_OKGREEN + "Less than " + str(number_of_rows_to_use * 3) + " photos." + COLOR_ENDC)
break
def do_follow_action():
global is_followed
if interaction_strategy.do_follow:
is_followed = _follow(device, username, interaction_strategy.follow_percentage, is_scrolled_down)
if is_followed:
on_action(FollowAction(source_name=user_source, source_type=source_type, user=username))
if interaction_strategy.do_follow and (interaction_strategy.do_like or interaction_strategy.do_comment):
like_first_chance = randint(1, 100)
if like_first_chance > 50:
print("Going to like-images first and then follow")
do_like_actions()
do_follow_action()
else:
print("Going to follow first and then like-images")
do_follow_action()
do_like_actions()
else:
do_like_actions()
do_follow_action()
return liked_count > 0, is_followed, is_watched, is_commented
def _open_photo_and_like_and_comment(device, row, column, do_like, do_comment, like_percentage, on_like,
comment_percentage, comments_list, my_username, on_comment):
def open_photo():
# recycler_view has a className 'androidx.recyclerview.widget.RecyclerView' on modern Android versions and
# 'android.view.View' on Android 5.0.1 and probably earlier versions
recycler_view = device.find(resourceId='android:id/list')
row_view = recycler_view.child(index=row + 1)
if not row_view.exists():
return False
item_view = row_view.child(index=column)
if not item_view.exists():
return False
item_view.click()
if not OpenedPostView(device).is_visible():
print(COLOR_OKGREEN + "Didn't open the post by click, trying again..." + COLOR_ENDC)
item_view.click()
if not OpenedPostView(device).is_visible():
print(COLOR_FAIL + "Couldn't open this post twice, abort." + COLOR_ENDC)
return False
return True
if not open_photo():
return False
sleeper.random_sleep()
to_like = False
to_comment = False
if do_like:
to_like = True
like_chance = randint(1, 100)
if like_chance > like_percentage:
print("Not going to like image due to like-percentage hit")
to_like = False
if do_comment:
to_comment = True
comment_chance = randint(1, 100)
if comment_chance > comment_percentage:
print("Not going to comment image due to comment-percentage hit")
to_comment = False
if to_like:
OpenedPostView(device).like()
softban_indicator.detect_action_blocked_dialog(device)
on_like()
if to_comment:
_comment(device, my_username, comments_list, on_comment)
print("Back to profile")
device.back()
return True
def _comment(device, my_username, comments_list, on_comment):
comment_button = device.find(resourceId=f'{device.app_id}:id/row_feed_button_comment',
className="android.widget.ImageView")
if not comment_button.exists(quick=True) or not ActionBarView.is_in_interaction_rect(comment_button):
print("Cannot find comment button – will try to swipe down a bit")
device.swipe(DeviceFacade.Direction.TOP)
if not comment_button.exists(quick=True):
print("Still cannot find comment button – won't comment")
return
comment_box_exists = False
comment_box = None
for _ in range(2):
print("Open comments of post")
comment_button.click()
sleeper.random_sleep()
comment_box = device.find(resourceId=f'{device.app_id}:id/layout_comment_thread_edittext')
if comment_box.exists(quick=True):
if not comment_box.is_enabled():
print("Comments are restricted – not commenting...")
device.back()
return
comment_box_exists = True
break
if not comment_box_exists:
print("Couldn't open comments properly - not commenting...")
return
comment = spin(choice(comments_list))
print(f"Commenting: {comment}")
comment_box.set_text(comment)
sleeper.random_sleep()
post_button = device.find(resourceId=f'{device.app_id}:id/layout_comment_thread_post_button_click_area')
post_button.click()
sleeper.random_sleep()
softban_indicator.detect_action_blocked_dialog(device)
device.close_keyboard()
just_post = device.find(
resourceId=f'{device.app_id}:id/row_comment_textview_comment',
text=f"{my_username} {comment}",
)
if just_post.exists(True):
print("Comment succeed.")
on_comment(comment)
else:
print(COLOR_FAIL + "Failed to check if comment succeed." + COLOR_ENDC)
sleeper.random_sleep()
print("Go back to post view.")
device.back()
def _follow(device, username, follow_percentage, is_scrolled_down):
follow_chance = randint(1, 100)
if follow_chance > follow_percentage:
return False
print("Following...")
if is_scrolled_down:
coordinator_layout = device.find(resourceId=f'{device.app_id}:id/coordinator_root_layout')
if coordinator_layout.exists(quick=True):
coordinator_layout.scroll(DeviceFacade.Direction.TOP)
sleeper.random_sleep()
profile_header_main_layout = device.find(resourceId=f"{device.app_id}:id/profile_header_fixed_list",
className='android.widget.LinearLayout')
shop_button = profile_header_main_layout.child(className='android.widget.Button',
clickable=True,
textMatches=SHOP_REGEX)
if shop_button.exists(quick=True):
follow_button = profile_header_main_layout.child(className='android.widget.Button',
clickable=True,
textMatches=FOLLOW_REGEX)
if not follow_button.exists(quick=True):
print(COLOR_FAIL + "Look like a shop profile without an option to follow, continue." + COLOR_ENDC)
return False
else:
profile_header_actions_layout = device.find(resourceId=f'{device.app_id}:id/profile_header_actions_top_row',
className='android.widget.LinearLayout')
if not profile_header_actions_layout.exists(quick=True):
print(COLOR_FAIL + "Cannot find profile actions." + COLOR_ENDC)
return False
follow_button = profile_header_actions_layout.child(classNameMatches=TEXTVIEW_OR_BUTTON_REGEX,
clickable=True,
textMatches=FOLLOW_REGEX)
if not follow_button.exists(quick=True):
unfollow_button = profile_header_actions_layout.child(classNameMatches=TEXTVIEW_OR_BUTTON_REGEX,
clickable=True,
textMatches=ALREADY_FOLLOWING_REGEX)
if unfollow_button.exists(quick=True):
print(COLOR_OKGREEN + "You already follow @" + username + "." + COLOR_ENDC)
return False
else:
print(COLOR_FAIL + "Cannot find neither Follow button, nor Unfollow button. Maybe not "
"English language is set?" + COLOR_ENDC)
save_crash(device)
switch_to_english(device)
return False
follow_button.click()
softban_indicator.detect_action_blocked_dialog(device)
print(COLOR_OKGREEN + "Followed @" + username + COLOR_ENDC)
return True
def do_have_story(device):
return device.find(resourceId=f"{device.app_id}:id/reel_ring",
className="android.view.View").exists(quick=True)
def is_already_followed(device):
# Using main layout in order to support shop pages
profile_header_main_layout = device.find(resourceId=f"{device.app_id}:id/profile_header_fixed_list",
className='android.widget.LinearLayout')
unfollow_button = profile_header_main_layout.child(classNameMatches=TEXTVIEW_OR_BUTTON_REGEX,
clickable=True,
textMatches=ALREADY_FOLLOWING_REGEX)
return unfollow_button.exists(quick=True)
def _watch_stories(device, source_name, source_type, username, stories_value, on_action):
if stories_value == 0:
return False
def story_sleep():
delay = uniform(1, 5)
print(f"Sleep for {delay:.2f} seconds")
sleep(delay)
if do_have_story(device):
profile_picture = device.find(
resourceId=f"{device.app_id}:id/row_profile_header_imageview",
className="android.widget.ImageView"
)
if profile_picture.exists():
print(COLOR_OKGREEN + f"Watching @" + username + f" stories, at most {stories_value}" + COLOR_ENDC)
profile_picture.click() # Open the first story
on_action(StoryWatchAction(source_name=source_name, source_type=source_type, user=username))
sleeper.random_sleep()
for i in range(1, stories_value):
print("Watching a story...")
story_sleep()
if _skip_story(device):
print("Go next")
else:
print(COLOR_OKGREEN + "Watched all stories" + COLOR_ENDC)
break
if not _get_action_bar(device).exists():
print("Back to profile")
device.back()
if not ProfileView(device).is_visible():
print(COLOR_OKGREEN + "Oops, seems we got out of the profile. Going back..." + COLOR_ENDC)
username_view = device.find(className="android.widget.TextView",
text=username)
username_view.click()
sleeper.random_sleep()
return True
return False
def _skip_story(device):
if _is_story_opened(device):
device.screen_click(DeviceFacade.Place.RIGHT)
return True
else:
return False
def _is_story_opened(device):
reel_viewer = device.find(resourceId=f"{device.app_id}:id/reel_viewer_root",
className="android.widget.FrameLayout")
return reel_viewer.exists()
def _open_user(device, username, open_followers=False, open_followings=False,
refresh=False, deep_link_usage_percentage=0, on_action=None):
if refresh:
print("Refreshing profile status...")
coordinator_layout = device.find(resourceId=f'{device.app_id}:id/coordinator_root_layout')
if coordinator_layout.exists():
coordinator_layout.scroll(DeviceFacade.Direction.TOP)
if username is None:
if open_followers:
print("Open your followers")
ProfileView(device, is_own_profile=True).navigate_to_followers()
if open_followings:
print("Open your following")
ProfileView(device, is_own_profile=True).navigate_to_following()
else:
should_open_user_with_search = True
deep_link_usage_chance = randint(1, 100)
if deep_link_usage_chance <= deep_link_usage_percentage:
print(f"Going to open {username} using deeplink")
should_open_user_with_search = False
should_continue, is_profile_opened = _open_profile_using_deeplink(device, username)
if not should_continue:
return False
if not is_profile_opened:
print(f"Failed to open profile using deeplink. Using search instead")
should_open_user_with_search = True
if should_open_user_with_search:
if not search_for(device, username=username, on_action=on_action):
return False
sleeper.random_sleep()
is_profile_empty = softban_indicator.detect_empty_profile(device)
if is_profile_empty:
return False
if open_followers:
print("Open @" + username + " followers")
ProfileView(device, is_own_profile=True).navigate_to_followers()
if open_followings:
print("Open @" + username + " following")
ProfileView(device, is_own_profile=True).navigate_to_following()
return True
def _open_profile_using_deeplink(device, profile_name):
is_profile_opened = False
should_continue = True
profile_url = f"https://www.instagram.com/{profile_name}/"
if not open_instagram_with_url(device.device_id, device.app_id, profile_url):
return should_continue, is_profile_opened
sleeper.random_sleep()
user_not_found_text = device.find(resourceId=f'{device.app_id}:id/no_found_text',
className='android.widget.TextView')
if user_not_found_text.exists(quick=True):
print(COLOR_FAIL + f"Seems like profile {profile_name} is not exists. Pressing back." + COLOR_ENDC)
should_continue = False
is_profile_opened = False
device.back()
else:
should_continue = True
is_profile_opened = True
return should_continue, is_profile_opened
def iterate_over_my_followers(device, iteration_callback, iteration_callback_pre_conditions):
_iterate_over_my_followers_or_followings(device,
iteration_callback,
iteration_callback_pre_conditions,
is_followers=True,
is_swipes_allowed=True)
def iterate_over_my_followers_no_swipes(device, iteration_callback, iteration_callback_pre_conditions):
_iterate_over_my_followers_or_followings(device,
iteration_callback,
iteration_callback_pre_conditions,
is_followers=True,
is_swipes_allowed=False)
def iterate_over_my_followings(device, iteration_callback, iteration_callback_pre_conditions):
_iterate_over_my_followers_or_followings(device,
iteration_callback,
iteration_callback_pre_conditions,
is_followers=False,
is_swipes_allowed=True)
def _iterate_over_my_followers_or_followings(device,
iteration_callback,
iteration_callback_pre_conditions,
is_followers,
is_swipes_allowed):
entities_name = "followers" if is_followers else "followings"
# Wait until list is rendered
device.find(resourceId=f'{device.app_id}:id/follow_list_container',
className='android.widget.LinearLayout').wait()
while True:
print(f"Iterate over visible {entities_name}")
sleeper.random_sleep()
screen_iterated_followings = 0
screen_skipped_followings = 0
for item in device.find(resourceId=f'{device.app_id}:id/follow_list_container',
className='android.widget.LinearLayout'):
user_info_view = item.child(index=1)
user_name_view = user_info_view.child(index=0).child()
if not user_name_view.exists(quick=True):
print(COLOR_OKGREEN + "Next item not found: probably reached end of the screen." + COLOR_ENDC)
break
follow_status_button_view = item.child(index=2)
if not follow_status_button_view.exists(quick=True):
follow_status_button_view = None
username = user_name_view.get_text()
screen_iterated_followings += 1
if not iteration_callback_pre_conditions(username, user_name_view, follow_status_button_view):
screen_skipped_followings += 1
continue
to_continue = iteration_callback(username, user_name_view, follow_status_button_view)
if to_continue:
sleeper.random_sleep()
else:
print(COLOR_OKBLUE + f"Stopping iteration over {entities_name}" + COLOR_ENDC)
return
list_view = device.find(resourceId='android:id/list',
className='android.widget.ListView')
if screen_skipped_followings == screen_iterated_followings > 0 and is_swipes_allowed:
print(COLOR_OKGREEN + "All followings skipped, let's do a swipe" + COLOR_ENDC)
list_view.swipe(DeviceFacade.Direction.BOTTOM)
sleeper.random_sleep(multiplier=2.0)
elif screen_iterated_followings > 0:
print(COLOR_OKGREEN + "Need to scroll now" + COLOR_ENDC)
list_view.scroll(DeviceFacade.Direction.BOTTOM)
else:
print(COLOR_OKGREEN + f"No {entities_name} were iterated, finish." + COLOR_ENDC)
return
@unique
class FollowingsSortOrder(Enum):
DEFAULT = 'default order'
LATEST = 'date: from newest to oldest'
EARLIEST = 'date: from oldest to newest'
def sort_followings_by_date(device, sort_order):
print(f"Sort followings by {sort_order.value}.")
sort_button = device.find(resourceId=f'{device.app_id}:id/sorting_entry_row_icon',
className='android.widget.ImageView')
if not sort_button.exists():
print(COLOR_FAIL + "Cannot find button to sort followings. Continue without sorting." + COLOR_ENDC)
return
sort_button.click()
sort_options_recycler_view = device.find(
resourceId=f'{device.app_id}:id/follow_list_sorting_options_recycler_view')
if not sort_options_recycler_view.exists():
print(COLOR_FAIL + "Cannot find options to sort followings. Continue without sorting." + COLOR_ENDC)
return
if sort_order == FollowingsSortOrder.DEFAULT:
sort_item = sort_options_recycler_view.child(index=0)
elif sort_order == FollowingsSortOrder.LATEST:
sort_item = sort_options_recycler_view.child(index=1)
else: # EARLIEST
sort_item = sort_options_recycler_view.child(index=2)
if not sort_item.exists():
print(COLOR_FAIL + f"Cannot find an option to sort by {sort_order.name}" + COLOR_ENDC)
device.back()
return
sort_item.click()
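# Example call (illustrative): sort_followings_by_date(device, FollowingsSortOrder.LATEST)
# opens the sort menu and picks "date: from newest to oldest".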
def do_unfollow(device, my_username, username, storage, check_if_is_follower, username_view, follow_status_button_view, on_action):
"""
:return: whether unfollow was successful
"""
need_to_go_back_to_list = True
unfollow_from_list_chance = randint(1, 100)
if follow_status_button_view is not None and not check_if_is_follower and unfollow_from_list_chance > 50:
# We can unfollow directly here instead of getting inside to profile
need_to_go_back_to_list = False
print("Unfollowing a profile directly from the following list.")
follow_status_button_view.click()
else:
print("Unfollowing a profile from their profile page.")
username_view.click()
on_action(GetProfileAction(user=username))
sleeper.random_sleep()
if_profile_empty = softban_indicator.detect_empty_profile(device)
if if_profile_empty:
print("Back to the followings list.")
device.back()
return False
if check_if_is_follower:
if _check_is_follower(device, username, my_username):
print("Skip @" + username + ". This user is following you.")
storage.update_follow_status(username, is_follow_me=True, do_i_follow_him=True)
print("Back to the followings list.")
device.back()
return False
storage.update_follow_status(username, is_follow_me=False, do_i_follow_him=True)
unfollow_button = device.find(classNameMatches=TEXTVIEW_OR_BUTTON_REGEX,
clickable=True,
text='Following')
if not unfollow_button.exists():
print(COLOR_FAIL + "Cannot find Following button. Maybe not English language is set?" + COLOR_ENDC)
save_crash(device)
switch_to_english(device)
raise LanguageChangedException()
print(f"Unfollowing @{username}...")
unfollow_button.click()
sleeper.random_sleep()
unfollow_confirmed = False
dialog_view = DialogView(device)
if dialog_view.is_visible():
print("Confirming unfollow...")
unfollow_confirmed = dialog_view.click_unfollow()
if unfollow_confirmed:
sleeper.random_sleep()
if dialog_view.is_visible():
print("Confirming unfollow again...")
if dialog_view.click_unfollow():
sleeper.random_sleep()
else:
softban_indicator.detect_action_blocked_dialog(device)
if need_to_go_back_to_list:
print("Back to the followings list.")
device.back()
return True
def open_likers(device):
posts_view_list = PostsViewList(device)
if not posts_view_list.is_visible():
likers_list_view = LikersListView(device)
if likers_list_view.is_visible():
print(COLOR_FAIL + "Oops, likers list is opened instead of posts list. Going back." + COLOR_FAIL)
posts_view_list = likers_list_view.press_back_arrow()
else:
raise Exception("We are supposed to be on posts list, but something gone wrong.")
return posts_view_list.open_likers()
def interact_with_feed(navigate_to_feed, should_continue, interact_with_feed_post):
posts_views_list = navigate_to_feed()
if posts_views_list is None:
return False
while True:
if not posts_views_list.is_visible():
print(COLOR_FAIL + "Went away from posts list, going back..." + COLOR_ENDC)
posts_views_list = navigate_to_feed()
if posts_views_list is None:
return False
if not interact_with_feed_post(posts_views_list) or not should_continue():
print("Stopping interaction with feed...")
return True
print_debug("Scrolling down...")
posts_views_list.scroll_down()
sleeper.random_sleep()
def _check_is_follower(device, username, my_username):
print(COLOR_OKGREEN + "Check if @" + username + " is following you." + COLOR_ENDC)
ProfileView(device, is_own_profile=True).navigate_to_following()
sleeper.random_sleep()
is_list_empty = softban_indicator.detect_empty_list(device)
if is_list_empty:
        # By default, the profile will be considered to be following you if the list didn't load
        print("List seems to be empty, can't decide if you are followed by the profile or not (could be a soft-ban).")
print("Back to the profile.")
device.back()
return True
else:
my_username_view = device.find(resourceId=f'{device.app_id}:id/follow_list_username',
className='android.widget.TextView',
text=my_username)
result = my_username_view.exists(quick=True)
print("Back to the profile.")
device.back()
return result
def _get_action_bar(device):
tab_bar = device.find(
resourceIdMatches=case_insensitive_re(
f"{device.app_id}:id/action_bar_container"
),
className="android.widget.FrameLayout",
)
return tab_bar
def case_insensitive_re(str_list):
if isinstance(str_list, str):
strings = str_list
else:
strings = "|".join(str_list)
re_str = f"(?i)({strings})"
return re_str
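# Example (illustrative): case_insensitive_re(["Follow", "Following"]) returns
# "(?i)(Follow|Following)", so the resourceIdMatches/textMatches lookups above
# match regardless of letter case.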
|
[
"insomniac.softban_indicator.softban_indicator.detect_action_blocked_dialog",
"insomniac.views.ProfileView",
"random.shuffle",
"insomniac.softban_indicator.softban_indicator.detect_empty_profile",
"insomniac.views.LikersListView",
"insomniac.views.OpenedPostView",
"insomniac.navigation.LanguageChangedException",
"insomniac.scroll_end_detector.ScrollEndDetector",
"insomniac.views.PostsViewList",
"insomniac.softban_indicator.softban_indicator.detect_empty_list",
"insomniac.actions_types.GetProfileAction",
"insomniac.sleeper.sleeper.random_sleep",
"insomniac.actions_types.StoryWatchAction",
"insomniac.views.ActionBarView.is_in_interaction_rect",
"insomniac.actions_types.LikeAction",
"insomniac.navigation.switch_to_english",
"insomniac.actions_types.FollowAction",
"insomniac.views.DialogView",
"random.uniform",
"insomniac.actions_types.CommentAction",
"random.choice",
"insomniac.navigation.search_for"
] |
[((4232, 4251), 'insomniac.scroll_end_detector.ScrollEndDetector', 'ScrollEndDetector', ([], {}), '()\n', (4249, 4251), False, 'from insomniac.scroll_end_detector import ScrollEndDetector\n'), ((16993, 17015), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (17013, 17015), False, 'from insomniac.sleeper import sleeper\n'), ((19171, 19193), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (19191, 19193), False, 'from insomniac.sleeper import sleeper\n'), ((19333, 19355), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (19353, 19355), False, 'from insomniac.sleeper import sleeper\n'), ((19360, 19414), 'insomniac.softban_indicator.softban_indicator.detect_action_blocked_dialog', 'softban_indicator.detect_action_blocked_dialog', (['device'], {}), '(device)\n', (19406, 19414), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((19780, 19802), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (19800, 19802), False, 'from insomniac.sleeper import sleeper\n'), ((22724, 22778), 'insomniac.softban_indicator.softban_indicator.detect_action_blocked_dialog', 'softban_indicator.detect_action_blocked_dialog', (['device'], {}), '(device)\n', (22770, 22778), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((28010, 28032), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (28030, 28032), False, 'from insomniac.sleeper import sleeper\n'), ((36043, 36061), 'insomniac.views.DialogView', 'DialogView', (['device'], {}), '(device)\n', (36053, 36061), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((36663, 36684), 'insomniac.views.PostsViewList', 'PostsViewList', (['device'], {}), '(device)\n', (36676, 36684), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((38089, 38111), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (38109, 38111), False, 'from insomniac.sleeper import sleeper\n'), ((38133, 38176), 'insomniac.softban_indicator.softban_indicator.detect_empty_list', 'softban_indicator.detect_empty_list', (['device'], {}), '(device)\n', (38168, 38176), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((17586, 17640), 'insomniac.softban_indicator.softban_indicator.detect_action_blocked_dialog', 'softban_indicator.detect_action_blocked_dialog', (['device'], {}), '(device)\n', (17632, 17640), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((18548, 18570), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (18568, 18570), False, 'from insomniac.sleeper import sleeper\n'), ((19073, 19094), 'random.choice', 'choice', (['comments_list'], {}), '(comments_list)\n', (19079, 19094), False, 'from random import shuffle, choice, uniform\n'), ((23796, 23809), 'random.uniform', 'uniform', (['(1)', '(5)'], {}), '(1, 5)\n', (23803, 23809), False, 'from random import shuffle, choice, uniform\n'), ((27257, 27303), 'insomniac.softban_indicator.softban_indicator.detect_empty_profile', 'softban_indicator.detect_empty_profile', (['device'], {}), '(device)\n', (27295, 27303), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((30494, 30516), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (30514, 
30516), False, 'from insomniac.sleeper import sleeper\n'), ((34718, 34740), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (34738, 34740), False, 'from insomniac.sleeper import sleeper\n'), ((34768, 34814), 'insomniac.softban_indicator.softban_indicator.detect_empty_profile', 'softban_indicator.detect_empty_profile', (['device'], {}), '(device)\n', (34806, 34814), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((35970, 35992), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (35990, 35992), False, 'from insomniac.sleeper import sleeper\n'), ((36229, 36251), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (36249, 36251), False, 'from insomniac.sleeper import sleeper\n'), ((36441, 36495), 'insomniac.softban_indicator.softban_indicator.detect_action_blocked_dialog', 'softban_indicator.detect_action_blocked_dialog', (['device'], {}), '(device)\n', (36487, 36495), False, 'from insomniac.softban_indicator import softban_indicator\n'), ((36753, 36775), 'insomniac.views.LikersListView', 'LikersListView', (['device'], {}), '(device)\n', (36767, 36775), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((37849, 37871), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (37869, 37871), False, 'from insomniac.sleeper import sleeper\n'), ((13089, 13112), 'random.shuffle', 'shuffle', (['photos_indices'], {}), '(photos_indices)\n', (13096, 13112), False, 'from random import shuffle, choice, uniform\n'), ((18086, 18138), 'insomniac.views.ActionBarView.is_in_interaction_rect', 'ActionBarView.is_in_interaction_rect', (['comment_button'], {}), '(comment_button)\n', (18122, 18138), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((20304, 20326), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (20324, 20326), False, 'from insomniac.sleeper import sleeper\n'), ((24411, 24433), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (24431, 24433), False, 'from insomniac.sleeper import sleeper\n'), ((27206, 27228), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (27226, 27228), False, 'from insomniac.sleeper import sleeper\n'), ((32199, 32235), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {'multiplier': '(2.0)'}), '(multiplier=2.0)\n', (32219, 32235), False, 'from insomniac.sleeper import sleeper\n'), ((34677, 34708), 'insomniac.actions_types.GetProfileAction', 'GetProfileAction', ([], {'user': 'username'}), '(user=username)\n', (34693, 34708), False, 'from insomniac.actions_types import LikeAction, FollowAction, GetProfileAction, StoryWatchAction, CommentAction\n'), ((35813, 35838), 'insomniac.navigation.switch_to_english', 'switch_to_english', (['device'], {}), '(device)\n', (35830, 35838), False, 'from insomniac.navigation import switch_to_english, search_for, LanguageChangedException\n'), ((35857, 35883), 'insomniac.navigation.LanguageChangedException', 'LanguageChangedException', ([], {}), '()\n', (35881, 35883), False, 'from insomniac.navigation import switch_to_english, search_for, LanguageChangedException\n'), ((38020, 38060), 'insomniac.views.ProfileView', 'ProfileView', (['device'], {'is_own_profile': '(True)'}), '(device, is_own_profile=True)\n', (38031, 38060), 
False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((4391, 4413), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (4411, 4413), False, 'from insomniac.sleeper import sleeper\n'), ((12572, 12594), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (12592, 12594), False, 'from insomniac.sleeper import sleeper\n'), ((14087, 14109), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (14107, 14109), False, 'from insomniac.sleeper import sleeper\n'), ((17548, 17570), 'insomniac.views.OpenedPostView', 'OpenedPostView', (['device'], {}), '(device)\n', (17562, 17570), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((22638, 22663), 'insomniac.navigation.switch_to_english', 'switch_to_english', (['device'], {}), '(device)\n', (22655, 22663), False, 'from insomniac.navigation import switch_to_english, search_for, LanguageChangedException\n'), ((24316, 24402), 'insomniac.actions_types.StoryWatchAction', 'StoryWatchAction', ([], {'source_name': 'source_name', 'source_type': 'source_type', 'user': 'username'}), '(source_name=source_name, source_type=source_type, user=\n username)\n', (24332, 24402), False, 'from insomniac.actions_types import LikeAction, FollowAction, GetProfileAction, StoryWatchAction, CommentAction\n'), ((25239, 25261), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (25259, 25261), False, 'from insomniac.sleeper import sleeper\n'), ((27104, 27162), 'insomniac.navigation.search_for', 'search_for', (['device'], {'username': 'username', 'on_action': 'on_action'}), '(device, username=username, on_action=on_action)\n', (27114, 27162), False, 'from insomniac.navigation import switch_to_english, search_for, LanguageChangedException\n'), ((31652, 31674), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (31672, 31674), False, 'from insomniac.sleeper import sleeper\n'), ((36400, 36422), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (36420, 36422), False, 'from insomniac.sleeper import sleeper\n'), ((13461, 13536), 'insomniac.actions_types.LikeAction', 'LikeAction', ([], {'source_name': 'user_source', 'source_type': 'source_type', 'user': 'username'}), '(source_name=user_source, source_type=source_type, user=username)\n', (13471, 13536), False, 'from insomniac.actions_types import LikeAction, FollowAction, GetProfileAction, StoryWatchAction, CommentAction\n'), ((13773, 13873), 'insomniac.actions_types.CommentAction', 'CommentAction', ([], {'source_name': 'user_source', 'source_type': 'source_type', 'user': 'username', 'comment': 'comment'}), '(source_name=user_source, source_type=source_type, user=\n username, comment=comment)\n', (13786, 13873), False, 'from insomniac.actions_types import LikeAction, FollowAction, GetProfileAction, StoryWatchAction, CommentAction\n'), ((15173, 15250), 'insomniac.actions_types.FollowAction', 'FollowAction', ([], {'source_name': 'user_source', 'source_type': 'source_type', 'user': 'username'}), '(source_name=user_source, source_type=source_type, user=username)\n', (15185, 15250), False, 'from insomniac.actions_types import LikeAction, FollowAction, GetProfileAction, StoryWatchAction, CommentAction\n'), ((16583, 16605), 'insomniac.views.OpenedPostView', 'OpenedPostView', (['device'], {}), '(device)\n', 
(16597, 16605), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((26213, 26253), 'insomniac.views.ProfileView', 'ProfileView', (['device'], {'is_own_profile': '(True)'}), '(device, is_own_profile=True)\n', (26224, 26253), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((26360, 26400), 'insomniac.views.ProfileView', 'ProfileView', (['device'], {'is_own_profile': '(True)'}), '(device, is_own_profile=True)\n', (26371, 26400), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((27452, 27492), 'insomniac.views.ProfileView', 'ProfileView', (['device'], {'is_own_profile': '(True)'}), '(device, is_own_profile=True)\n', (27463, 27492), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((27612, 27652), 'insomniac.views.ProfileView', 'ProfileView', (['device'], {'is_own_profile': '(True)'}), '(device, is_own_profile=True)\n', (27623, 27652), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((16766, 16788), 'insomniac.views.OpenedPostView', 'OpenedPostView', (['device'], {}), '(device)\n', (16780, 16788), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((24904, 24923), 'insomniac.views.ProfileView', 'ProfileView', (['device'], {}), '(device)\n', (24915, 24923), False, 'from insomniac.views import ActionBarView, ProfileView, PostsViewList, OpenedPostView, LikersListView, DialogView\n'), ((7728, 7750), 'insomniac.sleeper.sleeper.random_sleep', 'sleeper.random_sleep', ([], {}), '()\n', (7748, 7750), False, 'from insomniac.sleeper import sleeper\n')]
|
import spacy
nlp = spacy.load("zh_core_web_sm")
text = (
"在300多年的风雨历程中,历代同仁堂人始终恪守“炮制虽繁必不敢省人工,品味虽贵必不敢减物力”的古训,"
"树立“修合无人见,存心有天知”的自律意识,造就了制药过程中兢兢小心、精益求精的严细精神。"
)
# Disable the tagger and parser
with nlp.disable_pipes("tagger", "parser"):
    # Process the text
doc = nlp(text)
    # Print the entities in the doc
print(doc.ents)
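# Note (depends on your spaCy version): on spaCy v3 the preferred spelling is
# nlp.select_pipes(disable=["tagger", "parser"]); disable_pipes still works as
# a deprecated alias.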
|
[
"spacy.load"
] |
[((20, 48), 'spacy.load', 'spacy.load', (['"""zh_core_web_sm"""'], {}), "('zh_core_web_sm')\n", (30, 48), False, 'import spacy\n')]
|
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
#
# Project:
# glideinWMS
#
# File Version:
#
# Description:
# This module contains the generic params classes
#
# Extracted from:
# cgWParams.py
#
# Author:
# <NAME>
#
import copy
import os
import os.path
import string
import sys
import xml.parsers.expat
from collections.abc import Mapping
from glideinwms.lib import xmlFormat, xmlParse
from glideinwms.lib.util import chmod
from glideinwms.lib.xmlParse import OrderedDict
class SubParams(Mapping):
"""Read-only dictionary containing Configuration info"""
def __init__(self, data):
"""Constructor, only method changing the value"""
self.data = data
def __repr__(self):
return self.data.__repr__()
# Abstract methods to implement for the Mapping
def __getitem__(self, key):
return self.__get_el(key)
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __getattr__(self, name):
"""Make data elements look like class attributes
This is called if the interpreter failed to reference an attribute
Args:
name (str): attribute name
Returns:
attribute value
        Raises:
AttributeError: if looking for 'data' or protected attributes that have not been defined in the class
"""
if name == "data":
# needed because copy/deepcopy and pickle call __getattr__ on objects that have not been initialized
# they will catch and ignore the AttributeError exception
raise AttributeError("%r has no attribute data: the init method has not been called" % type(self))
# work around for pickle bug in Python 3.4
# see http://bugs.python.org/issue16251
if name == "__getnewargs_ex__" or name == "__getnewargs__":
raise AttributeError(f"{type(self)!r} has no attribute {name!r}")
if name == "__deepcopy__" or name == "__copy__":
raise AttributeError(f"{type(self)!r} has no attribute {name!r}")
# if not name in self.data:
if name.startswith("__"):
raise AttributeError(f"{type(self)!r} has no attribute {name!r}")
return self.__get_el(name)
#
# PROTECTED
#
def validate(self, base, path_text):
"""Validate input against base template (i.e. the defaults)
Args:
base:
path_text:
Returns:
"""
        for k in self.data:
if k not in base:
# element not in base, report
raise RuntimeError(f"Unknown parameter {path_text}.{k}")
else:
# verify sub-elements, if any
defel = base[k]
if isinstance(defel, OrderedDict):
# subdictionary
self[k].validate(defel, f"{path_text}.{k}")
else:
# final element
defvalue, ktype, txt, subdef = defel
if isinstance(defvalue, OrderedDict):
                        # dictionary of elements
data_el = self[k]
for data_subkey in list(data_el.keys()):
data_el[data_subkey].validate(subdef, f"{path_text}.{k}.{data_subkey}")
elif isinstance(defvalue, list):
# list of elements
if isinstance(self.data[k], OrderedDict):
if len(list(self.data[k].keys())) == 0:
                                self.data[k] = []  # XML does not know if an empty list is a dictionary or not... fix this
mylist = self[k]
if not isinstance(mylist, list):
raise RuntimeError(f"Parameter {path_text}.{k} not a list: {type(mylist)} {mylist}")
for data_el in mylist:
data_el.validate(subdef, f"{path_text}.*.{k}")
else:
# a simple value
pass # nothing to be done
def use_defaults(self, defaults):
"""Put default values where there is nothing
Args:
defaults:
Returns:
"""
for k in list(defaults.keys()):
defel = defaults[k]
if isinstance(defel, OrderedDict):
# subdictionary
if k not in self.data:
self.data[k] = OrderedDict() # first create empty, if does not exist
# then, set defaults on all elements of subdictionary
self[k].use_defaults(defel)
else:
# final element
defvalue, ktype, txt, subdef = defel
if isinstance(defvalue, OrderedDict):
                    # dictionary of elements
if k not in self.data:
                        self.data[k] = OrderedDict()  # no elements yet, set an empty dictionary
else:
# need to set defaults on all elements in the dictionary
data_el = self[k]
for data_subkey in list(data_el.keys()):
data_el[data_subkey].use_defaults(subdef)
elif isinstance(defvalue, list):
# list of elements
if k not in self.data:
                        self.data[k] = []  # no elements yet, set an empty list
else:
# need to set defaults on all elements in the list
for data_el in self[k]:
data_el.use_defaults(subdef)
else:
# a simple value
if k not in self.data:
self.data[k] = copy.deepcopy(defvalue)
# else nothing to do, already set
#
# PRIVATE
#
def __get_el(self, name):
"""Element getter, used by both __getitem__ and __getattr__
Args:
name (str): key or attribute name
Returns:
value
Raises:
KeyError: when the key/attribute name is not in self.data
"""
try:
el = self.data[name]
except KeyError:
# This function is used also in __getattr__ which is expected to raise AttributeError
# Some methods with workarounds or defaults for missing attributes (hasattr, getattr, ...)
# do expect AttributeError and not KeyError
raise # AttributeError("%s object has no attribute/key %s" % (type(self), name))
if isinstance(el, OrderedDict):
return self.__class__(el)
elif isinstance(el, list):
outlst = []
for k in el:
if isinstance(k, OrderedDict):
outlst.append(self.__class__(k))
else:
outlst.append(k)
return outlst
else:
return el
class Params:
"""abstract class
Children must define:
get_top_element(self)
init_defaults(self)
derive(self)
get_xml_format(self)
"""
def __init__(self, usage_prefix, src_dir, argv):
"""Constructor. Load the default values and override with the config file content
Args:
usage_prefix:
src_dir (str): source directory of the config file(s)
argv (list): TODO: this way for historical reasons, should probably be refactored
[0] is the caller, sys.argv[0] (NOT USED)
[1] can be the config file or '-help'
it seems the length used is always 2, other elements are NOT USED
"""
self.usage_prefix = usage_prefix
# support dir
self.src_dir = src_dir
# initialize the defaults
self.defaults = OrderedDict()
self.init_defaults()
try:
if len(argv) < 2:
raise RuntimeError("Missing config file")
if argv[1] == "-help":
raise RuntimeError(
"\nA config file will contain:\n%s\n\nThe config file will be in XML format."
% self.get_description(" ")
)
self.cfg_name = os.path.abspath(argv[1])
self.load_file(self.cfg_name)
self.subparams.validate(self.defaults, self.get_top_element())
# make a copy of the loaded data, so that I can always tell what was derived and what was not
self.org_data = copy.deepcopy(self.data)
self.subparams.use_defaults(self.defaults)
# create derived values
self.derive()
except RuntimeError as e:
raise RuntimeError("Unexpected error occurred loading the configuration file.\n\n%s" % e)
def derive(self):
return # by default nothing... children should overwrite this
def get_xml(self):
old_default_ignore_nones = xmlFormat.DEFAULT_IGNORE_NONES
old_default_lists_params = xmlFormat.DEFAULT_LISTS_PARAMS
old_default_dicts_params = xmlFormat.DEFAULT_DICTS_PARAMS
xmlFormat.DEFAULT_IGNORE_NONES = True
# these are used internally, do not need to be ordered
xml_format = self.get_xml_format()
xmlFormat.DEFAULT_LISTS_PARAMS = xml_format["lists_params"]
xmlFormat.DEFAULT_DICTS_PARAMS = xml_format["dicts_params"]
        # hack needed to make xmlFormat properly do the formatting, using override_dictionary_type
dict_override = type(OrderedDict())
out = xmlFormat.class2string(self.data, self.get_top_element(), override_dictionary_type=dict_override)
xmlFormat.DEFAULT_IGNORE_NONES = old_default_ignore_nones
xmlFormat.DEFAULT_LISTS_PARAMS = old_default_lists_params
xmlFormat.DEFAULT_DICTS_PARAMS = old_default_dicts_params
return out
def get_description(self, indent="", width=80):
return defdict2string(self.defaults, indent, width)
def load_file(self, fname):
"""Load from a file
one element per line
-opt val
Args:
fname:
Returns:
"""
if fname == "-":
fname = sys.stdin
try:
self.data = xmlParse.xmlfile2dict(fname, use_ord_dict=True)
except xml.parsers.expat.ExpatError as e:
raise RuntimeError("XML error parsing config file: %s" % e)
except OSError as e:
raise RuntimeError("Config file error: %s" % e)
self.subparams = self.get_subparams_class()(self.data)
return
def __eq__(self, other):
if other is None:
return False
if not isinstance(other, Params):
return False
return self.subparams == other.subparams
def __getattr__(self, name):
"""__getattr__ is called if the object (Params subclass) has not the 'name' attribute
Return the attribute from the included SubParam objects (self.subparams)
Args:
name (str): name of the attribute
Returns:
value of the attribute
Raises:
AttributeError: when subparams is requested
"""
if name == "subparams":
# if there is no subparams, it cannot be used to retrieve values (of itself!)
# this can happen w/ deepcopy or pickle, where __init__ is not called
raise AttributeError(f"{type(self)!r} has no attribute {name!r}")
return self.subparams.__getattr__(name)
def save_into_file(self, fname, set_ro=False):
"""Save into a file
The file should be usable for reload
Args:
fname:
set_ro:
Returns:
"""
with open(fname, "w") as fd:
fd.write(self.get_xml())
fd.write("\n")
if set_ro:
chmod(fname, os.stat(fname)[0] & 0o444)
return
def save_into_file_wbackup(self, fname, set_ro=False):
"""Save into a file (making a backup)
The file should be usable for reload
Args:
fname:
set_ro:
Returns:
"""
# rewrite config file (write tmp file first)
tmp_name = "%s.tmp" % fname
try:
os.unlink(tmp_name)
except:
pass # just protect
self.save_into_file(tmp_name)
# also save old one with backup name
backup_name = "%s~" % fname
try:
os.unlink(backup_name)
except:
pass # just protect
try:
os.rename(fname, backup_name)
# make it user writable
chmod(backup_name, (os.stat(backup_name)[0] & 0o666) | 0o200)
except:
pass # just protect
# finally rename to the proper name
os.rename(tmp_name, fname)
if set_ro:
chmod(fname, os.stat(fname)[0] & 0o444)
# used internally to define subtype class
def get_subparams_class(self):
return SubParams
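# Illustrative sketch (hypothetical, not part of glideinWMS): a minimal
# concrete Params subclass wiring up the four methods the Params docstring
# says children must define.
class DemoParams(Params):
    def get_top_element(self):
        return "demo"  # name of the XML root element
    def init_defaults(self):
        # each leaf entry is a (default value, type, description, sub-defaults) tuple
        self.defaults["loglevel"] = ("INFO", "string", "Logging verbosity.", None)
    def derive(self):
        pass  # no derived values in this sketch
    def get_xml_format(self):
        # keys consumed by get_xml() above
        return {"lists_params": {}, "dicts_params": {}}
# e.g. DemoParams("demo", ".", [sys.argv[0], "demo.xml"]) would parse demo.xml,
# validate it against the defaults, and fill in any missing values.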
class CommentedOrderedDict(OrderedDict):
"""Ordered dictionary with comment support"""
def __init__(self, indict=None):
# TODO: double check restriction, all can be removed?
# cannot call directly the parent due to the particular implementation restrictions
# self._keys = []
# #was: UserDict.__init__(self, dict)
# OrderedDict.__init__(self, indict)
# super().__init__(indict)
self._keys = []
xmlParse.UserDict.__init__(self, indict)
self["comment"] = (None, "string", "Humman comment, not used by the code", None)
####################################################################
# INTERNAL, don't use directly
# Use the class definition instead
#
def extract_attr_val(attr_obj):
"""Return attribute value in the proper python format
INTERNAL, don't use directly
Use the class definition instead
Args:
attr_obj:
Returns:
"""
    if attr_obj.type not in ("string", "int", "expr"):
        raise RuntimeError("Wrong attribute type '%s', must be 'string', 'int' or 'expr'" % attr_obj.type)
if attr_obj.type in ("string", "expr"):
return str(attr_obj.value)
else:
return int(attr_obj.value)
######################################################
# Define common defaults
class CommonSubParams(SubParams):
# return attribute value in the proper python format
def extract_attr_val(self, attr_obj):
return extract_attr_val(attr_obj)
class CommonParams(Params):
# populate self.defaults
def init_support_defaults(self):
# attributes are generic, shared between frontend and factory
self.attr_defaults = CommentedOrderedDict()
self.attr_defaults["value"] = (None, "Value", "Value of the attribute (string)", None)
self.attr_defaults["parameter"] = ("True", "Bool", "Should it be passed as a parameter?", None)
self.attr_defaults["glidein_publish"] = (
"False",
"Bool",
"Should it be published by the glidein? (Used only if parameter is True.)",
None,
)
self.attr_defaults["job_publish"] = (
"False",
"Bool",
"Should the glidein publish it to the job? (Used only if parameter is True.)",
None,
)
self.attr_defaults["type"] = ["string", "string|int", "What kind on data is value.", None]
# most file attributes are generic, shared between frontend and factory
self.file_defaults = CommentedOrderedDict()
self.file_defaults["absfname"] = (None, "fname", "File name on the local disk.", None)
self.file_defaults["relfname"] = (
None,
"fname",
"Name of the file once it gets to the worker node. (defaults to the last part of absfname)",
None,
)
self.file_defaults["const"] = (
"True",
"Bool",
"Will the file be constant? If True, the file will be signed. If False, it can be modified at any time and will not be cached.",
None,
)
self.file_defaults["executable"] = (
"False",
"Bool",
"Is this an executable that needs to be run in the glidein?",
None,
)
self.file_defaults["wrapper"] = (
"False",
"Bool",
"Is this a wrapper script that needs to be sourced in the glidein job wrapper?",
None,
)
self.file_defaults["untar"] = ("False", "Bool", "Do I need to untar it? ", None)
self.file_defaults["period"] = (0, "int", 'Re-run the executable every "period" seconds if > 0.', None)
self.file_defaults["prefix"] = ("GLIDEIN_PS_", "string", "Prefix used for periodic jobs (STARTD_CRON).", None)
self.file_defaults["type"] = (
None,
"string",
'File type (regular,run,source). Allows modifiers like ":singularity" to run in singularity.',
None,
)
# TODO: consider adding "time" setup, prejob, postjob, cleanup, periodic. setup & cleanup w/ qualifier :bebg-aeag before/after entry + before/after group og na (group positioning does not apply to factory files)
# to add check scripts around jobs: self.file_defaults["job_wrap"]=("no","pre|post|no",'Run the executable before (pre) or after (post) each job.',None)
untar_defaults = CommentedOrderedDict()
untar_defaults["cond_attr"] = (
"TRUE",
"attrname",
"If not the special value TRUE, the attribute name used at runtime to determine if the file should be untarred or not.",
None,
)
untar_defaults["dir"] = (
None,
"dirname",
"Subdirectory in which to untar. (defaults to relname up to first .)",
None,
)
untar_defaults["absdir_outattr"] = (
None,
"attrname",
"Attribute to be set to the abs dir name where the tarball was unpacked. Will be defined only if untar effectively done. (Not defined if None)",
None,
)
self.file_defaults["untar_options"] = untar_defaults
self.monitor_defaults = CommentedOrderedDict()
self.monitor_defaults["javascriptRRD_dir"] = (
os.path.join(self.src_dir, "../../externals/flot"),
"base_dir",
"Location of the javascriptRRD library.",
None,
)
self.monitor_defaults["flot_dir"] = (
os.path.join(self.src_dir, "../../externals/flot"),
"base_dir",
"Location of the flot library.",
None,
)
self.monitor_defaults["jquery_dir"] = (
os.path.join(self.src_dir, "../../externals/jquery"),
"base_dir",
"Location of the jquery library.",
None,
)
return
def get_subparams_class(self):
return CommonSubParams
# return attribute value in the proper python format
def extract_attr_val(self, attr_obj):
return extract_attr_val(attr_obj)
################################################
# only allow ASCII letters, digits and a few punctuation characters
# no spaces, no special characters or other punctuation
VALID_NAME_CHARS = string.ascii_letters + string.digits + "._-"
def is_valid_name(name):
"""Check if a string can be used as a valid name
    Whitelist based:
        only allow ASCII letters, digits and a few punctuation characters
        no spaces, no special characters or other punctuation
Args:
name (str): name to validate
Returns:
bool: True if the name is not empty and has only valid characters, False otherwise
"""
# empty name is not valid
if name is None:
return False
if name == "":
return False
for c in name:
if not (c in VALID_NAME_CHARS):
return False
return True
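# A few illustrative checks of the whitelist:
assert is_valid_name("entry_CMS-1.main")   # letters, digits, '.', '_', '-'
assert not is_valid_name("bad name!")      # space and '!' are rejected
assert not is_valid_name("")               # empty names are invalid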
############################################################
#
# P R I V A T E - Do not use
#
############################################################
def col_wrap(text, width, indent):
"""Wrap a text string to a fixed length
Args:
text (str): string to wrap
width (int): length
indent (str): indentation string
Returns:
"""
short_text, next_char = shorten_text(text, width)
if len(short_text) != len(text): # was shortened
# print short_text
org_short_text = short_text[0:]
# make sure you are not breaking words.
while not (next_char in ("", " ", "\t")):
if len(short_text) == 0:
# could not break on word boundary, leave as is
short_text = org_short_text
break
next_char = short_text[-1]
short_text = short_text[:-1]
if len(short_text) <= len(indent):
# too short, just split as it was
short_text = org_short_text
# calc next lines
subtext = col_wrap(indent + text[len(short_text) :].lstrip(" \t"), width, indent)
# glue
return short_text + "\n" + subtext
else:
return text
def shorten_text(text, width):
"""Shorten text, make sure you properly account tabs
Tabs are every 8 spaces (counted as number of chars to the next tab stop)
Args:
text (str): text to shorten
width (int): length
Returns (tuple):
        shortened text (str): the text up to the cut point
        next char (str): the character at the cut point ("" if nothing was cut)
"""
count = 0
idx = 0
for c in text:
if count >= width:
return (text[:idx], c)
if c == "\t":
            count = ((count + 8) // 8) * 8  # advance to the next tab stop (multiple of 8)
if count > width:
return (text[:idx], c)
idx = idx + 1
else:
count = count + 1
idx = idx + 1
return (text[:idx], "")
def defdict2string(defaults, indent, width=80):
"""Convert defualts to a string
Args:
defaults:
indent:
width:
Returns:
"""
outstrarr = []
keys = sorted(defaults.keys())
final_keys = []
# put simple elements first
for k in keys:
el = defaults[k]
if not isinstance(el, OrderedDict):
defvalue, ktype, txt, subdef = el
if subdef is None:
final_keys.append(k)
    # then sub-dictionaries and elements with sub-definitions
for k in keys:
el = defaults[k]
if isinstance(el, OrderedDict):
final_keys.append(k)
else:
defvalue, ktype, txt, subdef = el
if subdef is not None:
final_keys.append(k)
for k in final_keys:
el = defaults[k]
if isinstance(el, OrderedDict): # sub-dictionary
outstrarr.append(f"{indent}{k}:" + "\n" + defdict2string(el, indent + "\t", width))
else:
# print el
defvalue, ktype, txt, subdef = el
wrap_indent = indent + " " * len(f"{k}({ktype}) - ")
if subdef is not None:
if isinstance(defvalue, OrderedDict):
dict_subdef = copy.deepcopy(subdef)
dict_subdef["name"] = (None, "name", "Name", None)
outstrarr.append(
col_wrap(f"{indent}{k}({ktype}) - {txt}:", width, wrap_indent)
+ "\n"
+ defdict2string(dict_subdef, indent + "\t", width)
)
else:
outstrarr.append(
col_wrap(f"{indent}{k}({ktype}) - {txt}:", width, wrap_indent)
+ "\n"
+ defdict2string(subdef, indent + "\t", width)
)
else:
outstrarr.append(col_wrap(f"{indent}{k}({ktype}) - {txt} [{defvalue}]", width, wrap_indent))
return "\n".join(outstrarr)
|
[
"copy.deepcopy",
"os.path.abspath",
"os.unlink",
"glideinwms.lib.xmlParse.UserDict.__init__",
"os.stat",
"os.rename",
"glideinwms.lib.xmlParse.xmlfile2dict",
"glideinwms.lib.xmlParse.OrderedDict",
"os.path.join"
] |
[((8169, 8182), 'glideinwms.lib.xmlParse.OrderedDict', 'OrderedDict', ([], {}), '()\n', (8180, 8182), False, 'from glideinwms.lib.xmlParse import OrderedDict\n'), ((13167, 13193), 'os.rename', 'os.rename', (['tmp_name', 'fname'], {}), '(tmp_name, fname)\n', (13176, 13193), False, 'import os\n'), ((13849, 13889), 'glideinwms.lib.xmlParse.UserDict.__init__', 'xmlParse.UserDict.__init__', (['self', 'indict'], {}), '(self, indict)\n', (13875, 13889), False, 'from glideinwms.lib import xmlFormat, xmlParse\n'), ((8580, 8604), 'os.path.abspath', 'os.path.abspath', (['argv[1]'], {}), '(argv[1])\n', (8595, 8604), False, 'import os\n'), ((8858, 8882), 'copy.deepcopy', 'copy.deepcopy', (['self.data'], {}), '(self.data)\n', (8871, 8882), False, 'import copy\n'), ((9872, 9885), 'glideinwms.lib.xmlParse.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9883, 9885), False, 'from glideinwms.lib.xmlParse import OrderedDict\n'), ((10594, 10641), 'glideinwms.lib.xmlParse.xmlfile2dict', 'xmlParse.xmlfile2dict', (['fname'], {'use_ord_dict': '(True)'}), '(fname, use_ord_dict=True)\n', (10615, 10641), False, 'from glideinwms.lib import xmlFormat, xmlParse\n'), ((12614, 12633), 'os.unlink', 'os.unlink', (['tmp_name'], {}), '(tmp_name)\n', (12623, 12633), False, 'import os\n'), ((12828, 12850), 'os.unlink', 'os.unlink', (['backup_name'], {}), '(backup_name)\n', (12837, 12850), False, 'import os\n'), ((12925, 12954), 'os.rename', 'os.rename', (['fname', 'backup_name'], {}), '(fname, backup_name)\n', (12934, 12954), False, 'import os\n'), ((18738, 18788), 'os.path.join', 'os.path.join', (['self.src_dir', '"""../../externals/flot"""'], {}), "(self.src_dir, '../../externals/flot')\n", (18750, 18788), False, 'import os\n'), ((18954, 19004), 'os.path.join', 'os.path.join', (['self.src_dir', '"""../../externals/flot"""'], {}), "(self.src_dir, '../../externals/flot')\n", (18966, 19004), False, 'import os\n'), ((19163, 19215), 'os.path.join', 'os.path.join', (['self.src_dir', '"""../../externals/jquery"""'], {}), "(self.src_dir, '../../externals/jquery')\n", (19175, 19215), False, 'import os\n'), ((4684, 4697), 'glideinwms.lib.xmlParse.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4695, 4697), False, 'from glideinwms.lib.xmlParse import OrderedDict\n'), ((23597, 23618), 'copy.deepcopy', 'copy.deepcopy', (['subdef'], {}), '(subdef)\n', (23610, 23618), False, 'import copy\n'), ((5139, 5152), 'glideinwms.lib.xmlParse.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5150, 5152), False, 'from glideinwms.lib.xmlParse import OrderedDict\n'), ((12222, 12236), 'os.stat', 'os.stat', (['fname'], {}), '(fname)\n', (12229, 12236), False, 'import os\n'), ((13238, 13252), 'os.stat', 'os.stat', (['fname'], {}), '(fname)\n', (13245, 13252), False, 'import os\n'), ((6041, 6064), 'copy.deepcopy', 'copy.deepcopy', (['defvalue'], {}), '(defvalue)\n', (6054, 6064), False, 'import copy\n'), ((13023, 13043), 'os.stat', 'os.stat', (['backup_name'], {}), '(backup_name)\n', (13030, 13043), False, 'import os\n')]
|
import torch
import numpy as np
from torch.utils.data import Dataset
class DomainAdaptationMoonDataset(Dataset):
r"""Domain adaptation version of the moon dataset object to iterate and collect samples.
"""
def __init__(self, data):
self.xs, self.ys, self.xt, self.yt = data
def __len__(self):
return self.xs.shape[0]
def __getitem__(self, idx):
xs = self.xs[idx]
ys = self.ys[idx]
xt = self.xt[idx]
yt = self.yt[idx]
# convert to tensors
xs = torch.from_numpy(xs.astype(np.float32))
ys = torch.from_numpy(np.array(ys).astype(np.int64))
xt = torch.from_numpy(xt.astype(np.float32))
yt = torch.from_numpy(np.array(yt).astype(np.int64))
return xs, ys, xt, yt
def create_domain_adaptation_data(config):
"""Creates a domain adaptation version of the moon datasets and dataloader"""
# load data from file
Xs_train = np.load(config.dataloader.MoonsNS.source_train_x)
Ys_train = np.argmax(np.load(config.dataloader.MoonsNS.source_train_y), axis=1)
Xt_train = np.load(config.dataloader.MoonsNS.target_train_x)
Yt_train = np.argmax(np.load(config.dataloader.MoonsNS.target_train_y), axis=1)
Xs_eval = np.load(config.dataloader.MoonsNS.source_valid_x)
Ys_eval = np.argmax(np.load(config.dataloader.MoonsNS.source_valid_y), axis=1)
Xt_eval = np.load(config.dataloader.MoonsNS.target_valid_x)
Yt_eval = np.argmax(np.load(config.dataloader.MoonsNS.target_valid_y), axis=1)
Xs_test = np.load(config.dataloader.MoonsNS.source_test_x)
Ys_test = np.argmax(np.load(config.dataloader.MoonsNS.source_test_y), axis=1)
Xt_test = np.load(config.dataloader.MoonsNS.target_test_x)
Yt_test = np.argmax(np.load(config.dataloader.MoonsNS.target_test_y), axis=1)
if config.dataloader.MoonsNS.loading_schema == 'train-eval':
train_loader = torch.utils.data.DataLoader(
DomainAdaptationMoonDataset((Xs_train, Ys_train, Xt_train, Yt_train)),
batch_size=config.trainer.batchsize,
shuffle=True
)
eval_loader = torch.utils.data.DataLoader(
DomainAdaptationMoonDataset((Xs_eval, Ys_eval, Xt_eval, Yt_eval)),
batch_size=config.trainer.batchsize,
shuffle=False
)
return (train_loader, eval_loader), (Xs_train, Xs_eval, Ys_train, Ys_eval, Xt_train, Xt_eval, Yt_train, Yt_eval)
elif config.dataloader.MoonsNS.loading_schema == 'train-test':
train_loader = torch.utils.data.DataLoader(
DomainAdaptationMoonDataset((Xs_train, Ys_train, Xt_train, Yt_train)),
batch_size=config.trainer.batchsize,
shuffle=True
)
eval_loader = torch.utils.data.DataLoader(
DomainAdaptationMoonDataset((Xs_eval, Ys_eval, Xt_eval, Yt_eval)),
batch_size=config.trainer.batchsize,
shuffle=False
)
test_loader = torch.utils.data.DataLoader(
DomainAdaptationMoonDataset((Xs_test, Ys_test, Xt_test, Yt_test)),
batch_size=config.trainer.batchsize,
shuffle=False
)
return (train_loader, eval_loader, test_loader), (Xs_train, Xs_eval, Xs_test, Ys_train, Ys_eval, Ys_test, \
Xt_train, Xt_eval, Xt_test, Yt_train, Yt_eval, Yt_test)
elif config.dataloader.MoonsNS.loading_schema == 'test':
test_loader = torch.utils.data.DataLoader(
DomainAdaptationMoonDataset((Xs_test, Ys_test, Xt_test, Yt_test)),
batch_size=config.trainer.batchsize,
shuffle=False
)
return (test_loader), (Xs_test, Ys_test, Xt_test, Yt_test)
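# Illustrative wiring sketch (paths are hypothetical placeholders): the config
# only needs attribute-style access (config.dataloader.MoonsNS.*,
# config.trainer.batchsize), so a SimpleNamespace tree is enough. Note that
# the labels are stored one-hot on disk, hence the np.argmax(..., axis=1) above.
if __name__ == '__main__':
    from types import SimpleNamespace
    moons = SimpleNamespace(
        source_train_x='Xs_train.npy', source_train_y='Ys_train.npy',
        target_train_x='Xt_train.npy', target_train_y='Yt_train.npy',
        source_valid_x='Xs_eval.npy', source_valid_y='Ys_eval.npy',
        target_valid_x='Xt_eval.npy', target_valid_y='Yt_eval.npy',
        source_test_x='Xs_test.npy', source_test_y='Ys_test.npy',
        target_test_x='Xt_test.npy', target_test_y='Yt_test.npy',
        loading_schema='train-eval',
    )
    config = SimpleNamespace(
        dataloader=SimpleNamespace(MoonsNS=moons),
        trainer=SimpleNamespace(batchsize=32),
    )
    loaders, arrays = create_domain_adaptation_data(config)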
|
[
"numpy.load",
"numpy.array"
] |
[((944, 993), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.source_train_x'], {}), '(config.dataloader.MoonsNS.source_train_x)\n', (951, 993), True, 'import numpy as np\n'), ((1093, 1142), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.target_train_x'], {}), '(config.dataloader.MoonsNS.target_train_x)\n', (1100, 1142), True, 'import numpy as np\n'), ((1242, 1291), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.source_valid_x'], {}), '(config.dataloader.MoonsNS.source_valid_x)\n', (1249, 1291), True, 'import numpy as np\n'), ((1389, 1438), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.target_valid_x'], {}), '(config.dataloader.MoonsNS.target_valid_x)\n', (1396, 1438), True, 'import numpy as np\n'), ((1537, 1585), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.source_test_x'], {}), '(config.dataloader.MoonsNS.source_test_x)\n', (1544, 1585), True, 'import numpy as np\n'), ((1682, 1730), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.target_test_x'], {}), '(config.dataloader.MoonsNS.target_test_x)\n', (1689, 1730), True, 'import numpy as np\n'), ((1019, 1068), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.source_train_y'], {}), '(config.dataloader.MoonsNS.source_train_y)\n', (1026, 1068), True, 'import numpy as np\n'), ((1168, 1217), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.target_train_y'], {}), '(config.dataloader.MoonsNS.target_train_y)\n', (1175, 1217), True, 'import numpy as np\n'), ((1316, 1365), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.source_valid_y'], {}), '(config.dataloader.MoonsNS.source_valid_y)\n', (1323, 1365), True, 'import numpy as np\n'), ((1463, 1512), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.target_valid_y'], {}), '(config.dataloader.MoonsNS.target_valid_y)\n', (1470, 1512), True, 'import numpy as np\n'), ((1610, 1658), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.source_test_y'], {}), '(config.dataloader.MoonsNS.source_test_y)\n', (1617, 1658), True, 'import numpy as np\n'), ((1755, 1803), 'numpy.load', 'np.load', (['config.dataloader.MoonsNS.target_test_y'], {}), '(config.dataloader.MoonsNS.target_test_y)\n', (1762, 1803), True, 'import numpy as np\n'), ((600, 612), 'numpy.array', 'np.array', (['ys'], {}), '(ys)\n', (608, 612), True, 'import numpy as np\n'), ((714, 726), 'numpy.array', 'np.array', (['yt'], {}), '(yt)\n', (722, 726), True, 'import numpy as np\n')]
|
"""
@created by: heyao
@created at: 2021-12-09 13:30:09
"""
import random
import os
import numpy as np
import torch
def seed_everything(seed=42):
random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
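# Usage sketch: call once at program start, before building models or data
# loaders. Caveat (general CPython behaviour, not specific to this helper):
# assigning PYTHONHASHSEED inside a running interpreter does not retroactively
# change str hash randomization; export it before launching Python if needed.
if __name__ == "__main__":
    seed_everything(42)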
|
[
"numpy.random.seed",
"torch.manual_seed",
"torch.cuda.manual_seed",
"torch.cuda.is_available",
"random.seed"
] |
[((153, 170), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (164, 170), False, 'import random\n'), ((218, 238), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (232, 238), True, 'import numpy as np\n'), ((243, 266), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (260, 266), False, 'import torch\n'), ((274, 299), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (297, 299), False, 'import torch\n'), ((309, 337), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (331, 337), False, 'import torch\n')]
|
#!/usr/bin/env python
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, sys
import logging
import argparse
import random
import tempfile
import time
import pdo.common.config as pconfig
import pdo.common.logger as plogger
import pdo.common.crypto as pcrypto
import pdo.common.utility as putils
from pdo.common.keys import ServiceKeys
from pdo.contract import ContractCode
from pdo.contract import ContractState
from pdo.contract import Contract
from pdo.contract import register_contract
from pdo.contract import add_enclave_to_contract
from pdo.service_client.enclave import EnclaveServiceClient
from pdo.service_client.provisioning import ProvisioningServiceClient
import toxaway.models.contract
logger = logging.getLogger(__name__)
## -----------------------------------------------------------------
def AddEnclaveSecrets(ledger_config, contract_id, client_keys, enclaveclients, provclients) :
secrets = {}
encrypted_state_encryption_keys = {}
for enclaveclient in enclaveclients:
psecrets = []
for provclient in provclients:
# Get a pspk:esecret pair from the provisioning service for each enclave
sig_payload = pcrypto.string_to_byte_array(enclaveclient.enclave_id + contract_id)
secretinfo = provclient.get_secret(enclaveclient.enclave_id,
contract_id,
client_keys.verifying_key,
client_keys.sign(sig_payload))
logger.debug("pservice secretinfo: %s", secretinfo)
# Add this pspk:esecret pair to the list
psecrets.append(secretinfo)
# Print all of the secret pairs generated for this particular enclave
logger.debug('psecrets for enclave %s : %s', enclaveclient.enclave_id, psecrets)
# Verify those secrets with the enclave
esresponse = enclaveclient.verify_secrets(contract_id, client_keys.verifying_key, psecrets)
logger.debug("verify_secrets response: %s", esresponse)
        # Store the ESEK mapping in a dictionary keyed by the enclave's public key (ID)
encrypted_state_encryption_keys[enclaveclient.enclave_id] = esresponse['encrypted_state_encryption_key']
        # Add this specific enclave to the contract
add_enclave_to_contract(ledger_config,
client_keys,
contract_id,
enclaveclient.enclave_id,
psecrets,
esresponse['encrypted_state_encryption_key'],
esresponse['signature'])
return encrypted_state_encryption_keys
## -----------------------------------------------------------------
def CreateContract(ledger_config, client_keys, enclaveclients, contract) :
# Choose one enclave at random to use to create the contract
enclaveclient = random.choice(enclaveclients)
logger.info('Requesting that the enclave initialize the contract...')
initialize_request = contract.create_initialize_request(client_keys, enclaveclient)
initialize_response = initialize_request.evaluate()
contract.set_state(initialize_response.raw_state)
logger.info('Contract state created successfully')
logger.info('Saving the initial contract state in the ledger...')
# submit the commit task: (a commit task replicates change-set and submits the corresponding transaction)
initialize_response.commit_asynchronously(ledger_config)
txn_id = initialize_response.wait_for_commit()
if txn_id is None:
raise Exception("failed to commit transaction for the initial commit")
logger.info('contract initialized; transaction id %s', txn_id)
## -----------------------------------------------------------------
## -----------------------------------------------------------------
def Create(config, client_profile, contract_name, contract_code, eservices, pservices) :
"""
client_profile -- toxaway.models.profile.Profile
contract_code -- toxaway.models.contract_code.ContractCode
eservices -- toxaway.models.eservice.EnclaveServiceList
pservices -- toxaway.models.pservice.ProvisioningServiceList
"""
ledger_config = config['Sawtooth']
contract_config = config['ContentPaths']
state_directory = contract_config['State']
client_keys = client_profile.keys
provisioning_service_keys = list(pservices.identities())
try :
pdo_code_object = contract_code.create_pdo_contract()
except Exception as e :
logger.error('failed to create the contract object; %s', str(e))
return None
try :
pdo_contract_id = register_contract(
ledger_config, client_keys, pdo_code_object, provisioning_service_keys)
logger.info('Registered contract %s with id %s', contract_code.name, pdo_contract_id)
pdo_contract_state = ContractState.create_new_state(pdo_contract_id)
contract = Contract(pdo_code_object, pdo_contract_state, pdo_contract_id, client_keys.identity)
except Exception as e :
logger.error('failed to register the contract; %s', str(e))
return None
logger.info('Contract created')
enclaveclients = []
for eservice in eservices :
enclaveclients.append(eservice.eservice_client)
provclients = []
for pservice in pservices :
provclients.append(pservice.pservice_client)
encrypted_state_encryption_keys = AddEnclaveSecrets(
ledger_config, pdo_contract_id, client_keys, enclaveclients, provclients)
for enclave_id in encrypted_state_encryption_keys :
encrypted_key = encrypted_state_encryption_keys[enclave_id]
contract.set_state_encryption_key(enclave_id, encrypted_key)
CreateContract(ledger_config, client_keys, enclaveclients, contract)
contract.contract_state.save_to_cache(data_dir = state_directory)
logger.info('state saved to cache')
with tempfile.NamedTemporaryFile() as pdo_temp :
contract.save_to_file(pdo_temp.name)
toxaway_contract = toxaway.models.contract.Contract.import_contract(config, pdo_temp, contract_name)
return toxaway_contract
|
[
"tempfile.NamedTemporaryFile",
"pdo.contract.Contract",
"random.choice",
"pdo.contract.register_contract",
"pdo.contract.add_enclave_to_contract",
"pdo.contract.ContractState.create_new_state",
"pdo.common.crypto.string_to_byte_array",
"logging.getLogger"
] |
[((1254, 1281), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1271, 1281), False, 'import logging\n'), ((3510, 3539), 'random.choice', 'random.choice', (['enclaveclients'], {}), '(enclaveclients)\n', (3523, 3539), False, 'import random\n'), ((2872, 3052), 'pdo.contract.add_enclave_to_contract', 'add_enclave_to_contract', (['ledger_config', 'client_keys', 'contract_id', 'enclaveclient.enclave_id', 'psecrets', "esresponse['encrypted_state_encryption_key']", "esresponse['signature']"], {}), "(ledger_config, client_keys, contract_id,\n enclaveclient.enclave_id, psecrets, esresponse[\n 'encrypted_state_encryption_key'], esresponse['signature'])\n", (2895, 3052), False, 'from pdo.contract import add_enclave_to_contract\n'), ((5281, 5374), 'pdo.contract.register_contract', 'register_contract', (['ledger_config', 'client_keys', 'pdo_code_object', 'provisioning_service_keys'], {}), '(ledger_config, client_keys, pdo_code_object,\n provisioning_service_keys)\n', (5298, 5374), False, 'from pdo.contract import register_contract\n'), ((5508, 5555), 'pdo.contract.ContractState.create_new_state', 'ContractState.create_new_state', (['pdo_contract_id'], {}), '(pdo_contract_id)\n', (5538, 5555), False, 'from pdo.contract import ContractState\n'), ((5575, 5664), 'pdo.contract.Contract', 'Contract', (['pdo_code_object', 'pdo_contract_state', 'pdo_contract_id', 'client_keys.identity'], {}), '(pdo_code_object, pdo_contract_state, pdo_contract_id, client_keys.\n identity)\n', (5583, 5664), False, 'from pdo.contract import Contract\n'), ((6562, 6591), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (6589, 6591), False, 'import tempfile\n'), ((1717, 1785), 'pdo.common.crypto.string_to_byte_array', 'pcrypto.string_to_byte_array', (['(enclaveclient.enclave_id + contract_id)'], {}), '(enclaveclient.enclave_id + contract_id)\n', (1745, 1785), True, 'import pdo.common.crypto as pcrypto\n')]
|
from taurex.log import Logger
import numpy as np
class Output(Logger):
def __init__(self, name):
super().__init__(name)
def open(self):
raise NotImplementedError
def create_group(self, group_name):
raise NotImplementedError
def close(self):
raise NotImplementedError
def __enter__(self):
self.open()
return self
def __exit__(self, type, value, tb):
self.close()
def store_dictionary(self, dictionary, group_name=None):
from taurex.util.util import recursively_save_dict_contents_to_output
out = self
if group_name is not None:
out = self.create_group(group_name)
recursively_save_dict_contents_to_output(out, dictionary)
class OutputGroup(Output):
def __init__(self, name):
super().__init__(name)
self._name = name
def write_array(self, array_name, array, metadata=None):
raise NotImplementedError
def write_list(self, list_name, list_array, metadata=None):
arr = np.array(list_array)
self.write_array(list_name, arr)
def write_scalar(self, scalar_name, scalar, metadata=None):
raise NotImplementedError
def write_string(self, string_name, string, metadata=None):
raise NotImplementedError
def write_string_array(self, string_name, string_array, metadata=None):
raise NotImplementedError
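# A minimal in-memory subclass sketch (hypothetical, not part of TauREx):
# it fills in the abstract methods so the context-manager protocol and the
# write_* helpers above can be exercised end to end.
class DictOutput(OutputGroup):
    def __init__(self, name):
        super().__init__(name)
        self.store = {}
    def open(self):
        pass  # nothing to acquire for an in-memory store
    def close(self):
        pass
    def create_group(self, group_name):
        return self  # flat store: groups collapse into the same dict
    def write_array(self, array_name, array, metadata=None):
        self.store[array_name] = array
    def write_scalar(self, scalar_name, scalar, metadata=None):
        self.store[scalar_name] = scalar
    def write_string(self, string_name, string, metadata=None):
        self.store[string_name] = string
    def write_string_array(self, string_name, string_array, metadata=None):
        self.store[string_name] = list(string_array)

with DictOutput('demo') as out:
    out.write_list('values', [1.0, 2.0, 3.0])  # stored as an np.array via write_array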
|
[
"taurex.util.util.recursively_save_dict_contents_to_output",
"numpy.array"
] |
[((702, 759), 'taurex.util.util.recursively_save_dict_contents_to_output', 'recursively_save_dict_contents_to_output', (['out', 'dictionary'], {}), '(out, dictionary)\n', (742, 759), False, 'from taurex.util.util import recursively_save_dict_contents_to_output\n'), ((1052, 1072), 'numpy.array', 'np.array', (['list_array'], {}), '(list_array)\n', (1060, 1072), True, 'import numpy as np\n')]
|
# $Id: TestMetadataMerging.py 1047 2009-01-15 14:48:58Z bhavana $
#
# Unit testing for WebBrick library functions (Functions.py)
# See http://pyunit.sourceforge.net/pyunit.html
#
import sys, unittest, logging, re, StringIO, os, cgi, rdflib
from rdflib import URIRef, Namespace, RDF
from os.path import normpath
sys.path.append("..")
sys.path.append("../cgi-bin")
from MiscLib import TestUtils
import SubmitDatasetUtils, ManifestRDFUtils, TestConfig
Logger = logging.getLogger("TestMetadataMerging")
ExpectedDictionary = {
"creator" : "admiral"
, "identifier" : "SubmissionToolTest"
, "title" : "Submission tool test title"
, "description" : "Submission tool test description"
}
class TestMetadataMerging(unittest.TestCase):
def setUp(self):
return
def tearDown(self):
return
# Tests
def testReadMetadata(self):
rdfGraphBeforeSerialisation = ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath,TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.ElementValueList)
rdfGraphAfterSerialisation = ManifestRDFUtils.readManifestFile(TestConfig.ManifestFilePath)
# Compare the serialised graph obtained with the graph before serialisation
self.assertEqual(len(rdfGraphBeforeSerialisation),5,'Graph length %i' %len(rdfGraphAfterSerialisation))
subject = rdfGraphAfterSerialisation.value(None,RDF.type,URIRef(ManifestRDFUtils.oxds+"Grouping"))
self.failUnless((subject,RDF.type,URIRef(TestConfig.oxds+"Grouping")) in rdfGraphAfterSerialisation, 'Testing submission type: '+subject+", "+ URIRef(TestConfig.oxds+"Grouping"))
self.failUnless((subject,TestConfig.ElementCreatorUri,TestConfig.User) in rdfGraphAfterSerialisation, 'dcterms:creator')
self.failUnless((subject,TestConfig.ElementIdentifierUri,TestConfig.DatasetId) in rdfGraphAfterSerialisation, 'ManifestRDFUtils.dcterms:identifier')
self.failUnless((subject,TestConfig.ElementTitleUri,TestConfig.Title) in rdfGraphAfterSerialisation, 'dcterms:title')
self.failUnless((subject,TestConfig.ElementDescriptionUri,TestConfig.Description) in rdfGraphAfterSerialisation, 'dcterms:TestConfig.Description')
return
def testUpdateMetadata(self):
updatedTitle = "Submission tool updated test title"
updatedDescription = "Submission tool updated test description"
initialGraph = ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary,TestConfig.ElementUriList, TestConfig.ElementValueList)
updatedGraph = ManifestRDFUtils.updateManifestFile(TestConfig.ManifestFilePath, [TestConfig.ElementTitleUri,TestConfig.ElementDescriptionUri], [updatedTitle, updatedDescription])
readGraph = ManifestRDFUtils.readManifestFile(TestConfig.ManifestFilePath)
# Assert that (initialGraph != updatedGraph)
self.assertEqual(False, ManifestRDFUtils.compareRDFGraphs(initialGraph, updatedGraph,TestConfig.ElementUriList),"Error updating the manifest file!")
# Assert that (updatedGraph == readGraph)
self.assertEqual(True, ManifestRDFUtils.compareRDFGraphs(updatedGraph, readGraph,TestConfig.ElementUriList),"Error updating the manifest file!")
return
def testGetElementValuesFromManifest(self):
rdfGraph = ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.ElementValueList)
fields = ManifestRDFUtils.getElementValuesFromManifest(rdfGraph, TestConfig.ElementUriList)
self.assertEquals(fields,TestConfig.ElementValueList,"Problem reading submit dataset utility Fields!")
return
def testGetDictionaryFromManifest(self):
rdfGraph = ManifestRDFUtils.writeToManifestFile(TestConfig.ManifestFilePath, TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.ElementValueList)
actualDictionary = ManifestRDFUtils.getDictionaryFromManifest(TestConfig.ManifestFilePath, TestConfig.ElementUriList)
Logger.debug(repr(actualDictionary))
#print "ExpectedDictionary: "+repr(ExpectedDictionary)
#print "actualDictionary: "+repr(actualDictionary)
self.assertEqual(ExpectedDictionary,actualDictionary, "Error fetching dictionary from the metadata!")
return
def getTestSuite(select="unit"):
"""
Get test suite
select is one of the following:
"unit" return suite of unit tests only
"component" return suite of unit and component tests
"all" return suite of unit, component and integration tests
"pending" return suite of pending tests
name a single named test to be run
"""
testdict = {
"unit":
[
"testReadMetadata",
"testUpdateMetadata",
"testGetElementValuesFromManifest",
"testGetDictionaryFromManifest"
],
"component":
[ #"testComponents"
],
"integration":
[ #"testIntegration"
],
"pending":
[ #"testPending"
]
}
return TestUtils.getTestSuite(TestMetadataMerging, testdict, select=select)
if __name__ == "__main__":
TestConfig.setDatasetsBaseDir(".")
TestUtils.runTests("TestMetadataMerging.log", getTestSuite, sys.argv)
|
[
"sys.path.append",
"MiscLib.TestUtils.runTests",
"TestConfig.setDatasetsBaseDir",
"ManifestRDFUtils.compareRDFGraphs",
"ManifestRDFUtils.getDictionaryFromManifest",
"rdflib.URIRef",
"ManifestRDFUtils.updateManifestFile",
"ManifestRDFUtils.writeToManifestFile",
"ManifestRDFUtils.getElementValuesFromManifest",
"ManifestRDFUtils.readManifestFile",
"MiscLib.TestUtils.getTestSuite",
"logging.getLogger"
] |
[((312, 333), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (327, 333), False, 'import sys, unittest, logging, re, StringIO, os, cgi, rdflib\n'), ((334, 363), 'sys.path.append', 'sys.path.append', (['"""../cgi-bin"""'], {}), "('../cgi-bin')\n", (349, 363), False, 'import sys, unittest, logging, re, StringIO, os, cgi, rdflib\n'), ((481, 521), 'logging.getLogger', 'logging.getLogger', (['"""TestMetadataMerging"""'], {}), "('TestMetadataMerging')\n", (498, 521), False, 'import sys, unittest, logging, re, StringIO, os, cgi, rdflib\n'), ((5560, 5628), 'MiscLib.TestUtils.getTestSuite', 'TestUtils.getTestSuite', (['TestMetadataMerging', 'testdict'], {'select': 'select'}), '(TestMetadataMerging, testdict, select=select)\n', (5582, 5628), False, 'from MiscLib import TestUtils\n'), ((5662, 5696), 'TestConfig.setDatasetsBaseDir', 'TestConfig.setDatasetsBaseDir', (['"""."""'], {}), "('.')\n", (5691, 5696), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((5701, 5770), 'MiscLib.TestUtils.runTests', 'TestUtils.runTests', (['"""TestMetadataMerging.log"""', 'getTestSuite', 'sys.argv'], {}), "('TestMetadataMerging.log', getTestSuite, sys.argv)\n", (5719, 5770), False, 'from MiscLib import TestUtils\n'), ((1141, 1303), 'ManifestRDFUtils.writeToManifestFile', 'ManifestRDFUtils.writeToManifestFile', (['TestConfig.ManifestFilePath', 'TestConfig.NamespaceDictionary', 'TestConfig.ElementUriList', 'TestConfig.ElementValueList'], {}), '(TestConfig.ManifestFilePath,\n TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.\n ElementValueList)\n', (1177, 1303), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((1339, 1401), 'ManifestRDFUtils.readManifestFile', 'ManifestRDFUtils.readManifestFile', (['TestConfig.ManifestFilePath'], {}), '(TestConfig.ManifestFilePath)\n', (1372, 1401), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((2701, 2863), 'ManifestRDFUtils.writeToManifestFile', 'ManifestRDFUtils.writeToManifestFile', (['TestConfig.ManifestFilePath', 'TestConfig.NamespaceDictionary', 'TestConfig.ElementUriList', 'TestConfig.ElementValueList'], {}), '(TestConfig.ManifestFilePath,\n TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.\n ElementValueList)\n', (2737, 2863), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((2877, 3051), 'ManifestRDFUtils.updateManifestFile', 'ManifestRDFUtils.updateManifestFile', (['TestConfig.ManifestFilePath', '[TestConfig.ElementTitleUri, TestConfig.ElementDescriptionUri]', '[updatedTitle, updatedDescription]'], {}), '(TestConfig.ManifestFilePath, [\n TestConfig.ElementTitleUri, TestConfig.ElementDescriptionUri], [\n updatedTitle, updatedDescription])\n', (2912, 3051), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((3071, 3133), 'ManifestRDFUtils.readManifestFile', 'ManifestRDFUtils.readManifestFile', (['TestConfig.ManifestFilePath'], {}), '(TestConfig.ManifestFilePath)\n', (3104, 3133), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((3658, 3820), 'ManifestRDFUtils.writeToManifestFile', 'ManifestRDFUtils.writeToManifestFile', (['TestConfig.ManifestFilePath', 'TestConfig.NamespaceDictionary', 'TestConfig.ElementUriList', 'TestConfig.ElementValueList'], {}), '(TestConfig.ManifestFilePath,\n TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.\n ElementValueList)\n', (3694, 3820), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((3837, 3924), 
'ManifestRDFUtils.getElementValuesFromManifest', 'ManifestRDFUtils.getElementValuesFromManifest', (['rdfGraph', 'TestConfig.ElementUriList'], {}), '(rdfGraph, TestConfig.\n ElementUriList)\n', (3882, 3924), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((4115, 4277), 'ManifestRDFUtils.writeToManifestFile', 'ManifestRDFUtils.writeToManifestFile', (['TestConfig.ManifestFilePath', 'TestConfig.NamespaceDictionary', 'TestConfig.ElementUriList', 'TestConfig.ElementValueList'], {}), '(TestConfig.ManifestFilePath,\n TestConfig.NamespaceDictionary, TestConfig.ElementUriList, TestConfig.\n ElementValueList)\n', (4151, 4277), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((4296, 4398), 'ManifestRDFUtils.getDictionaryFromManifest', 'ManifestRDFUtils.getDictionaryFromManifest', (['TestConfig.ManifestFilePath', 'TestConfig.ElementUriList'], {}), '(TestConfig.ManifestFilePath,\n TestConfig.ElementUriList)\n', (4338, 4398), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((1678, 1720), 'rdflib.URIRef', 'URIRef', (["(ManifestRDFUtils.oxds + 'Grouping')"], {}), "(ManifestRDFUtils.oxds + 'Grouping')\n", (1684, 1720), False, 'from rdflib import URIRef, Namespace, RDF\n'), ((3230, 3323), 'ManifestRDFUtils.compareRDFGraphs', 'ManifestRDFUtils.compareRDFGraphs', (['initialGraph', 'updatedGraph', 'TestConfig.ElementUriList'], {}), '(initialGraph, updatedGraph, TestConfig.\n ElementUriList)\n', (3263, 3323), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((3445, 3535), 'ManifestRDFUtils.compareRDFGraphs', 'ManifestRDFUtils.compareRDFGraphs', (['updatedGraph', 'readGraph', 'TestConfig.ElementUriList'], {}), '(updatedGraph, readGraph, TestConfig.\n ElementUriList)\n', (3478, 3535), False, 'import SubmitDatasetUtils, ManifestRDFUtils, TestConfig\n'), ((1871, 1907), 'rdflib.URIRef', 'URIRef', (["(TestConfig.oxds + 'Grouping')"], {}), "(TestConfig.oxds + 'Grouping')\n", (1877, 1907), False, 'from rdflib import URIRef, Namespace, RDF\n'), ((1762, 1798), 'rdflib.URIRef', 'URIRef', (["(TestConfig.oxds + 'Grouping')"], {}), "(TestConfig.oxds + 'Grouping')\n", (1768, 1798), False, 'from rdflib import URIRef, Namespace, RDF\n')]
|
# Generated by Django 3.2 on 2021-04-25 13:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("jobs", "0002_auto_20210425_1615"),
]
operations = [
migrations.AddField(
model_name="listing",
name="currency",
field=models.CharField(default="$", max_length=5),
),
]
|
[
"django.db.models.CharField"
] |
[((347, 390), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""$"""', 'max_length': '(5)'}), "(default='$', max_length=5)\n", (363, 390), False, 'from django.db import migrations, models\n')]
|
from rlcard.utils.utils import init_standard_deck
class LimitholdemDealer(object):
def __init__(self, np_random):
''' Initialize a limitholdem dealer class
'''
self.np_random = np_random
self.deck = init_standard_deck()
self.shuffle()
self.pot = 0
def shuffle(self):
''' Shuffle the deck
'''
self.np_random.shuffle(self.deck)
def deal_card(self):
''' Deal one card from the deck
Returns:
(Card): The drawn card from the deck
'''
return self.deck.pop()
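# Usage sketch: the dealer accepts any numpy RandomState-like object, so a
# seeded generator gives reproducible deals (values below are illustrative).
if __name__ == '__main__':
    import numpy as np
    dealer = LimitholdemDealer(np.random.RandomState(42))
    card = dealer.deal_card()  # pops one card from the shuffled standard deck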
|
[
"rlcard.utils.utils.init_standard_deck"
] |
[((237, 257), 'rlcard.utils.utils.init_standard_deck', 'init_standard_deck', ([], {}), '()\n', (255, 257), False, 'from rlcard.utils.utils import init_standard_deck\n')]
|
"""
This library contains metrics to quantify the shape of a waveform
1. threshold_amplitude - only look at a metric while oscillatory amplitude is above a set percentile threshold
2. rdratio - Ratio of rise time and decay time
3. pt_duration - Peak and trough durations and their ratio
4. symPT - symmetry between peak and trough
5. symRD - symmetry between rise and decay
6. pt_sharp - calculate sharpness of oscillatory extrema
7. rd_steep - calculate rise and decay steepness
8. ptsr - calculate extrema sharpness ratio
9. rdsr - calculate rise-decay steepness ratio
10. average_waveform_trigger - calculate the average waveform of an oscillation by triggering on peak or trough
11. gips_swm - identify a repeated waveform in the signal
12. rd_diff - normalized difference between rise and decay time
"""
from __future__ import division
import numpy as np
from analysis_helpers.misshapen.nonshape import ampT, bandpass_default, findpt
def threshold_amplitude(x, metric, samples, percentile, frange, Fs, filter_fn=None, filter_kwargs=None):
"""
Exclude from analysis the samples in which the amplitude falls below a defined percentile
Parameters
----------
x : numpy array
raw time series
metric : numpy array
series of measures corresponding to time samples in 'samples' (e.g. peak sharpness)
samples : numpy array
        time samples at which metric was computed (e.g. peaks)
percentile : float
percentile cutoff for exclusion (e.g. 10 = bottom 10% excluded)
frange : [lo, hi]
frequency range of interest for calculating amplitude
Fs : float
Sampling rate (Hz)
Returns
-------
metric_new : numpy array
same as input 'metric' but only for samples above the amplitude threshold
samples_new : numpy array
samples above the amplitude threshold
"""
# Do nothing if threshold is 0
if percentile == 0:
return metric, samples
# Default filter function
if filter_fn is None:
filter_fn = bandpass_default
if filter_kwargs is None:
filter_kwargs = {}
# Calculate amplitude time series and threshold
amp = ampT(x, frange, Fs, rmv_edge = False, filter_fn=filter_fn, filter_kwargs=filter_kwargs)
amp = amp[samples]
amp_threshold = np.percentile(amp, percentile)
# Update samples used
samples_new = samples[amp>=amp_threshold]
metric_new = metric[amp>=amp_threshold]
return metric_new, samples_new
def rdratio(Ps, Ts):
"""
Calculate the ratio between rise time and decay time for oscillations
Note: must have the same number of peaks and troughs
Note: the final rise or decay is unused
Parameters
----------
Ps : numpy arrays 1d
time points of oscillatory peaks
Ts : numpy arrays 1d
        time points of oscillatory troughs
Returns
-------
rdr : array-like 1d
rise-decay ratios for each oscillation
"""
# Assure input has the same number of peaks and troughs
if len(Ts) != len(Ps):
raise ValueError('Length of peaks and troughs arrays must be equal')
# Assure Ps and Ts are numpy arrays
if type(Ps)==list or type(Ts)==list:
print('Converted Ps and Ts to numpy arrays')
Ps = np.array(Ps)
Ts = np.array(Ts)
# Calculate rise and decay times
if Ts[0] < Ps[0]:
riset = Ps[:-1] - Ts[:-1]
decayt = Ts[1:] - Ps[:-1]
else:
riset = Ps[1:] - Ts[:-1]
decayt = Ts[:-1] - Ps[:-1]
# Calculate ratio between each rise and decay time
rdr = riset / decayt.astype(float)
return riset, decayt, rdr
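# Quick numeric check (illustrative values): troughs at samples [0, 100] and
# peaks at [60, 160] hit the trough-first branch, so the rise (trough -> peak)
# lasts 60 samples, the decay (peak -> trough) 40, and rdr == [1.5].
_riset, _decayt, _rdr = rdratio(np.array([60, 160]), np.array([0, 100]))
assert np.allclose(_riset, [60]) and np.allclose(_decayt, [40]) and np.allclose(_rdr, [1.5])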
def pt_duration(Ps, Ts, zeroxR, zeroxD):
"""
Calculate the ratio between peak and trough durations
NOTE: must have the same number of peaks and troughs
NOTE: the durations of the first and last extrema will be estimated by using the only zerox they have
Parameters
----------
Ps : numpy arrays 1d
time points of oscillatory peaks
Ts : numpy arrays 1d
        time points of oscillatory troughs
zeroxR : array-like 1d
indices at which oscillatory rising zerocrossings occur
zeroxD : array-like 1d
indices at which oscillatory decaying zerocrossings occur
Returns
-------
    Ps_dur : array-like 1d
        duration of each peak, in samples
    Ts_dur : array-like 1d
        duration of each trough, in samples
    ptr : array-like 1d
        peak-trough duration ratios for each oscillation
"""
# Assure input has the same number of peaks and troughs
if len(Ts) != len(Ps):
raise ValueError('Length of peaks and troughs arrays must be equal')
# Assure Ps and Ts are numpy arrays
if type(Ps)==list or type(Ts)==list:
print('Converted Ps and Ts to numpy arrays')
Ps = np.array(Ps)
Ts = np.array(Ts)
# Calculate the duration of each peak and trough until last
Ps_dur = np.zeros(len(Ps))
Ts_dur = np.zeros(len(Ts))
if Ps[0] < Ts[0]:
# treat first extrema differently
Ps_dur[0] = 2*(zeroxD[0] - Ps[0])
# duration of each peak
for i in range(1, len(Ps)-1):
Ps_dur[i] = (zeroxD[i] - zeroxR[i-1])
# duration of each trough
for i in range(len(Ts)-1):
Ts_dur[i] = (zeroxR[i] - zeroxD[i])
else:
Ts_dur[0] = 2*(zeroxR[0] - Ts[0])
for i in range(len(Ps)-1):
Ps_dur[i] = (zeroxD[i] - zeroxR[i])
# duration of each trough
for i in range(1, len(Ts)-1):
Ts_dur[i] = (zeroxR[i] - zeroxD[i-1])
# Treat last extrema differently
if Ps[-1] < Ts[-1]:
Ps_dur[-1] = (zeroxD[-1] - zeroxR[-1])
Ts_dur[-1] = 2*(Ts[-1] - zeroxD[-1])
else:
Ps_dur[-1] = 2*(Ps[-1] - zeroxR[-1])
Ts_dur[-1] = (zeroxR[-1] - zeroxD[-1])
ptr = Ps_dur/Ts_dur
return Ps_dur, Ts_dur, ptr
def symPT(x, Ps, Ts, window_half):
"""
Measure of asymmetry between oscillatory peaks and troughs
Parameters
----------
x : array-like 1d
voltage time series
Ps : array-like 1d
time points of oscillatory peaks
Ts : array-like 1d
time points of oscillatory troughs
window_half : int
Number of samples around extrema to analyze, in EACH DIRECTION
Returns
-------
sym : array-like 1d
measure of symmetry between each trough-peak pair
Result of 0 means the peak and trough are perfectly symmetric
Notes
-----
Opt 2: Roemer; The metric should be between 0 and 1
    Inner product of Peak and Trough divided by the square root of the product of SSQ_peak and SSQ_trough
I'll need to fine tune this to make it more complicated and less susceptible to noise
"""
# Assure input has the same number of peaks and troughs
if len(Ts) != len(Ps):
raise ValueError('Length of peaks and troughs arrays must be equal')
E = len(Ps)
sym = np.zeros(E)
for e in range(E):
# Find region around each peak and trough. Make extrema be 0
peak = x[Ps[e]-window_half:Ps[e]+window_half+1] - x[Ps[e]]
peak = -peak
trough = x[Ts[e]-window_half:Ts[e]+window_half+1] - x[Ts[e]]
# Compare the two measures
peakenergy = np.sum(peak**2)
troughenergy = np.sum(trough**2)
energy = np.max((peakenergy,troughenergy))
diffenergy = np.sum((peak-trough)**2)
sym[e] = diffenergy / energy
return sym
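# Sanity check (illustrative): a perfectly symmetric triangle wave, with a
# peak at index 2 and a trough at index 6, scores exactly 0 (no asymmetry).
_x = np.array([0, 1, 2, 1, 0, -1, -2, -1, 0])
assert np.allclose(symPT(_x, np.array([2]), np.array([6]), window_half=2), [0.0])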
def symRD(x, Ts, window_full):
"""
    Measure of asymmetry between oscillatory rises and decays
Parameters
----------
x : array-like 1d
voltage time series
Ts : array-like 1d
time points of oscillatory troughs
window_full : int
        Number of samples after each trough to analyze for the rise and before each trough to analyze for the decay
Returns
-------
sym : array-like 1d
measure of symmetry between each rise and decay
"""
T = len(Ts)
sym = np.zeros(T)
for t in range(T):
# Find regions for the rise and the decay
rise = x[Ts[t]:Ts[t]+window_full+1] - x[Ts[t]]
decay = x[Ts[t]-window_full:Ts[t]+1] - x[Ts[t]]
# Ensure the minimum value is 0
rise[rise<0] = 0
decay[decay<0] = 0
# Make rises and decays go the same direction
rise = np.flipud(rise)
# Calculate absolute difference between each point in the rise and decay
diffenergy = np.sum(np.abs(rise-decay))
# Normalize this difference by the max voltage value at each point
rise_decay_maxes = np.max(np.vstack((rise,decay)),axis=0)
energy = np.sum(rise_decay_maxes)
# Compare the two measures
sym[t] = diffenergy / energy
return sym
def pt_sharp(x, Ps, Ts, window_half, method='diff'):
"""
Calculate the sharpness of extrema
Parameters
----------
x : array-like 1d
voltage time series
Ps : array-like 1d
time points of oscillatory peaks
Ts : array-like 1d
time points of oscillatory troughs
window_half : int
Number of samples in each direction around extrema to use for sharpness estimation
Returns
-------
Psharps : array-like 1d
sharpness of peaks
Tsharps : array-like 1d
sharpness of troughs
"""
# Assure input has the same number of peaks and troughs
if len(Ts) != len(Ps):
raise ValueError('Length of peaks and troughs arrays must be equal')
# Calculate the sharpness of each peak
P = len(Ps)
Psharps = np.zeros(P)
for e in range(P):
if method == 'deriv':
Edata = x[Ps[e]-window_half: Ps[e]+window_half+1]
Psharps[e] = np.mean(np.abs(np.diff(Edata)))
elif method == 'diff':
Psharps[e] = np.mean((x[Ps[e]]-x[Ps[e]-window_half],x[Ps[e]]-x[Ps[e]+window_half]))
T = len(Ts)
Tsharps = np.zeros(T)
for e in range(T):
if method == 'deriv':
Edata = x[Ts[e]-window_half: Ts[e]+window_half+1]
Tsharps[e] = np.mean(np.abs(np.diff(Edata)))
elif method == 'diff':
Tsharps[e] = np.mean((x[Ts[e]-window_half]-x[Ts[e]],x[Ts[e]+window_half]-x[Ts[e]]))
return Psharps, Tsharps
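# On the same symmetric triangle wave both extrema have sharpness 2.0 under
# the default 'diff' method (mean height difference between the extremum and
# the samples window_half away on either side).
_x = np.array([0, 1, 2, 1, 0, -1, -2, -1, 0])
_psharp, _tsharp = pt_sharp(_x, np.array([2]), np.array([6]), window_half=2)
assert np.allclose(_psharp, [2.0]) and np.allclose(_tsharp, [2.0])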
def rd_steep(x, Ps, Ts):
"""
Calculate the max steepness of rises and decays
Parameters
----------
x : array-like 1d
voltage time series
Ps : array-like 1d
time points of oscillatory peaks
Ts : array-like 1d
time points of oscillatory troughs
Returns
-------
risesteep : array-like 1d
max steepness in each period for rise
decaysteep : array-like 1d
max steepness in each period for decay
"""
# Assure input has the same number of peaks and troughs
if len(Ts) != len(Ps):
raise ValueError('Length of peaks and troughs arrays must be equal')
# Calculate rise and decay steepness
E = len(Ps) - 1
risesteep = np.zeros(E)
for t in range(E):
if Ts[0] < Ps[0]:
rise = x[Ts[t]:Ps[t]+1]
else:
rise = x[Ts[t]:Ps[t+1]+1]
risesteep[t] = np.max(np.diff(rise))
decaysteep = np.zeros(E)
for p in range(E):
if Ts[0] < Ps[0]:
decay = x[Ps[p]:Ts[p+1]+1]
else:
decay = x[Ps[p]:Ts[p]+1]
decaysteep[p] = -np.min(np.diff(decay))
return risesteep, decaysteep
def ptsr(Psharp,Tsharp, log = True, polarity = True):
if polarity:
sharpnessratio = Psharp/Tsharp
else:
sharpnessratio = np.max((Psharp/Tsharp,Tsharp/Psharp))
if log:
sharpnessratio = np.log10(sharpnessratio)
return sharpnessratio
def rdsr(Rsteep, Dsteep, log=True, polarity=True):
    """Rise-decay steepness ratio; polarity=False makes the ratio
    polarity-invariant (max of the ratio and its inverse), log=True applies log10."""
if polarity:
steepnessratio = Rsteep/Dsteep
else:
steepnessratio = np.max((Rsteep/Dsteep,Dsteep/Rsteep))
if log:
steepnessratio = np.log10(steepnessratio)
return steepnessratio
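# A hedged end-to-end sketch combining the measures above (added illustration;
# Fs, f_range and the window sizes are assumptions): locate extrema with findpt,
# then summarize sharpness and steepness asymmetry with ptsr and rdsr.
def _demo_shape_ratios():
    Fs = 1000.
    t = np.arange(0, 10, 1 / Fs)
    x = np.sin(2 * np.pi * 10 * t) + .3 * np.sin(2 * np.pi * 20 * t)
    Ps, Ts = findpt(x, (8, 12), Fs, boundary=50)
    n = min(len(Ps), len(Ts))  # pt_sharp and rd_steep require equal lengths
    Psharps, Tsharps = pt_sharp(x, Ps[:n], Ts[:n], window_half=5)
    risesteep, decaysteep = rd_steep(x, Ps[:n], Ts[:n])
    return (ptsr(np.mean(Psharps), np.mean(Tsharps)),
            rdsr(np.mean(risesteep), np.mean(decaysteep)))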
def average_waveform_trigger(x, f_range, Fs, avgwave_halflen, trigger = 'trough'):
"""
Calculate the average waveform of a signal by triggering on the peaks or troughs
Parameters
----------
x : array-like 1d
voltage time series
f_range : (low, high), Hz
frequency range for narrowband signal of interest
Fs : float
The sampling rate
avgwave_halflen : float
length of time for the averaged signal to be recorded in the positive and negative direction
trigger : str
'trough' to trigger the averaging on each trough
'peak' to trigger the averaging on each peak
Returns
-------
avg_wave : array-like 1d
the average waveform in 'x' in the frequency 'f_range' triggered on 'trigger'
"""
# Set up the parameters for averaging
dt = 1/float(Fs)
t_avg_wave = np.arange(-avgwave_halflen,avgwave_halflen+dt, dt)
N_samples_halflen = int(avgwave_halflen*Fs)
# Find the trigger points for averaging
Ps, Ts = findpt(x, f_range, Fs, boundary = N_samples_halflen+1)
if trigger == 'trough':
trig_samps = Ts
elif trigger == 'peak':
trig_samps = Ps
else:
raise ValueError('Trigger not implemented')
# Do the averaging at each trigger
avg_wave = np.zeros(int(N_samples_halflen*2+1))
N_triggers = len(trig_samps)
for i in range(N_triggers):
avg_wave += x[trig_samps[i]-N_samples_halflen:trig_samps[i]+N_samples_halflen+1]
avg_wave = avg_wave/N_triggers
return t_avg_wave, avg_wave
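# A minimal usage sketch for the trough-triggered average (added illustration;
# f_range and the half-window length are assumptions): average a 10 Hz sine
# over +/- 100 ms around each trough.
def _demo_average_waveform():
    Fs = 1000.
    x = np.sin(2 * np.pi * 10 * np.arange(0, 10, 1 / Fs))
    t_wave, wave = average_waveform_trigger(x, (8, 12), Fs, .1, trigger='trough')
    return t_wave, wave  # wave approximates one 10 Hz cycle centered on a trough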
def gips_swm(x, Fs, L, G,
max_iterations = 100, T = 1, window_starts_custom = None):
"""
Sliding window matching methods to find recurring patterns in a time series
using the method by <NAME> in J Neuro Methods 2017.
See matlab code at: https://github.com/bartgips/SWM
    Note: high-pass filter the signal first when looking for high-frequency motifs,
    so that the algorithm does not converge on a low-frequency pattern.
    L and G should be chosen to be about the size of the motif of interest, and the
    number of windows derived from them should be about the number of occurrences.
Parameters
----------
x : array-like 1d
voltage time series
Fs : float
The sampling rate (samples per second)
L : float
Window length (seconds)
G : float
Minimum window spacing (seconds)
T : float
temperature parameter. Controls acceptance probability
max_iterations : int
Maximum number of iterations for the pattern finder
window_starts_custom : np.ndarray (1d)
Pre-set locations of initial windows (instead of evenly spaced by 2G)
Returns
-------
avg_wave : np.ndarray (1d)
        the average waveform over the final set of windows
window_starts : np.ndarray (1d)
indices at which each window begins for the final set of windows
J : np.ndarray (1d)
History of costs
"""
# Initialize window positions, separated by 2*G
L_samp = int(L*Fs)
G_samp = int(G*Fs)
if window_starts_custom is None:
window_starts = np.arange(0,len(x)-L_samp,2*G_samp)
else:
window_starts = window_starts_custom
# Calculate the total number of windows
N_windows = len(window_starts)
# Calculate initial cost
J = np.zeros(max_iterations)
J[0] = _gips_compute_J(x, window_starts, L_samp)
# Randomly sample windows with replacement
random_window_idx = np.random.choice(range(N_windows),size=max_iterations)
# Optimize X
iter_num = 1
while iter_num < max_iterations:
# Pick a random window position
window_idx_replace = random_window_idx[iter_num]
# Find a new allowed position for the window
        # and write it into a temporary copy of the window array, so the update
        # is only kept if it is accepted below
window_starts_temp = np.copy(window_starts)
window_starts_temp[window_idx_replace] = _gips_find_new_windowidx(window_starts, G_samp, L_samp, len(x)-L_samp)
# Calculate the cost
J_temp = _gips_compute_J(x, window_starts_temp, L_samp)
# Calculate the change in cost function
deltaJ = J_temp - J[iter_num-1]
# Calculate the acceptance probability
p_accept = np.exp(-deltaJ/float(T))
# Accept update to J with a certain probability
if np.random.rand() < p_accept:
# Update J
J[iter_num] = J_temp
# Update X
window_starts = window_starts_temp
else:
# Update J
J[iter_num] = J[iter_num-1]
# Update iteration number
iter_num += 1
# Calculate average wave
avg_wave = np.zeros(L_samp)
for w in range(N_windows):
avg_wave = avg_wave + x[window_starts[w]:window_starts[w]+L_samp]
avg_wave = avg_wave/float(N_windows)
return avg_wave, window_starts, J
def _gips_compute_J(x, window_starts, L_samp):
"""Compute the cost, which is the average distance between all windows"""
# Get all windows and zscore them
N_windows = len(window_starts)
windows = np.zeros((N_windows,L_samp))
for w in range(N_windows):
temp = x[window_starts[w]:window_starts[w]+L_samp]
windows[w] = (temp - np.mean(temp))/np.std(temp)
# Calculate distances for all pairs of windows
d = []
for i in range(N_windows):
for j in range(i+1,N_windows):
window_diff = windows[i]-windows[j]
d_temp = 1/float(L_samp) * np.sum(window_diff**2)
d.append(d_temp)
# Calculate cost
J = 1/float(2*(N_windows-1))*np.sum(d)
return J
def _gips_find_new_windowidx(window_starts, G_samp, L_samp, N_samp):
"""Find a new sample for the starting window"""
    while True:
# Generate a random sample
new_samp = np.random.randint(N_samp)
# Check how close the sample is to other window starts
dists = np.abs(window_starts - new_samp)
if np.min(dists) > G_samp:
return new_samp
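# A hedged usage sketch for gips_swm (added illustration; the motif shape,
# window length L and spacing G below are assumptions): embed a repeating
# 100 ms bump in noise and recover it with sliding window matching.
def _demo_gips_swm():
    Fs = 1000.
    x = np.random.randn(int(10 * Fs)) * .1
    motif = np.hanning(int(.1 * Fs))
    for start in range(0, len(x) - len(motif), int(Fs)):
        x[start:start + len(motif)] += motif
    avg_wave, window_starts, J = gips_swm(x, Fs, L=.1, G=.05, max_iterations=50)
    return avg_wave, J  # J should generally decrease as windows align on the motif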
def rd_diff(Ps, Ts):
"""
Calculate the normalized difference between rise and decay times,
    which Gips (2017) refers to as the "skewness index":
SI = (T_up-T_down)/(T_up+T_down)
Parameters
----------
Ps : numpy arrays 1d
time points of oscillatory peaks
Ts : numpy arrays 1d
        time points of oscillatory troughs
Returns
-------
    riset : array-like 1d
        rise times for each oscillation
    decayt : array-like 1d
        decay times for each oscillation
    rdr : array-like 1d
        normalized rise-decay differences (skewness index) for each oscillation
"""
# Assure input has the same number of peaks and troughs
if len(Ts) != len(Ps):
raise ValueError('Length of peaks and troughs arrays must be equal')
# Assure Ps and Ts are numpy arrays
if type(Ps)==list or type(Ts)==list:
print('Converted Ps and Ts to numpy arrays')
Ps = np.array(Ps)
Ts = np.array(Ts)
# Calculate rise and decay times
if Ts[0] < Ps[0]:
riset = Ps[:-1] - Ts[:-1]
decayt = Ts[1:] - Ps[:-1]
else:
riset = Ps[1:] - Ts[:-1]
decayt = Ts[:-1] - Ps[:-1]
# Calculate ratio between each rise and decay time
    # note: float() on a numpy array raises TypeError; cast the array instead
    rdr = (riset - decayt) / (riset + decayt).astype(float)
return riset, decayt, rdr
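# A minimal sketch for rd_diff (added illustration with toy extrema times):
# symmetric spacing between peaks and troughs yields a skewness index of 0.
def _demo_rd_diff():
    Ps = np.array([25, 125, 225])  # peaks every 100 samples
    Ts = np.array([75, 175, 275])  # troughs midway between peaks
    riset, decayt, rdr = rd_diff(Ps, Ts)
    return rdr  # -> array([0., 0.]) for this symmetric spacing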
|
[
"numpy.sum",
"numpy.abs",
"numpy.random.randint",
"numpy.arange",
"numpy.mean",
"numpy.copy",
"numpy.std",
"numpy.max",
"numpy.log10",
"analysis_helpers.misshapen.nonshape.ampT",
"numpy.flipud",
"numpy.percentile",
"analysis_helpers.misshapen.nonshape.findpt",
"numpy.min",
"numpy.vstack",
"numpy.zeros",
"numpy.diff",
"numpy.array",
"numpy.random.rand"
] |
[((2175, 2265), 'analysis_helpers.misshapen.nonshape.ampT', 'ampT', (['x', 'frange', 'Fs'], {'rmv_edge': '(False)', 'filter_fn': 'filter_fn', 'filter_kwargs': 'filter_kwargs'}), '(x, frange, Fs, rmv_edge=False, filter_fn=filter_fn, filter_kwargs=\n filter_kwargs)\n', (2179, 2265), False, 'from analysis_helpers.misshapen.nonshape import ampT, bandpass_default, findpt\n'), ((2306, 2336), 'numpy.percentile', 'np.percentile', (['amp', 'percentile'], {}), '(amp, percentile)\n', (2319, 2336), True, 'import numpy as np\n'), ((7005, 7016), 'numpy.zeros', 'np.zeros', (['E'], {}), '(E)\n', (7013, 7016), True, 'import numpy as np\n'), ((8037, 8048), 'numpy.zeros', 'np.zeros', (['T'], {}), '(T)\n', (8045, 8048), True, 'import numpy as np\n'), ((9626, 9637), 'numpy.zeros', 'np.zeros', (['P'], {}), '(P)\n', (9634, 9637), True, 'import numpy as np\n'), ((9968, 9979), 'numpy.zeros', 'np.zeros', (['T'], {}), '(T)\n', (9976, 9979), True, 'import numpy as np\n'), ((11036, 11047), 'numpy.zeros', 'np.zeros', (['E'], {}), '(E)\n', (11044, 11047), True, 'import numpy as np\n'), ((11248, 11259), 'numpy.zeros', 'np.zeros', (['E'], {}), '(E)\n', (11256, 11259), True, 'import numpy as np\n'), ((12901, 12954), 'numpy.arange', 'np.arange', (['(-avgwave_halflen)', '(avgwave_halflen + dt)', 'dt'], {}), '(-avgwave_halflen, avgwave_halflen + dt, dt)\n', (12910, 12954), True, 'import numpy as np\n'), ((13058, 13112), 'analysis_helpers.misshapen.nonshape.findpt', 'findpt', (['x', 'f_range', 'Fs'], {'boundary': '(N_samples_halflen + 1)'}), '(x, f_range, Fs, boundary=N_samples_halflen + 1)\n', (13064, 13112), False, 'from analysis_helpers.misshapen.nonshape import ampT, bandpass_default, findpt\n'), ((15443, 15467), 'numpy.zeros', 'np.zeros', (['max_iterations'], {}), '(max_iterations)\n', (15451, 15467), True, 'import numpy as np\n'), ((16818, 16834), 'numpy.zeros', 'np.zeros', (['L_samp'], {}), '(L_samp)\n', (16826, 16834), True, 'import numpy as np\n'), ((17236, 17265), 'numpy.zeros', 'np.zeros', (['(N_windows, L_samp)'], {}), '((N_windows, L_samp))\n', (17244, 17265), True, 'import numpy as np\n'), ((3278, 3290), 'numpy.array', 'np.array', (['Ps'], {}), '(Ps)\n', (3286, 3290), True, 'import numpy as np\n'), ((3304, 3316), 'numpy.array', 'np.array', (['Ts'], {}), '(Ts)\n', (3312, 3316), True, 'import numpy as np\n'), ((4865, 4877), 'numpy.array', 'np.array', (['Ps'], {}), '(Ps)\n', (4873, 4877), True, 'import numpy as np\n'), ((4891, 4903), 'numpy.array', 'np.array', (['Ts'], {}), '(Ts)\n', (4899, 4903), True, 'import numpy as np\n'), ((7323, 7340), 'numpy.sum', 'np.sum', (['(peak ** 2)'], {}), '(peak ** 2)\n', (7329, 7340), True, 'import numpy as np\n'), ((7362, 7381), 'numpy.sum', 'np.sum', (['(trough ** 2)'], {}), '(trough ** 2)\n', (7368, 7381), True, 'import numpy as np\n'), ((7397, 7431), 'numpy.max', 'np.max', (['(peakenergy, troughenergy)'], {}), '((peakenergy, troughenergy))\n', (7403, 7431), True, 'import numpy as np\n'), ((7452, 7480), 'numpy.sum', 'np.sum', (['((peak - trough) ** 2)'], {}), '((peak - trough) ** 2)\n', (7458, 7480), True, 'import numpy as np\n'), ((8396, 8411), 'numpy.flipud', 'np.flipud', (['rise'], {}), '(rise)\n', (8405, 8411), True, 'import numpy as np\n'), ((8701, 8725), 'numpy.sum', 'np.sum', (['rise_decay_maxes'], {}), '(rise_decay_maxes)\n', (8707, 8725), True, 'import numpy as np\n'), ((11628, 11670), 'numpy.max', 'np.max', (['(Psharp / Tsharp, Tsharp / Psharp)'], {}), '((Psharp / Tsharp, Tsharp / Psharp))\n', (11634, 11670), True, 'import numpy as np\n'), ((11703, 11727), 
'numpy.log10', 'np.log10', (['sharpnessratio'], {}), '(sharpnessratio)\n', (11711, 11727), True, 'import numpy as np\n'), ((11901, 11943), 'numpy.max', 'np.max', (['(Rsteep / Dsteep, Dsteep / Rsteep)'], {}), '((Rsteep / Dsteep, Dsteep / Rsteep))\n', (11907, 11943), True, 'import numpy as np\n'), ((11976, 12000), 'numpy.log10', 'np.log10', (['steepnessratio'], {}), '(steepnessratio)\n', (11984, 12000), True, 'import numpy as np\n'), ((15998, 16020), 'numpy.copy', 'np.copy', (['window_starts'], {}), '(window_starts)\n', (16005, 16020), True, 'import numpy as np\n'), ((17738, 17747), 'numpy.sum', 'np.sum', (['d'], {}), '(d)\n', (17744, 17747), True, 'import numpy as np\n'), ((17983, 18008), 'numpy.random.randint', 'np.random.randint', (['N_samp'], {}), '(N_samp)\n', (18000, 18008), True, 'import numpy as np\n'), ((18088, 18120), 'numpy.abs', 'np.abs', (['(window_starts - new_samp)'], {}), '(window_starts - new_samp)\n', (18094, 18120), True, 'import numpy as np\n'), ((18955, 18967), 'numpy.array', 'np.array', (['Ps'], {}), '(Ps)\n', (18963, 18967), True, 'import numpy as np\n'), ((18981, 18993), 'numpy.array', 'np.array', (['Ts'], {}), '(Ts)\n', (18989, 18993), True, 'import numpy as np\n'), ((8522, 8542), 'numpy.abs', 'np.abs', (['(rise - decay)'], {}), '(rise - decay)\n', (8528, 8542), True, 'import numpy as np\n'), ((8652, 8676), 'numpy.vstack', 'np.vstack', (['(rise, decay)'], {}), '((rise, decay))\n', (8661, 8676), True, 'import numpy as np\n'), ((11215, 11228), 'numpy.diff', 'np.diff', (['rise'], {}), '(rise)\n', (11222, 11228), True, 'import numpy as np\n'), ((16484, 16500), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (16498, 16500), True, 'import numpy as np\n'), ((17399, 17411), 'numpy.std', 'np.std', (['temp'], {}), '(temp)\n', (17405, 17411), True, 'import numpy as np\n'), ((18132, 18145), 'numpy.min', 'np.min', (['dists'], {}), '(dists)\n', (18138, 18145), True, 'import numpy as np\n'), ((9866, 9945), 'numpy.mean', 'np.mean', (['(x[Ps[e]] - x[Ps[e] - window_half], x[Ps[e]] - x[Ps[e] + window_half])'], {}), '((x[Ps[e]] - x[Ps[e] - window_half], x[Ps[e]] - x[Ps[e] + window_half]))\n', (9873, 9945), True, 'import numpy as np\n'), ((10208, 10287), 'numpy.mean', 'np.mean', (['(x[Ts[e] - window_half] - x[Ts[e]], x[Ts[e] + window_half] - x[Ts[e]])'], {}), '((x[Ts[e] - window_half] - x[Ts[e]], x[Ts[e] + window_half] - x[Ts[e]]))\n', (10215, 10287), True, 'import numpy as np\n'), ((11431, 11445), 'numpy.diff', 'np.diff', (['decay'], {}), '(decay)\n', (11438, 11445), True, 'import numpy as np\n'), ((17384, 17397), 'numpy.mean', 'np.mean', (['temp'], {}), '(temp)\n', (17391, 17397), True, 'import numpy as np\n'), ((17632, 17656), 'numpy.sum', 'np.sum', (['(window_diff ** 2)'], {}), '(window_diff ** 2)\n', (17638, 17656), True, 'import numpy as np\n'), ((9793, 9807), 'numpy.diff', 'np.diff', (['Edata'], {}), '(Edata)\n', (9800, 9807), True, 'import numpy as np\n'), ((10135, 10149), 'numpy.diff', 'np.diff', (['Edata'], {}), '(Edata)\n', (10142, 10149), True, 'import numpy as np\n')]
|
import glob
import shutil
import os
from pdf2image import convert_from_path, convert_from_bytes
# main entry
def run_help():
    # TODO: not implemented yet
    print("Hello !")
def run_compress():
    # TODO: not implemented yet
    print("Hello !")
def run_folder_pdfs_2_cbz(dir_name):
full_name = dir_name + "/" + "*.pdf"
full_list = glob.glob(full_name)
for file in full_list:
print('Processing file :'+file)
run_pdf_2_cbz(file)
def run_pdf_2_cbz(full_name):
    file_name = os.path.basename(full_name)
    # use splitext on the basename: str.strip('.pdf') would remove any of the
    # characters '.', 'p', 'd', 'f' from both ends and mangle names like 'dpd.pdf'
    first_name = os.path.splitext(file_name)[0]
dir_name = os.path.dirname(full_name)
target_dir = dir_name + '/' + first_name
os.mkdir(target_dir)
pages = convert_from_path(full_name, 150, poppler_path=r'./poppler/bin')
counter = 0
for page in pages:
counter_str = f'{counter:04d}'
page.save(target_dir + '/' + first_name + '_{}.jpg'.format(counter_str), 'JPEG')
counter += 1
    # base_dir must be relative to root_dir; passing the absolute target_dir
    # here would record broken paths inside the archive
    new_file = shutil.make_archive(target_dir, 'zip', dir_name, first_name)
shutil.move(new_file, dir_name + '/' + first_name + '.cbz')
shutil.rmtree(target_dir)
def run_rar_2_zip():
print("Hello !")
if __name__ == "__main__":
folder = 'D:/tmp'
run_folder_pdfs_2_cbz(folder)
|
[
"os.mkdir",
"pdf2image.convert_from_path",
"shutil.make_archive",
"os.path.basename",
"os.path.dirname",
"os.path.splitext",
"shutil.move",
"glob.glob",
"shutil.rmtree"
] |
[((305, 325), 'glob.glob', 'glob.glob', (['full_name'], {}), '(full_name)\n', (314, 325), False, 'import glob\n'), ((478, 505), 'os.path.basename', 'os.path.basename', (['full_name'], {}), '(full_name)\n', (494, 505), False, 'import os\n'), ((522, 549), 'os.path.splitext', 'os.path.splitext', (['full_name'], {}), '(full_name)\n', (538, 549), False, 'import os\n'), ((631, 657), 'os.path.dirname', 'os.path.dirname', (['full_name'], {}), '(full_name)\n', (646, 657), False, 'import os\n'), ((707, 727), 'os.mkdir', 'os.mkdir', (['target_dir'], {}), '(target_dir)\n', (715, 727), False, 'import os\n'), ((740, 803), 'pdf2image.convert_from_path', 'convert_from_path', (['full_name', '(150)'], {'poppler_path': '"""./poppler/bin"""'}), "(full_name, 150, poppler_path='./poppler/bin')\n", (757, 803), False, 'from pdf2image import convert_from_path, convert_from_bytes\n'), ((1021, 1081), 'shutil.make_archive', 'shutil.make_archive', (['target_dir', '"""zip"""', 'dir_name', 'target_dir'], {}), "(target_dir, 'zip', dir_name, target_dir)\n", (1040, 1081), False, 'import shutil\n'), ((1086, 1145), 'shutil.move', 'shutil.move', (['new_file', "(dir_name + '/' + first_name + '.cbz')"], {}), "(new_file, dir_name + '/' + first_name + '.cbz')\n", (1097, 1145), False, 'import shutil\n'), ((1150, 1175), 'shutil.rmtree', 'shutil.rmtree', (['target_dir'], {}), '(target_dir)\n', (1163, 1175), False, 'import shutil\n')]
|
# Author: <NAME>
import csv
import json
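# Hedged note (an assumption, not documented in the source): all_groups.json
# is expected to hold a list of objects shaped like the row below. Uncomment
# to generate a minimal sample file for testing:
# with open("all_groups.json", "w", encoding="utf-8") as f:
#     json.dump([{"displayName": "Team A", "visibility": "Public",
#                 "hasTeams": True, "owners": ["alice"], "members": ["bob"]}], f)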
with open("all_groups.json", "r", encoding="utf-8") as f:
groups = json.load(f)
with open("all_groups.csv", "w", newline="", encoding="utf8") as csvfile:
csvwriter = csv.writer(csvfile, delimiter=';', quotechar='"', quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(("Name", "Visibility", "Teams enabled", "Owners", "Members"))
    csvwriter.writerows(
        [
            group['displayName'],
            group['visibility'],
            group['hasTeams'],
            ";".join(filter(None, group['owners'])),
            ";".join(filter(None, group['members'])),
        ]
        for group in groups
    )
|
[
"json.load",
"csv.writer"
] |
[((113, 125), 'json.load', 'json.load', (['f'], {}), '(f)\n', (122, 125), False, 'import json\n'), ((217, 293), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""";"""', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(csvfile, delimiter=\';\', quotechar=\'"\', quoting=csv.QUOTE_MINIMAL)\n', (227, 293), False, 'import csv\n')]
|
# Copyright 2016 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import versionutils
from ironic.api.controllers.v1 import versions as api_versions
from ironic.common import release_mappings
from ironic.conductor import rpcapi
from ironic.db.sqlalchemy import models
from ironic.objects import base as obj_base
from ironic.tests import base
def _check_versions_compatibility(conf_version, actual_version):
"""Checks the configured version against the actual version.
Returns True if the configured version is <= the actual version;
otherwise returns False.
:param conf_version: configured version, a string with dots
:param actual_version: actual version, a string with dots
:returns: True if the configured version is <= the actual version;
False otherwise.
"""
conf_cap = versionutils.convert_version_to_tuple(conf_version)
actual_cap = versionutils.convert_version_to_tuple(actual_version)
return conf_cap <= actual_cap
class ReleaseMappingsTestCase(base.TestCase):
"""Tests the dict release_mappings.RELEASE_MAPPING.
Tests whether the dict release_mappings.RELEASE_MAPPING is correct,
valid and consistent.
"""
def test_structure(self):
for value in release_mappings.RELEASE_MAPPING.values():
self.assertIsInstance(value, dict)
self.assertEqual({'api', 'rpc', 'objects'}, set(value))
self.assertIsInstance(value['api'], str)
(major, minor) = value['api'].split('.')
self.assertEqual(1, int(major))
self.assertLessEqual(int(minor), api_versions.MINOR_MAX_VERSION)
self.assertIsInstance(value['rpc'], str)
self.assertIsInstance(value['objects'], dict)
for obj_value in value['objects'].values():
self.assertIsInstance(obj_value, list)
for ver in obj_value:
self.assertIsInstance(ver, str)
tuple_ver = versionutils.convert_version_to_tuple(ver)
self.assertEqual(2, len(tuple_ver))
def test_object_names_are_registered(self):
registered_objects = set(obj_base.IronicObjectRegistry.obj_classes())
for mapping in release_mappings.RELEASE_MAPPING.values():
objects = set(mapping['objects'])
self.assertTrue(objects.issubset(registered_objects))
def test_current_rpc_version(self):
self.assertEqual(rpcapi.ConductorAPI.RPC_API_VERSION,
release_mappings.RELEASE_MAPPING['master']['rpc'])
def test_current_object_versions(self):
registered_objects = obj_base.IronicObjectRegistry.obj_classes()
obj_versions = release_mappings.get_object_versions(
releases=['master'])
for obj, vers in obj_versions.items():
# vers is a set of versions, not ordered
self.assertIn(registered_objects[obj][0].VERSION, vers)
def test_contains_all_db_objects(self):
self.assertIn('master', release_mappings.RELEASE_MAPPING)
model_names = set((s.__name__ for s in models.Base.__subclasses__()))
exceptions = set(['NodeTag', 'ConductorHardwareInterfaces',
'NodeTrait', 'BIOSSetting', 'DeployTemplateStep'])
# NOTE(xek): As a rule, all models which can be changed between
# releases or are sent through RPC should have their counterpart
# versioned objects.
model_names -= exceptions
# NodeTrait maps to two objects
model_names |= set(['Trait', 'TraitList'])
object_names = set(
release_mappings.RELEASE_MAPPING['master']['objects'])
self.assertEqual(model_names, object_names)
def test_rpc_and_objects_versions_supported(self):
registered_objects = obj_base.IronicObjectRegistry.obj_classes()
for versions in release_mappings.RELEASE_MAPPING.values():
self.assertTrue(_check_versions_compatibility(
versions['rpc'], rpcapi.ConductorAPI.RPC_API_VERSION))
for obj_name, obj_vers in versions['objects'].items():
for ver in obj_vers:
self.assertTrue(_check_versions_compatibility(
ver, registered_objects[obj_name][0].VERSION))
class GetObjectVersionsTestCase(base.TestCase):
TEST_MAPPING = {
'7.0': {
'api': '1.30',
'rpc': '1.40',
'objects': {
'Node': ['1.21'],
'Conductor': ['1.2'],
'Port': ['1.6'],
'Portgroup': ['1.3'],
}
},
'8.0': {
'api': '1.30',
'rpc': '1.40',
'objects': {
'Node': ['1.22'],
'Conductor': ['1.2'],
'Chassis': ['1.3'],
'Port': ['1.6'],
'Portgroup': ['1.5', '1.4'],
}
},
'master': {
'api': '1.34',
'rpc': '1.40',
'objects': {
'Node': ['1.23'],
'Conductor': ['1.2'],
'Chassis': ['1.3'],
'Port': ['1.7'],
'Portgroup': ['1.5'],
}
},
}
TEST_MAPPING['ocata'] = TEST_MAPPING['7.0']
def test_get_object_versions(self):
with mock.patch.dict(release_mappings.RELEASE_MAPPING,
self.TEST_MAPPING, clear=True):
actual_versions = release_mappings.get_object_versions()
expected_versions = {
'Node': set(['1.21', '1.22', '1.23']),
'Conductor': set(['1.2']),
'Chassis': set(['1.3']),
'Port': set(['1.6', '1.7']),
'Portgroup': set(['1.3', '1.4', '1.5']),
}
self.assertEqual(expected_versions, actual_versions)
def test_get_object_versions_releases(self):
with mock.patch.dict(release_mappings.RELEASE_MAPPING,
self.TEST_MAPPING, clear=True):
actual_versions = release_mappings.get_object_versions(
releases=['ocata'])
expected_versions = {
'Node': set(['1.21']),
'Conductor': set(['1.2']),
'Port': set(['1.6']),
'Portgroup': set(['1.3']),
}
self.assertEqual(expected_versions, actual_versions)
def test_get_object_versions_objects(self):
with mock.patch.dict(release_mappings.RELEASE_MAPPING,
self.TEST_MAPPING, clear=True):
actual_versions = release_mappings.get_object_versions(
objects=['Portgroup', 'Chassis'])
expected_versions = {
'Portgroup': set(['1.3', '1.4', '1.5']),
'Chassis': set(['1.3']),
}
self.assertEqual(expected_versions, actual_versions)
def test_get_object_versions_releases_objects(self):
with mock.patch.dict(release_mappings.RELEASE_MAPPING,
self.TEST_MAPPING, clear=True):
actual_versions = release_mappings.get_object_versions(
releases=['7.0'], objects=['Portgroup', 'Chassis'])
expected_versions = {
'Portgroup': set(['1.3']),
}
self.assertEqual(expected_versions, actual_versions)
|
[
"ironic.db.sqlalchemy.models.Base.__subclasses__",
"ironic.objects.base.IronicObjectRegistry.obj_classes",
"ironic.common.release_mappings.get_object_versions",
"mock.patch.dict",
"oslo_utils.versionutils.convert_version_to_tuple",
"ironic.common.release_mappings.RELEASE_MAPPING.values"
] |
[((1395, 1446), 'oslo_utils.versionutils.convert_version_to_tuple', 'versionutils.convert_version_to_tuple', (['conf_version'], {}), '(conf_version)\n', (1432, 1446), False, 'from oslo_utils import versionutils\n'), ((1464, 1517), 'oslo_utils.versionutils.convert_version_to_tuple', 'versionutils.convert_version_to_tuple', (['actual_version'], {}), '(actual_version)\n', (1501, 1517), False, 'from oslo_utils import versionutils\n'), ((1814, 1855), 'ironic.common.release_mappings.RELEASE_MAPPING.values', 'release_mappings.RELEASE_MAPPING.values', ([], {}), '()\n', (1853, 1855), False, 'from ironic.common import release_mappings\n'), ((2792, 2833), 'ironic.common.release_mappings.RELEASE_MAPPING.values', 'release_mappings.RELEASE_MAPPING.values', ([], {}), '()\n', (2831, 2833), False, 'from ironic.common import release_mappings\n'), ((3200, 3243), 'ironic.objects.base.IronicObjectRegistry.obj_classes', 'obj_base.IronicObjectRegistry.obj_classes', ([], {}), '()\n', (3241, 3243), True, 'from ironic.objects import base as obj_base\n'), ((3267, 3324), 'ironic.common.release_mappings.get_object_versions', 'release_mappings.get_object_versions', ([], {'releases': "['master']"}), "(releases=['master'])\n", (3303, 3324), False, 'from ironic.common import release_mappings\n'), ((4371, 4414), 'ironic.objects.base.IronicObjectRegistry.obj_classes', 'obj_base.IronicObjectRegistry.obj_classes', ([], {}), '()\n', (4412, 4414), True, 'from ironic.objects import base as obj_base\n'), ((4439, 4480), 'ironic.common.release_mappings.RELEASE_MAPPING.values', 'release_mappings.RELEASE_MAPPING.values', ([], {}), '()\n', (4478, 4480), False, 'from ironic.common import release_mappings\n'), ((2724, 2767), 'ironic.objects.base.IronicObjectRegistry.obj_classes', 'obj_base.IronicObjectRegistry.obj_classes', ([], {}), '()\n', (2765, 2767), True, 'from ironic.objects import base as obj_base\n'), ((5908, 5993), 'mock.patch.dict', 'mock.patch.dict', (['release_mappings.RELEASE_MAPPING', 'self.TEST_MAPPING'], {'clear': '(True)'}), '(release_mappings.RELEASE_MAPPING, self.TEST_MAPPING, clear=True\n )\n', (5923, 5993), False, 'import mock\n'), ((6049, 6087), 'ironic.common.release_mappings.get_object_versions', 'release_mappings.get_object_versions', ([], {}), '()\n', (6085, 6087), False, 'from ironic.common import release_mappings\n'), ((6505, 6590), 'mock.patch.dict', 'mock.patch.dict', (['release_mappings.RELEASE_MAPPING', 'self.TEST_MAPPING'], {'clear': '(True)'}), '(release_mappings.RELEASE_MAPPING, self.TEST_MAPPING, clear=True\n )\n', (6520, 6590), False, 'import mock\n'), ((6646, 6702), 'ironic.common.release_mappings.get_object_versions', 'release_mappings.get_object_versions', ([], {'releases': "['ocata']"}), "(releases=['ocata'])\n", (6682, 6702), False, 'from ironic.common import release_mappings\n'), ((7058, 7143), 'mock.patch.dict', 'mock.patch.dict', (['release_mappings.RELEASE_MAPPING', 'self.TEST_MAPPING'], {'clear': '(True)'}), '(release_mappings.RELEASE_MAPPING, self.TEST_MAPPING, clear=True\n )\n', (7073, 7143), False, 'import mock\n'), ((7199, 7269), 'ironic.common.release_mappings.get_object_versions', 'release_mappings.get_object_versions', ([], {'objects': "['Portgroup', 'Chassis']"}), "(objects=['Portgroup', 'Chassis'])\n", (7235, 7269), False, 'from ironic.common import release_mappings\n'), ((7569, 7654), 'mock.patch.dict', 'mock.patch.dict', (['release_mappings.RELEASE_MAPPING', 'self.TEST_MAPPING'], {'clear': '(True)'}), '(release_mappings.RELEASE_MAPPING, self.TEST_MAPPING, clear=True\n )\n', 
(7584, 7654), False, 'import mock\n'), ((7710, 7802), 'ironic.common.release_mappings.get_object_versions', 'release_mappings.get_object_versions', ([], {'releases': "['7.0']", 'objects': "['Portgroup', 'Chassis']"}), "(releases=['7.0'], objects=['Portgroup',\n 'Chassis'])\n", (7746, 7802), False, 'from ironic.common import release_mappings\n'), ((2543, 2585), 'oslo_utils.versionutils.convert_version_to_tuple', 'versionutils.convert_version_to_tuple', (['ver'], {}), '(ver)\n', (2580, 2585), False, 'from oslo_utils import versionutils\n'), ((3664, 3692), 'ironic.db.sqlalchemy.models.Base.__subclasses__', 'models.Base.__subclasses__', ([], {}), '()\n', (3690, 3692), False, 'from ironic.db.sqlalchemy import models\n')]
|
"""
This file is for the methods concerning everything k-nearest neighbors
TODO:
1. convert nominal values to numbers DONE!
2. 2/3 of the data is training data, rest is test DONE from previous!
3. k is specified at runtime ? DONE searching the nearest neighbors
4. majority vote between neighbors DONE
5. reverse transform from number to class.
6. error rate? mean error over 100 samples?
7. confusion matrix?
"""
import math
from operator import itemgetter
def calculate_error(dataclasses: list):
"""
calculates the error rate, = misclassified_data / total data
    :param dataclasses: a 1-dimensional list containing the original and predicted class of each instance in data
:return: error rate ( <=1.0)
"""
wrong = 0
correct = 0
for d in dataclasses:
if d[0] == d[1]:
correct += 1
else:
wrong += 1
return wrong / (wrong+correct)
def get_confusion_matrix(classes: list, dataclasses: list):
"""
creates a confusion matrix
:param classes: is a one-dimensional list containing the class names
    :param dataclasses: a 1-dimensional list containing the original and predicted class of each instance in data
:return: a 2-dimensional list with the first row being the actual class and every other row corresponding
to the number of instances being predicted as class x
"""
confmatrix = []
for x in classes:
line = [x]
for y in classes:
line.append(sum(x == inst[0] and y == inst[1] for inst in dataclasses))
confmatrix.append(line)
return confmatrix
def sqreuclidean_distance(input1: list, input2: list):
"""
function that calculates the squared euclidean distance for our task
:param input1: instance 1
:param input2: instance 2
:return: distance
"""
distance = 0
    # the last element of each instance is the class label; exclude it
    for i in range(len(input1)-1):
distance += pow(input1[i] - input2[i], 2)
return distance
def search_nearest(trainingset: list, inputvector: list, k: int):
"""
function that searches for k nearest neighbors
:param trainingset: already separated and transformed train set
:param inputvector: input instance from test data
:param k: the number of searched neighbors
:return: k nearest neighbors
"""
distances = []
near_neighbours = []
for inst in trainingset:
distances.append([inst, sqreuclidean_distance(inst, inputvector)])
distances = sorted(distances, key=itemgetter(1))
    for i in range(k):
        near_neighbours.append(distances[i][0])  # the i-th closest, not the k-th
return near_neighbours
def getting_class(list_of_neighbors: list, classes: list):
"""
    function that gets the winning class from a majority vote
:param list_of_neighbors: list of vectors(neighbors)
:param classes: is a one-dimensional list containing the class names
    :return: chosen class
"""
votecounter = []
for k in classes:
votecounter.append([k, 0])
for k in range(len(list_of_neighbors)):
target = list_of_neighbors[k][-1]
votecounter[classes.index(target)][1] += 1
votecountersorted = sorted(votecounter, key=itemgetter(1), reverse=True)
return votecountersorted[0][0]
def get_predictions(train_data: list, data: list, k: int, classes: list):
"""
function to get the predicted classes of each instance in data
:param train_data: training set of the data
:param data: test set of the data
    :param k: the parameter k in k-Nearest-Neighbors
:param classes: list of the classes in the data
    :return: a list of [actual_class, predicted_class] pairs, one per instance
"""
dataclasses = []
for d in data:
nearestneighbors = search_nearest(train_data, d, k)
dataclasses.append([d[-1], getting_class(nearestneighbors, classes)])
return dataclasses
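# A hedged end-to-end sketch on a toy dataset (added illustration): each
# instance is [feature_1, feature_2, class_label], with the label last as
# sqreuclidean_distance expects.
def _demo_knn():
    classes = ['a', 'b']
    train = [[1.0, 1.0, 'a'], [1.2, 0.9, 'a'], [5.0, 5.0, 'b'], [5.2, 4.8, 'b']]
    test = [[1.1, 1.0, 'a'], [5.1, 5.0, 'b']]
    dataclasses = get_predictions(train, test, k=3, classes=classes)
    # -> error rate 0.0 and a diagonal confusion matrix for this easy split
    return calculate_error(dataclasses), get_confusion_matrix(classes, dataclasses)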
|
[
"operator.itemgetter"
] |
[((2475, 2488), 'operator.itemgetter', 'itemgetter', (['(1)'], {}), '(1)\n', (2485, 2488), False, 'from operator import itemgetter\n'), ((3142, 3155), 'operator.itemgetter', 'itemgetter', (['(1)'], {}), '(1)\n', (3152, 3155), False, 'from operator import itemgetter\n')]
|
from .id_converter import IDConverter
from .errors import EmojiNotFound
from nextcord import Emoji
from nextcord.ext.abc import ContextBase
import re
import nextcord
class EmojiConverter(IDConverter[Emoji]):
"""Converts to a :class:`~Emoji`.
All lookups are done for the local guild first, if available. If that lookup
fails, then it checks the client's global cache.
The lookup strategy is as follows (in order):
1. Lookup by ID.
2. Lookup by extracting ID from the emoji.
3. Lookup by name
.. versionchanged:: 1.5
Raise :exc:`.EmojiNotFound` instead of generic :exc:`.BadArgument`
"""
async def convert(self, ctx: ContextBase, argument: str) -> Emoji:
match = self._get_id_match(argument) or re.match(r'<a?:[a-zA-Z0-9\_]{1,32}:([0-9]{15,20})>$', argument)
result = None
bot = ctx.bot
guild = ctx.guild
if match is None:
# Try to get the emoji by name. Try local guild first.
if guild:
result = nextcord.utils.get(guild.emojis, name=argument)
if result is None:
result = nextcord.utils.get(bot.emojis, name=argument)
else:
emoji_id = int(match.group(1))
# Try to look up emoji by id.
result = bot.get_emoji(emoji_id)
if result is None:
raise EmojiNotFound(argument)
return result
async def convert_from_id(self, ctx: ContextBase, id: int) -> Emoji:
if result := ctx.bot.get_emoji(id):
return result
raise EmojiNotFound(str(id))
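# Hedged usage sketch (assumes `ctx` is a ContextBase from a running bot; the
# emoji name and ID below are hypothetical):
#
#     emoji = await EmojiConverter().convert(ctx, "<:blob:123456789012345678>")
#
# The regex branch extracts the numeric ID from custom-emoji markup; a bare
# name such as "blob" instead falls through to the name lookup in the local
# guild and then the client's global emoji cache.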
|
[
"re.match",
"nextcord.utils.get"
] |
[((758, 821), 're.match', 're.match', (['"""<a?:[a-zA-Z0-9\\\\_]{1,32}:([0-9]{15,20})>$"""', 'argument'], {}), "('<a?:[a-zA-Z0-9\\\\_]{1,32}:([0-9]{15,20})>$', argument)\n", (766, 821), False, 'import re\n'), ((1033, 1080), 'nextcord.utils.get', 'nextcord.utils.get', (['guild.emojis'], {'name': 'argument'}), '(guild.emojis, name=argument)\n', (1051, 1080), False, 'import nextcord\n'), ((1138, 1183), 'nextcord.utils.get', 'nextcord.utils.get', (['bot.emojis'], {'name': 'argument'}), '(bot.emojis, name=argument)\n', (1156, 1183), False, 'import nextcord\n')]
|
import gc
import os
from glob import glob
import numpy as np
from PIL import Image
import pickle
from tqdm import tqdm_notebook, tqdm
from models.network import U_Net, R2U_Net, AttU_Net, R2AttU_Net
from models.linknet import LinkNet34
from models.deeplabv3.deeplabv3plus import DeepLabV3Plus
from backboned_unet import Unet
import segmentation_models_pytorch as smp
from torchvision import transforms
import cv2
from albumentations import CLAHE
import json
from models.Transpose_unet.unet.model import Unet as Unet_t
from models.octave_unet.unet.model import OctaveUnet
from sklearn.model_selection import KFold, StratifiedKFold
import matplotlib.pyplot as plt
import copy
import torch
class Test(object):
def __init__(self, model_type, image_size, mean, std, t=None):
# Models
self.unet = None
        self.image_size = image_size  # input size of the model
self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
self.model_type = model_type
self.t = t
self.mean = mean
self.std = std
def build_model(self):
"""Build generator and discriminator."""
if self.model_type == 'U_Net':
self.unet = U_Net(img_ch=3, output_ch=1)
elif self.model_type == 'AttU_Net':
self.unet = AttU_Net(img_ch=3, output_ch=1)
elif self.model_type == 'unet_resnet34':
# self.unet = Unet(backbone_name='resnet34', classes=1)
self.unet = smp.Unet('resnet34', encoder_weights='imagenet', activation=None)
elif self.model_type == 'unet_resnet50':
self.unet = smp.Unet('resnet50', encoder_weights='imagenet', activation=None)
elif self.model_type == 'unet_se_resnext50_32x4d':
self.unet = smp.Unet('se_resnext50_32x4d', encoder_weights='imagenet', activation=None)
elif self.model_type == 'unet_densenet121':
self.unet = smp.Unet('densenet121', encoder_weights='imagenet', activation=None)
elif self.model_type == 'unet_resnet34_t':
self.unet = Unet_t('resnet34', encoder_weights='imagenet', activation=None, use_ConvTranspose2d=True)
elif self.model_type == 'unet_resnet34_oct':
self.unet = OctaveUnet('resnet34', encoder_weights='imagenet', activation=None)
elif self.model_type == 'pspnet_resnet34':
self.unet = smp.PSPNet('resnet34', encoder_weights='imagenet', classes=1, activation=None)
elif self.model_type == 'linknet':
self.unet = LinkNet34(num_classes=1)
elif self.model_type == 'deeplabv3plus':
self.unet = DeepLabV3Plus(model_backbone='res50_atrous', num_classes=1)
# self.unet = DeepLabV3Plus(num_classes=1)
# print('build model done!')
self.unet.to(self.device)
def test_model(
self,
thresholds_classify,
thresholds_seg,
average_threshold,
stage_cla,
stage_seg,
n_splits,
test_best_model=True,
less_than_sum=2048*2,
seg_average_vote=True,
images_path=None,
masks_path=None
):
"""
        Args:
            thresholds_classify: list, per-fold classification thresholds; predictions above the threshold are set to 1, otherwise 0
            thresholds_seg: list, per-fold segmentation thresholds
            average_threshold: threshold applied to the averaged segmentation outputs
            stage_cla: which training stage's weights to use as the classification model
            stage_seg: which training stage's weights to use as the segmentation model
            n_splits: list, which folds to test and average over
            test_best_model: whether to test with the best checkpoint; if False, the latest checkpoint is used
            less_than_sum: list, if the total of predicted positive pixels in an image is below this value, the whole prediction is zeroed
            seg_average_vote: bool, True: average the fold outputs, False: majority vote
        """
        # For each fold, load the models, run them on every test image, and combine the results
with torch.no_grad():
for index, (image_path, mask_path) in enumerate(tqdm(zip(images_path, masks_path), total=len(images_path))):
img = Image.open(image_path).convert('RGB')
pred_nfolds = 0
for fold in n_splits:
                    # Load the classification model for this fold and run it
self.unet = None
self.build_model()
if test_best_model:
unet_path = os.path.join('checkpoints', self.model_type,
self.model_type + '_{}_{}_best.pth'.format(stage_cla, fold))
else:
unet_path = os.path.join('checkpoints', self.model_type,
self.model_type + '_{}_{}.pth'.format(stage_cla, fold))
# print("Load classify weight from %s" % unet_path)
self.unet.load_state_dict(torch.load(unet_path)['state_dict'])
self.unet.eval()
seg_unet = copy.deepcopy(self.unet)
                    # Load the segmentation model for this fold and run it
if test_best_model:
unet_path = os.path.join('checkpoints', self.model_type,
self.model_type + '_{}_{}_best.pth'.format(stage_seg, fold))
else:
unet_path = os.path.join('checkpoints', self.model_type,
self.model_type + '_{}_{}.pth'.format(stage_seg, fold))
# print('Load segmentation weight from %s.' % unet_path)
seg_unet.load_state_dict(torch.load(unet_path)['state_dict'])
seg_unet.eval()
pred = self.tta(img, self.unet)
                    # First apply the classification threshold and the pixel-count
                    # threshold to decide whether the image contains a mask at all
pred = np.where(pred > thresholds_classify[fold], 1, 0)
if np.sum(pred) < less_than_sum[fold]:
pred[:] = 0
                    # If a mask is predicted, run the segmentation model
if np.sum(pred) > 0:
pred = self.tta(img, seg_unet)
                        # If using the voting strategy rather than averaging,
                        # binarize with this fold's segmentation threshold
if not seg_average_vote:
pred = np.where(pred > thresholds_seg[fold], 1, 0)
pred_nfolds += pred
if not seg_average_vote:
vote_model_num = len(n_splits)
vote_ticket = round(vote_model_num / 2.0)
pred = np.where(pred_nfolds > vote_ticket, 1, 0)
# print("Using voting strategy, Ticket / Vote models: %d / %d" % (vote_ticket, vote_model_num))
else:
# print('Using average strategy.')
pred = pred_nfolds / len(n_splits)
pred = np.where(pred > average_threshold, 1, 0)
pred = cv2.resize(pred, (1024, 1024))
mask = Image.open(mask_path)
mask = np.around(np.array(mask.convert('L'))/256.)
self.combine_display(img, mask, pred, 'demo')
def image_transform(self, image):
"""对样本进行预处理
"""
resize = transforms.Resize(self.image_size)
to_tensor = transforms.ToTensor()
normalize = transforms.Normalize(self.mean, self.std)
transform_compose = transforms.Compose([resize, to_tensor, normalize])
return transform_compose(image)
def detection(self, image, model):
"""对输入样本进行检测
Args:
image: 待检测样本,Image
model: 要使用的网络
Return:
pred: 检测结果
"""
image = self.image_transform(image)
image = torch.unsqueeze(image, dim=0)
image = image.float().to(self.device)
pred = torch.sigmoid(model(image))
        # reshape the predicted map to (image_size, image_size)
pred = pred.view(self.image_size, self.image_size)
pred = pred.detach().cpu().numpy()
return pred
def tta(self, image, model):
"""执行TTA预测
Args:
image: Image图片
model: 要使用的网络
Return:
pred: 最后预测的结果
"""
preds = np.zeros([self.image_size, self.image_size])
        # 768-resolution branch (disabled)
# image_resize = image.resize((768, 768))
# resize_pred = self.detection(image_resize)
# resize_pred_img = Image.fromarray(resize_pred)
# resize_pred_img = resize_pred_img.resize((1024, 1024))
# preds += np.asarray(resize_pred_img)
        # horizontal flip
image_hflip = image.transpose(Image.FLIP_LEFT_RIGHT)
hflip_pred = self.detection(image_hflip, model)
hflip_pred_img = Image.fromarray(hflip_pred)
pred_img = hflip_pred_img.transpose(Image.FLIP_LEFT_RIGHT)
preds += np.asarray(pred_img)
# CLAHE
aug = CLAHE(p=1.0)
image_np = np.asarray(image)
clahe_image = aug(image=image_np)['image']
clahe_image = Image.fromarray(clahe_image)
clahe_pred = self.detection(clahe_image, model)
preds += clahe_pred
        # original image
        original_pred = self.detection(image, model)
        preds += original_pred
        # average the three predictions (flip, CLAHE, original)
pred = preds / 3.0
return pred
# dice for threshold selection
def dice_overall(self, preds, targs):
        n = preds.shape[0]  # batch size
preds = preds.view(n, -1)
targs = targs.view(n, -1)
# preds, targs = preds.to(self.device), targs.to(self.device)
preds, targs = preds.cpu(), targs.cpu()
        # element-wise product gives the intersection of the two sets (only 1*1 equals 1);
        # summing over the second dim yields a [batch_size] tensor, where each value is the
        # intersection size between the true and predicted masks of that image
        intersect = (preds * targs).sum(-1).float()
        # element-wise sum gives the union; summing over the second dim yields a [batch_size]
        # tensor, where each value is the union size between the true and predicted masks
        union = (preds + targs).sum(-1).float()
        '''
        A zero union happens only when neither the ground truth nor the prediction has any
        positive labels (if the ground truth has labels but the prediction is completely
        wrong, the union sum is non-zero). For images whose union sum is 0, set the
        intersection to 1 and the union to 2, so that 2 * intersect / union evaluates to 1;
        all other cases use 2 * intersect / union directly -- the union above was not
        reduced by the intersection, so the factor of 2 is needed and the maximum value is 1.
        '''
u0 = union == 0
intersect[u0] = 1
union[u0] = 2
return (2. * intersect / union).mean()
def combine_display(self, image_raw, mask, pred, title_diplay):
plt.suptitle(title_diplay)
plt.subplot(1, 3, 1)
plt.title('image_raw')
plt.imshow(image_raw)
plt.subplot(1, 3, 2)
plt.title('mask')
plt.imshow(mask)
plt.subplot(1, 3, 3)
plt.title('pred')
plt.imshow(pred)
plt.show()
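# A hedged sanity check for dice_overall on toy tensors (added illustration,
# not part of the original script): identical masks score 1.0, disjoint masks
# score 0.0, and two empty masks score 1.0 via the zero-union special case.
def _demo_dice_overall():
    solver = Test('unet_resnet34', 1024, (0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
    a = torch.tensor([[1., 1., 0., 0.]])
    b = torch.tensor([[1., 1., 0., 0.]])
    assert solver.dice_overall(a, b).item() == 1.0       # identical masks
    assert solver.dice_overall(a, 1 - b).item() == 0.0   # disjoint masks
    empty = torch.zeros(1, 4)
    assert solver.dice_overall(empty, empty.clone()).item() == 1.0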
if __name__ == "__main__":
mean = (0.485, 0.456, 0.406)
std = (0.229, 0.224, 0.225)
# mean = (0.490, 0.490, 0.490)
# std = (0.229, 0.229, 0.229)
model_name = 'unet_resnet34'
    # stage_cla selects which stage's weights act as the classification model and
    # stage_seg which act as the segmentation model (each stage uses a different
    # image_size); index denotes the cross-validation fold
# image_size TODO
stage_cla, stage_seg = 2, 3
if stage_cla == 1:
image_size = 768
elif stage_cla == 2:
image_size = 1024
with open('checkpoints/'+model_name+'/result_stage2.json', 'r', encoding='utf-8') as json_file:
config_cla = json.load(json_file)
with open('checkpoints/'+model_name+'/result_stage3.json', 'r', encoding='utf-8') as json_file:
config_seg = json.load(json_file)
n_splits = [0] # 0, 1, 2, 3, 4
thresholds_classify, thresholds_seg, less_than_sum = [0 for x in range(5)], [0 for x in range(5)], [0 for x in range(5)]
for x in n_splits:
thresholds_classify[x] = config_cla[str(x)][0]
less_than_sum[x] = config_cla[str(x)][1]
thresholds_seg[x] = config_seg[str(x)][0]
seg_average_vote = False
average_threshold = np.sum(np.asarray(thresholds_seg))/len(n_splits)
test_best_mode = True
print("stage_cla: %d, stage_seg: %d" % (stage_cla, stage_seg))
print('test fold: ', n_splits)
print('thresholds_classify: ', thresholds_classify)
if seg_average_vote:
print('Using average stategy, average_threshold: %f' % average_threshold)
else:
        print('Using voting strategy, thresholds_seg: ', thresholds_seg)
print('less_than_sum: ', less_than_sum)
    # paths of the test samples only
with open('dataset_static_mask.pkl', 'rb') as f:
static = pickle.load(f)
images_path, masks_path, masks_bool = static[0], static[1], static[2]
    # paths of the stage-1 training samples only
with open('dataset_static_mask_stage1.pkl', 'rb') as f:
static_stage1 = pickle.load(f)
images_path_stage1, masks_path_stage1, masks_bool_stage1 = static_stage1[0], static_stage1[1], static_stage1[2]
skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=1)
split = skf.split(images_path, masks_bool)
split_stage1 = skf.split(images_path_stage1, masks_bool_stage1)
val_image_nfolds = list()
val_mask_nfolds = list()
for index, ((train_index, val_index), (train_stage1_index, val_stage1_index)) in enumerate(zip(split, split_stage1)):
val_image = [images_path[x] for x in val_index]
val_mask = [masks_path[x] for x in val_index]
val_image_stage1 = [images_path_stage1[x] for x in val_stage1_index]
val_mask_stage1 = [masks_path_stage1[x] for x in val_stage1_index]
val_image_fold = val_image + val_image_stage1
val_mask_fold = val_mask + val_mask_stage1
val_image_nfolds.append(val_image_fold)
val_mask_nfolds.append(val_mask_fold)
val_image_fold0 = val_image_nfolds[0]
val_mask_fold0 = val_mask_nfolds[0]
solver = Test(model_name, image_size, mean, std)
solver.test_model(
thresholds_classify=thresholds_classify,
thresholds_seg=thresholds_seg,
average_threshold=average_threshold,
stage_cla=stage_cla,
stage_seg=stage_seg,
n_splits=n_splits,
test_best_model=test_best_mode,
less_than_sum=less_than_sum,
seg_average_vote=seg_average_vote,
images_path=images_path,
masks_path=masks_path
)
|
[
"matplotlib.pyplot.title",
"numpy.sum",
"matplotlib.pyplot.suptitle",
"pickle.load",
"torchvision.transforms.Normalize",
"torch.no_grad",
"segmentation_models_pytorch.Unet",
"matplotlib.pyplot.imshow",
"torch.load",
"models.deeplabv3.deeplabv3plus.DeepLabV3Plus",
"models.Transpose_unet.unet.model.Unet",
"models.linknet.LinkNet34",
"torchvision.transforms.Compose",
"models.octave_unet.unet.model.OctaveUnet",
"segmentation_models_pytorch.PSPNet",
"albumentations.CLAHE",
"cv2.resize",
"copy.deepcopy",
"matplotlib.pyplot.show",
"numpy.asarray",
"models.network.U_Net",
"torch.cuda.is_available",
"torch.unsqueeze",
"torchvision.transforms.Resize",
"matplotlib.pyplot.subplot",
"json.load",
"models.network.AttU_Net",
"numpy.zeros",
"PIL.Image.open",
"numpy.where",
"sklearn.model_selection.StratifiedKFold",
"PIL.Image.fromarray",
"torchvision.transforms.ToTensor"
] |
[((12300, 12357), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': '(5)', 'shuffle': '(True)', 'random_state': '(1)'}), '(n_splits=5, shuffle=True, random_state=1)\n', (12315, 12357), False, 'from sklearn.model_selection import KFold, StratifiedKFold\n'), ((6888, 6922), 'torchvision.transforms.Resize', 'transforms.Resize', (['self.image_size'], {}), '(self.image_size)\n', (6905, 6922), False, 'from torchvision import transforms\n'), ((6943, 6964), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (6962, 6964), False, 'from torchvision import transforms\n'), ((6985, 7026), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['self.mean', 'self.std'], {}), '(self.mean, self.std)\n', (7005, 7026), False, 'from torchvision import transforms\n'), ((7056, 7106), 'torchvision.transforms.Compose', 'transforms.Compose', (['[resize, to_tensor, normalize]'], {}), '([resize, to_tensor, normalize])\n', (7074, 7106), False, 'from torchvision import transforms\n'), ((7404, 7433), 'torch.unsqueeze', 'torch.unsqueeze', (['image'], {'dim': '(0)'}), '(image, dim=0)\n', (7419, 7433), False, 'import torch\n'), ((7858, 7902), 'numpy.zeros', 'np.zeros', (['[self.image_size, self.image_size]'], {}), '([self.image_size, self.image_size])\n', (7866, 7902), True, 'import numpy as np\n'), ((8350, 8377), 'PIL.Image.fromarray', 'Image.fromarray', (['hflip_pred'], {}), '(hflip_pred)\n', (8365, 8377), False, 'from PIL import Image\n'), ((8462, 8482), 'numpy.asarray', 'np.asarray', (['pred_img'], {}), '(pred_img)\n', (8472, 8482), True, 'import numpy as np\n'), ((8514, 8526), 'albumentations.CLAHE', 'CLAHE', ([], {'p': '(1.0)'}), '(p=1.0)\n', (8519, 8526), False, 'from albumentations import CLAHE\n'), ((8546, 8563), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (8556, 8563), True, 'import numpy as np\n'), ((8637, 8665), 'PIL.Image.fromarray', 'Image.fromarray', (['clahe_image'], {}), '(clahe_image)\n', (8652, 8665), False, 'from PIL import Image\n'), ((9954, 9980), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['title_diplay'], {}), '(title_diplay)\n', (9966, 9980), True, 'import matplotlib.pyplot as plt\n'), ((9989, 10009), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(1)'], {}), '(1, 3, 1)\n', (10000, 10009), True, 'import matplotlib.pyplot as plt\n'), ((10018, 10040), 'matplotlib.pyplot.title', 'plt.title', (['"""image_raw"""'], {}), "('image_raw')\n", (10027, 10040), True, 'import matplotlib.pyplot as plt\n'), ((10049, 10070), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_raw'], {}), '(image_raw)\n', (10059, 10070), True, 'import matplotlib.pyplot as plt\n'), ((10080, 10100), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(2)'], {}), '(1, 3, 2)\n', (10091, 10100), True, 'import matplotlib.pyplot as plt\n'), ((10109, 10126), 'matplotlib.pyplot.title', 'plt.title', (['"""mask"""'], {}), "('mask')\n", (10118, 10126), True, 'import matplotlib.pyplot as plt\n'), ((10135, 10151), 'matplotlib.pyplot.imshow', 'plt.imshow', (['mask'], {}), '(mask)\n', (10145, 10151), True, 'import matplotlib.pyplot as plt\n'), ((10161, 10181), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(3)'], {}), '(1, 3, 3)\n', (10172, 10181), True, 'import matplotlib.pyplot as plt\n'), ((10190, 10207), 'matplotlib.pyplot.title', 'plt.title', (['"""pred"""'], {}), "('pred')\n", (10199, 10207), True, 'import matplotlib.pyplot as plt\n'), ((10216, 10232), 'matplotlib.pyplot.imshow', 'plt.imshow', (['pred'], {}), '(pred)\n', 
(10226, 10232), True, 'import matplotlib.pyplot as plt\n'), ((10242, 10252), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10250, 10252), True, 'import matplotlib.pyplot as plt\n'), ((10825, 10845), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (10834, 10845), False, 'import json\n'), ((10972, 10992), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (10981, 10992), False, 'import json\n'), ((11949, 11963), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (11960, 11963), False, 'import pickle\n'), ((12150, 12164), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (12161, 12164), False, 'import pickle\n'), ((1191, 1219), 'models.network.U_Net', 'U_Net', ([], {'img_ch': '(3)', 'output_ch': '(1)'}), '(img_ch=3, output_ch=1)\n', (1196, 1219), False, 'from models.network import U_Net, R2U_Net, AttU_Net, R2AttU_Net\n'), ((3653, 3668), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3666, 3668), False, 'import torch\n'), ((11395, 11421), 'numpy.asarray', 'np.asarray', (['thresholds_seg'], {}), '(thresholds_seg)\n', (11405, 11421), True, 'import numpy as np\n'), ((909, 934), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (932, 934), False, 'import torch\n'), ((1288, 1319), 'models.network.AttU_Net', 'AttU_Net', ([], {'img_ch': '(3)', 'output_ch': '(1)'}), '(img_ch=3, output_ch=1)\n', (1296, 1319), False, 'from models.network import U_Net, R2U_Net, AttU_Net, R2AttU_Net\n'), ((6594, 6624), 'cv2.resize', 'cv2.resize', (['pred', '(1024, 1024)'], {}), '(pred, (1024, 1024))\n', (6604, 6624), False, 'import cv2\n'), ((6648, 6669), 'PIL.Image.open', 'Image.open', (['mask_path'], {}), '(mask_path)\n', (6658, 6669), False, 'from PIL import Image\n'), ((1462, 1527), 'segmentation_models_pytorch.Unet', 'smp.Unet', (['"""resnet34"""'], {'encoder_weights': '"""imagenet"""', 'activation': 'None'}), "('resnet34', encoder_weights='imagenet', activation=None)\n", (1470, 1527), True, 'import segmentation_models_pytorch as smp\n'), ((4698, 4722), 'copy.deepcopy', 'copy.deepcopy', (['self.unet'], {}), '(self.unet)\n', (4711, 4722), False, 'import copy\n'), ((5523, 5571), 'numpy.where', 'np.where', (['(pred > thresholds_classify[fold])', '(1)', '(0)'], {}), '(pred > thresholds_classify[fold], 1, 0)\n', (5531, 5571), True, 'import numpy as np\n'), ((6212, 6253), 'numpy.where', 'np.where', (['(pred_nfolds > vote_ticket)', '(1)', '(0)'], {}), '(pred_nfolds > vote_ticket, 1, 0)\n', (6220, 6253), True, 'import numpy as np\n'), ((6529, 6569), 'numpy.where', 'np.where', (['(pred > average_threshold)', '(1)', '(0)'], {}), '(pred > average_threshold, 1, 0)\n', (6537, 6569), True, 'import numpy as np\n'), ((1601, 1666), 'segmentation_models_pytorch.Unet', 'smp.Unet', (['"""resnet50"""'], {'encoder_weights': '"""imagenet"""', 'activation': 'None'}), "('resnet50', encoder_weights='imagenet', activation=None)\n", (1609, 1666), True, 'import segmentation_models_pytorch as smp\n'), ((3813, 3835), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (3823, 3835), False, 'from PIL import Image\n'), ((5595, 5607), 'numpy.sum', 'np.sum', (['pred'], {}), '(pred)\n', (5601, 5607), True, 'import numpy as np\n'), ((5732, 5744), 'numpy.sum', 'np.sum', (['pred'], {}), '(pred)\n', (5738, 5744), True, 'import numpy as np\n'), ((1750, 1825), 'segmentation_models_pytorch.Unet', 'smp.Unet', (['"""se_resnext50_32x4d"""'], {'encoder_weights': '"""imagenet"""', 'activation': 'None'}), "('se_resnext50_32x4d', encoder_weights='imagenet', activation=None)\n", 
(1758, 1825), True, 'import segmentation_models_pytorch as smp\n'), ((4592, 4613), 'torch.load', 'torch.load', (['unet_path'], {}), '(unet_path)\n', (4602, 4613), False, 'import torch\n'), ((5323, 5344), 'torch.load', 'torch.load', (['unet_path'], {}), '(unet_path)\n', (5333, 5344), False, 'import torch\n'), ((5946, 5989), 'numpy.where', 'np.where', (['(pred > thresholds_seg[fold])', '(1)', '(0)'], {}), '(pred > thresholds_seg[fold], 1, 0)\n', (5954, 5989), True, 'import numpy as np\n'), ((1902, 1970), 'segmentation_models_pytorch.Unet', 'smp.Unet', (['"""densenet121"""'], {'encoder_weights': '"""imagenet"""', 'activation': 'None'}), "('densenet121', encoder_weights='imagenet', activation=None)\n", (1910, 1970), True, 'import segmentation_models_pytorch as smp\n'), ((2046, 2139), 'models.Transpose_unet.unet.model.Unet', 'Unet_t', (['"""resnet34"""'], {'encoder_weights': '"""imagenet"""', 'activation': 'None', 'use_ConvTranspose2d': '(True)'}), "('resnet34', encoder_weights='imagenet', activation=None,\n use_ConvTranspose2d=True)\n", (2052, 2139), True, 'from models.Transpose_unet.unet.model import Unet as Unet_t\n'), ((2213, 2280), 'models.octave_unet.unet.model.OctaveUnet', 'OctaveUnet', (['"""resnet34"""'], {'encoder_weights': '"""imagenet"""', 'activation': 'None'}), "('resnet34', encoder_weights='imagenet', activation=None)\n", (2223, 2280), False, 'from models.octave_unet.unet.model import OctaveUnet\n'), ((2357, 2435), 'segmentation_models_pytorch.PSPNet', 'smp.PSPNet', (['"""resnet34"""'], {'encoder_weights': '"""imagenet"""', 'classes': '(1)', 'activation': 'None'}), "('resnet34', encoder_weights='imagenet', classes=1, activation=None)\n", (2367, 2435), True, 'import segmentation_models_pytorch as smp\n'), ((2503, 2527), 'models.linknet.LinkNet34', 'LinkNet34', ([], {'num_classes': '(1)'}), '(num_classes=1)\n', (2512, 2527), False, 'from models.linknet import LinkNet34\n'), ((2601, 2660), 'models.deeplabv3.deeplabv3plus.DeepLabV3Plus', 'DeepLabV3Plus', ([], {'model_backbone': '"""res50_atrous"""', 'num_classes': '(1)'}), "(model_backbone='res50_atrous', num_classes=1)\n", (2614, 2660), False, 'from models.deeplabv3.deeplabv3plus import DeepLabV3Plus\n')]
|
#!/usr/bin/env python
#
# Copyright (c) 2020 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
#
"""
a sensor that reports the state of all traffic lights
"""
import rospy
from carla_msgs.msg import CarlaTrafficLightStatusList,\
CarlaTrafficLightInfoList
from carla_ros_bridge.traffic import TrafficLight
from carla_ros_bridge.pseudo_actor import PseudoActor
class TrafficLightsSensor(PseudoActor):
"""
a sensor that reports the state of all traffic lights
"""
def __init__(self, uid, name, parent, node, actor_list):
"""
Constructor
:param uid: unique identifier for this object
:type uid: int
        :param name: name identifying the sensor
:type name: string
:param parent: the parent of this
:type parent: carla_ros_bridge.Parent
:param node: node-handle
:type node: carla_ros_bridge.CarlaRosBridge
:param actor_list: current list of actors
:type actor_list: map(carla-actor-id -> python-actor-object)
"""
super(TrafficLightsSensor, self).__init__(uid=uid,
name=name,
parent=parent,
node=node)
self.actor_list = actor_list
self.traffic_light_status = CarlaTrafficLightStatusList()
self.traffic_light_actors = []
self.traffic_lights_info_publisher = rospy.Publisher(
self.get_topic_prefix() + "/info",
CarlaTrafficLightInfoList,
queue_size=10,
latch=True)
self.traffic_lights_status_publisher = rospy.Publisher(
self.get_topic_prefix() + "/status",
CarlaTrafficLightStatusList,
queue_size=10,
latch=True)
def destroy(self):
"""
Function to destroy this object.
:return:
"""
self.actor_list = None
super(TrafficLightsSensor, self).destroy()
@staticmethod
def get_blueprint_name():
"""
Get the blueprint identifier for the pseudo sensor
:return: name
"""
return "sensor.pseudo.traffic_lights"
def update(self, frame, timestamp):
"""
Get the state of all known traffic lights
"""
traffic_light_status = CarlaTrafficLightStatusList()
traffic_light_actors = []
for actor_id in self.actor_list:
actor = self.actor_list[actor_id]
if isinstance(actor, TrafficLight):
traffic_light_actors.append(actor)
traffic_light_status.traffic_lights.append(actor.get_status())
if traffic_light_actors != self.traffic_light_actors:
self.traffic_light_actors = traffic_light_actors
traffic_light_info_list = CarlaTrafficLightInfoList()
for traffic_light in traffic_light_actors:
traffic_light_info_list.traffic_lights.append(traffic_light.get_info())
self.traffic_lights_info_publisher.publish(traffic_light_info_list)
if traffic_light_status != self.traffic_light_status:
self.traffic_light_status = traffic_light_status
self.traffic_lights_status_publisher.publish(traffic_light_status)
|
[
"carla_msgs.msg.CarlaTrafficLightInfoList",
"carla_msgs.msg.CarlaTrafficLightStatusList"
] |
[((1440, 1469), 'carla_msgs.msg.CarlaTrafficLightStatusList', 'CarlaTrafficLightStatusList', ([], {}), '()\n', (1467, 1469), False, 'from carla_msgs.msg import CarlaTrafficLightStatusList, CarlaTrafficLightInfoList\n'), ((2448, 2477), 'carla_msgs.msg.CarlaTrafficLightStatusList', 'CarlaTrafficLightStatusList', ([], {}), '()\n', (2475, 2477), False, 'from carla_msgs.msg import CarlaTrafficLightStatusList, CarlaTrafficLightInfoList\n'), ((2939, 2966), 'carla_msgs.msg.CarlaTrafficLightInfoList', 'CarlaTrafficLightInfoList', ([], {}), '()\n', (2964, 2966), False, 'from carla_msgs.msg import CarlaTrafficLightStatusList, CarlaTrafficLightInfoList\n')]
|
from adminsortable2.admin import SortableAdminMixin
from django.contrib import admin
from psalter.models import PsalmTopic
class PsalmTopicAdmin(SortableAdminMixin, admin.ModelAdmin):
list_display = ("topic_name", "psalms")
fields = ("topic_name", "psalms")
admin.site.register(PsalmTopic, PsalmTopicAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((271, 319), 'django.contrib.admin.site.register', 'admin.site.register', (['PsalmTopic', 'PsalmTopicAdmin'], {}), '(PsalmTopic, PsalmTopicAdmin)\n', (290, 319), False, 'from django.contrib import admin\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
import warnings
import pytest
from nose.plugins.attrib import attr
from six import StringIO
from conans.client import tools
from conans.client.output import ConanOutput
from conans.client.tools.oss import cpu_count
from conans.client.tools.win import build_sln_command
from conans.errors import ConanException
from conans.model.settings import Settings
from conans.test.utils.mocks import MockSettings
from conans.test.utils.test_files import temp_folder
from conans.util.files import load, save
@attr('visual_studio')
@pytest.mark.tool_visual_studio
class BuildSLNCommandTest(unittest.TestCase):
def test_no_configuration(self):
dummy = """GlobalSection
EndGlobalSection
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Debug|x64 = Debug|x64
Release|Win32 = Release|Win32
Release|x64 = Release|x64
EndGlobalSection
"""
folder = temp_folder()
path = os.path.join(folder, "dummy.sln")
save(path, dummy)
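        # build_sln_command is deprecated: each call below is expected to emit exactly one DeprecationWarning.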
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
new_out = StringIO()
command = build_sln_command(Settings({}), sln_path=path, targets=None, upgrade_project=False,
build_type='Debug', arch="x86", parallel=False,
output=ConanOutput(new_out))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('/p:Configuration="Debug" /p:UseEnv=false /p:Platform="x86"', command)
self.assertIn("WARN: ***** The configuration Debug|x86 does not exist in this solution *****",
new_out.getvalue())
# use platforms
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
new_out = StringIO()
command = build_sln_command(Settings({}), sln_path=path, targets=None, upgrade_project=False,
build_type='Debug', arch="x86", parallel=False,
platforms={"x86": "Win32"}, output=ConanOutput(new_out))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('/p:Configuration="Debug" /p:UseEnv=false /p:Platform="Win32"', command)
self.assertNotIn("WARN", new_out.getvalue())
self.assertNotIn("ERROR", new_out.getvalue())
def test_no_arch(self):
with self.assertRaises(ConanException):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
new_out = StringIO()
build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=False, build_type='Debug', arch=None, parallel=False,
output=ConanOutput(new_out))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
def test_no_build_type(self):
with self.assertRaises(ConanException):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
new_out = StringIO()
build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=False, build_type=None, arch='x86', parallel=False,
output=ConanOutput(new_out))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
def test_positive(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
output = ConanOutput(StringIO())
command = build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=False, build_type='Debug', arch='x86',
parallel=False, output=output)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('msbuild "dummy.sln"', command)
self.assertIn('/p:Platform="x86"', command)
self.assertNotIn('devenv "dummy.sln" /upgrade', command)
self.assertNotIn('/m:%s' % cpu_count(output=output), command)
self.assertNotIn('/target:teapot', command)
def test_upgrade(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
output = ConanOutput(StringIO())
command = build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=True, build_type='Debug', arch='x86_64',
parallel=False, output=output)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('msbuild "dummy.sln"', command)
self.assertIn('/p:Platform="x64"', command)
self.assertIn('devenv "dummy.sln" /upgrade', command)
self.assertNotIn('/m:%s' % cpu_count(output=output), command)
self.assertNotIn('/target:teapot', command)
with tools.environment_append({"CONAN_SKIP_VS_PROJECTS_UPGRADE": "1"}):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
output = ConanOutput(StringIO())
command = build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=True, build_type='Debug', arch='x86_64',
parallel=False, output=output)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('msbuild "dummy.sln"', command)
self.assertIn('/p:Platform="x64"', command)
self.assertNotIn('devenv "dummy.sln" /upgrade', command)
self.assertNotIn('/m:%s' % cpu_count(output=output), command)
self.assertNotIn('/target:teapot', command)
with tools.environment_append({"CONAN_SKIP_VS_PROJECTS_UPGRADE": "False"}):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
output = ConanOutput(StringIO())
command = build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=True, build_type='Debug', arch='x86_64',
parallel=False, output=output)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('devenv "dummy.sln" /upgrade', command)
def test_parallel(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
output = ConanOutput(StringIO())
command = build_sln_command(Settings({}), sln_path='dummy.sln', targets=None,
upgrade_project=True, build_type='Debug', arch='armv7',
parallel=False, output=output)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('msbuild "dummy.sln"', command)
self.assertIn('/p:Platform="ARM"', command)
self.assertIn('devenv "dummy.sln" /upgrade', command)
self.assertNotIn('/m:%s' % cpu_count(output=output), command)
self.assertNotIn('/target:teapot', command)
def test_target(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
output = ConanOutput(StringIO())
command = build_sln_command(Settings({}), sln_path='dummy.sln', targets=['teapot'],
upgrade_project=False, build_type='Debug', arch='armv8',
parallel=False, output=output)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertIn('msbuild "dummy.sln"', command)
self.assertIn('/p:Platform="ARM64"', command)
self.assertNotIn('devenv "dummy.sln" /upgrade', command)
self.assertNotIn('/m:%s' % cpu_count(output=output), command)
self.assertIn('/target:teapot', command)
def test_toolset(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
new_out = StringIO()
command = build_sln_command(MockSettings({"compiler": "Visual Studio",
"compiler.version": "17",
"build_type": "Debug",
"compiler.runtime": "MDd",
"cppstd": "17"}),
sln_path='dummy.sln', targets=None,
upgrade_project=False, build_type='Debug', arch='armv7',
parallel=False, toolset="v110", output=ConanOutput(new_out))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertTrue(command.startswith('msbuild "dummy.sln" /p:Configuration="Debug" '
'/p:UseEnv=false '
'/p:Platform="ARM" '
'/p:PlatformToolset="v110" '
'/verbosity:minimal '
'/p:ForceImportBeforeCppTargets='), command)
def test_properties_file(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
new_out = StringIO()
command = build_sln_command(MockSettings({"compiler": "Visual Studio",
"compiler.version": "17",
"build_type": "Debug",
"compiler.runtime": "MDd",
"cppstd": "17"}),
sln_path='dummy.sln', targets=None,
upgrade_project=False, build_type='Debug', arch='armv7',
parallel=False, output=ConanOutput(new_out))
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
self.assertTrue(command.startswith('msbuild "dummy.sln" /p:Configuration="Debug" '
'/p:UseEnv=false '
'/p:Platform="ARM" '
'/verbosity:minimal '
'/p:ForceImportBeforeCppTargets='), command)
path_tmp = command.split("/p:ForceImportBeforeCppTargets=")[1][1:-1] # remove quotes
self.assertTrue(os.path.exists(path_tmp))
contents = load(path_tmp)
self.assertIn("<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>", contents)
self.assertIn("<AdditionalOptions>/std:c++17 %(AdditionalOptions)</AdditionalOptions>",
contents)
|
[
"conans.util.files.load",
"warnings.simplefilter",
"conans.test.utils.mocks.MockSettings",
"conans.client.tools.environment_append",
"os.path.exists",
"six.StringIO",
"conans.util.files.save",
"conans.test.utils.test_files.temp_folder",
"warnings.catch_warnings",
"nose.plugins.attrib.attr",
"conans.client.tools.oss.cpu_count",
"conans.model.settings.Settings",
"os.path.join",
"conans.client.output.ConanOutput"
] |
[((574, 595), 'nose.plugins.attrib.attr', 'attr', (['"""visual_studio"""'], {}), "('visual_studio')\n", (578, 595), False, 'from nose.plugins.attrib import attr\n'), ((1017, 1030), 'conans.test.utils.test_files.temp_folder', 'temp_folder', ([], {}), '()\n', (1028, 1030), False, 'from conans.test.utils.test_files import temp_folder\n'), ((1046, 1079), 'os.path.join', 'os.path.join', (['folder', '"""dummy.sln"""'], {}), "(folder, 'dummy.sln')\n", (1058, 1079), False, 'import os\n'), ((1088, 1105), 'conans.util.files.save', 'save', (['path', 'dummy'], {}), '(path, dummy)\n', (1092, 1105), False, 'from conans.util.files import load, save\n'), ((11709, 11723), 'conans.util.files.load', 'load', (['path_tmp'], {}), '(path_tmp)\n', (11713, 11723), False, 'from conans.util.files import load, save\n'), ((1120, 1156), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (1143, 1156), False, 'import warnings\n'), ((1175, 1206), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (1196, 1206), False, 'import warnings\n'), ((1230, 1240), 'six.StringIO', 'StringIO', ([], {}), '()\n', (1238, 1240), False, 'from six import StringIO\n'), ((1897, 1933), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (1920, 1933), False, 'import warnings\n'), ((1952, 1983), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (1973, 1983), False, 'import warnings\n'), ((2007, 2017), 'six.StringIO', 'StringIO', ([], {}), '()\n', (2015, 2017), False, 'from six import StringIO\n'), ((3871, 3907), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3894, 3907), False, 'import warnings\n'), ((3926, 3957), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (3947, 3957), False, 'import warnings\n'), ((4712, 4748), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (4735, 4748), False, 'import warnings\n'), ((4767, 4798), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (4788, 4798), False, 'import warnings\n'), ((5524, 5589), 'conans.client.tools.environment_append', 'tools.environment_append', (["{'CONAN_SKIP_VS_PROJECTS_UPGRADE': '1'}"], {}), "({'CONAN_SKIP_VS_PROJECTS_UPGRADE': '1'})\n", (5548, 5589), False, 'from conans.client import tools\n'), ((6471, 6540), 'conans.client.tools.environment_append', 'tools.environment_append', (["{'CONAN_SKIP_VS_PROJECTS_UPGRADE': 'False'}"], {}), "({'CONAN_SKIP_VS_PROJECTS_UPGRADE': 'False'})\n", (6495, 6540), False, 'from conans.client import tools\n'), ((7204, 7240), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (7227, 7240), False, 'import warnings\n'), ((7259, 7290), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (7280, 7290), False, 'import warnings\n'), ((8042, 8078), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (8065, 8078), False, 'import warnings\n'), ((8097, 8128), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (8118, 8128), False, 'import warnings\n'), ((8890, 8926), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (8913, 8926), False, 'import warnings\n'), ((8945, 8976), 
'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (8966, 8976), False, 'import warnings\n'), ((9000, 9010), 'six.StringIO', 'StringIO', ([], {}), '()\n', (9008, 9010), False, 'from six import StringIO\n'), ((10287, 10323), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (10310, 10323), False, 'import warnings\n'), ((10342, 10373), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (10363, 10373), False, 'import warnings\n'), ((10397, 10407), 'six.StringIO', 'StringIO', ([], {}), '()\n', (10405, 10407), False, 'from six import StringIO\n'), ((11664, 11688), 'os.path.exists', 'os.path.exists', (['path_tmp'], {}), '(path_tmp)\n', (11678, 11688), False, 'import os\n'), ((1281, 1293), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (1289, 1293), False, 'from conans.model.settings import Settings\n'), ((2058, 2070), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (2066, 2070), False, 'from conans.model.settings import Settings\n'), ((2722, 2758), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (2745, 2758), False, 'import warnings\n'), ((2781, 2812), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (2802, 2812), False, 'import warnings\n'), ((2840, 2850), 'six.StringIO', 'StringIO', ([], {}), '()\n', (2848, 2850), False, 'from six import StringIO\n'), ((3326, 3362), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3349, 3362), False, 'import warnings\n'), ((3385, 3416), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (3406, 3416), False, 'import warnings\n'), ((3444, 3454), 'six.StringIO', 'StringIO', ([], {}), '()\n', (3452, 3454), False, 'from six import StringIO\n'), ((3992, 4002), 'six.StringIO', 'StringIO', ([], {}), '()\n', (4000, 4002), False, 'from six import StringIO\n'), ((4044, 4056), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (4052, 4056), False, 'from conans.model.settings import Settings\n'), ((4583, 4607), 'conans.client.tools.oss.cpu_count', 'cpu_count', ([], {'output': 'output'}), '(output=output)\n', (4592, 4607), False, 'from conans.client.tools.oss import cpu_count\n'), ((4833, 4843), 'six.StringIO', 'StringIO', ([], {}), '()\n', (4841, 4843), False, 'from six import StringIO\n'), ((4885, 4897), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (4893, 4897), False, 'from conans.model.settings import Settings\n'), ((5423, 5447), 'conans.client.tools.oss.cpu_count', 'cpu_count', ([], {'output': 'output'}), '(output=output)\n', (5432, 5447), False, 'from conans.client.tools.oss import cpu_count\n'), ((5608, 5644), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (5631, 5644), False, 'import warnings\n'), ((5667, 5698), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (5688, 5698), False, 'import warnings\n'), ((6559, 6595), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (6582, 6595), False, 'import warnings\n'), ((6618, 6649), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (6639, 6649), False, 'import warnings\n'), ((7325, 7335), 'six.StringIO', 'StringIO', ([], {}), '()\n', (7333, 7335), 
False, 'from six import StringIO\n'), ((7377, 7389), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (7385, 7389), False, 'from conans.model.settings import Settings\n'), ((7914, 7938), 'conans.client.tools.oss.cpu_count', 'cpu_count', ([], {'output': 'output'}), '(output=output)\n', (7923, 7938), False, 'from conans.client.tools.oss import cpu_count\n'), ((8163, 8173), 'six.StringIO', 'StringIO', ([], {}), '()\n', (8171, 8173), False, 'from six import StringIO\n'), ((8215, 8227), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (8223, 8227), False, 'from conans.model.settings import Settings\n'), ((8764, 8788), 'conans.client.tools.oss.cpu_count', 'cpu_count', ([], {'output': 'output'}), '(output=output)\n', (8773, 8788), False, 'from conans.client.tools.oss import cpu_count\n'), ((9051, 9190), 'conans.test.utils.mocks.MockSettings', 'MockSettings', (["{'compiler': 'Visual Studio', 'compiler.version': '17', 'build_type':\n 'Debug', 'compiler.runtime': 'MDd', 'cppstd': '17'}"], {}), "({'compiler': 'Visual Studio', 'compiler.version': '17',\n 'build_type': 'Debug', 'compiler.runtime': 'MDd', 'cppstd': '17'})\n", (9063, 9190), False, 'from conans.test.utils.mocks import MockSettings\n'), ((10448, 10587), 'conans.test.utils.mocks.MockSettings', 'MockSettings', (["{'compiler': 'Visual Studio', 'compiler.version': '17', 'build_type':\n 'Debug', 'compiler.runtime': 'MDd', 'cppstd': '17'}"], {}), "({'compiler': 'Visual Studio', 'compiler.version': '17',\n 'build_type': 'Debug', 'compiler.runtime': 'MDd', 'cppstd': '17'})\n", (10460, 10587), False, 'from conans.test.utils.mocks import MockSettings\n'), ((1482, 1502), 'conans.client.output.ConanOutput', 'ConanOutput', (['new_out'], {}), '(new_out)\n', (1493, 1502), False, 'from conans.client.output import ConanOutput\n'), ((2287, 2307), 'conans.client.output.ConanOutput', 'ConanOutput', (['new_out'], {}), '(new_out)\n', (2298, 2307), False, 'from conans.client.output import ConanOutput\n'), ((2885, 2897), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (2893, 2897), False, 'from conans.model.settings import Settings\n'), ((3489, 3501), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (3497, 3501), False, 'from conans.model.settings import Settings\n'), ((5737, 5747), 'six.StringIO', 'StringIO', ([], {}), '()\n', (5745, 5747), False, 'from six import StringIO\n'), ((5793, 5805), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (5801, 5805), False, 'from conans.model.settings import Settings\n'), ((6366, 6390), 'conans.client.tools.oss.cpu_count', 'cpu_count', ([], {'output': 'output'}), '(output=output)\n', (6375, 6390), False, 'from conans.client.tools.oss import cpu_count\n'), ((6688, 6698), 'six.StringIO', 'StringIO', ([], {}), '()\n', (6696, 6698), False, 'from six import StringIO\n'), ((6744, 6756), 'conans.model.settings.Settings', 'Settings', (['{}'], {}), '({})\n', (6752, 6756), False, 'from conans.model.settings import Settings\n'), ((9656, 9676), 'conans.client.output.ConanOutput', 'ConanOutput', (['new_out'], {}), '(new_out)\n', (9667, 9676), False, 'from conans.client.output import ConanOutput\n'), ((11037, 11057), 'conans.client.output.ConanOutput', 'ConanOutput', (['new_out'], {}), '(new_out)\n', (11048, 11057), False, 'from conans.client.output import ConanOutput\n'), ((3080, 3100), 'conans.client.output.ConanOutput', 'ConanOutput', (['new_out'], {}), '(new_out)\n', (3091, 3100), False, 'from conans.client.output import 
ConanOutput\n'), ((3682, 3702), 'conans.client.output.ConanOutput', 'ConanOutput', (['new_out'], {}), '(new_out)\n', (3693, 3702), False, 'from conans.client.output import ConanOutput\n')]
|
from dataclasses import dataclass
import functions as fx
import glow.gwas.log_reg as lr
import glow.gwas.approx_firth as af
import pandas as pd
from nptyping import Float, NDArray
import numpy as np
import pytest
from typing import Any
@dataclass
class TestData:
phenotypes: NDArray[(Any, ), Float]
covariates: NDArray[(Any, Any), Float]
offset: NDArray[(Any, ), Float]
def _get_test_data(use_offset, use_intercept):
test_file = 'test-data/r/sex2withoffset.txt'
df = pd.read_table(test_file, delimiter='\t').astype('float64')
phenotypes = df['case']
covariates = df.loc[:, 'age':'dia']
if use_intercept:
covariates.loc[:, 'intercept'] = 1
offset = df['offset']
if not use_offset:
offset = offset * 0
return TestData(phenotypes.to_numpy(), covariates.to_numpy(), offset.to_numpy())
def _compare_full_firth_beta(test_data, golden_firth_beta):
beta_init = np.zeros(test_data.covariates.shape[1])
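    # Start the Firth penalized-likelihood optimization from an all-zero coefficient vector.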
X = test_data.covariates
y = test_data.phenotypes
offset = test_data.offset
test_firth_fit = af._fit_firth(beta_init=beta_init, X=X, y=y, offset=offset)
test_firth_beta = test_firth_fit.beta
assert np.allclose(golden_firth_beta, test_firth_beta)
def test_full_firth():
# table = read.table("sex2withoffset.txt", header=True)
# logistf(case ~ age+oc+vic+vicl+vis+dia+offset(offset), data=table)
golden_firth_beta = [
-1.1715911, # age
0.1568537, # oc
2.4752617, # vic
-2.2125007, # vicl
-0.8604622, # vis
2.7397140, # dia
-0.5679234 # intercept
]
test_data = _get_test_data(use_offset=True, use_intercept=True)
_compare_full_firth_beta(test_data, golden_firth_beta)
def test_full_firth_no_offset():
# logistf(case ~ age+oc+vic+vicl+vis+dia, data=table)
golden_firth_beta = [
-1.10598130, # age
-0.06881673, # oc
2.26887464, # vic
-2.11140816, # vicl
-0.78831694, # vis
3.09601263, # dia
0.12025404 # intercept
]
test_data = _get_test_data(use_offset=False, use_intercept=True)
_compare_full_firth_beta(test_data, golden_firth_beta)
def test_full_firth_no_intercept():
# logistf(case ~ age+oc+vic+vicl+vis+dia+offset(offset)-1, data=table)
golden_firth_beta = [
-1.2513849, # age
-0.3141151, # oc
2.2066573, # vic
-2.2988439, # vicl
-0.9922712, # vis
2.7046574 # dia
]
test_data = _get_test_data(use_offset=True, use_intercept=False)
_compare_full_firth_beta(test_data, golden_firth_beta)
def test_null_firth_fit_no_offset():
golden_firth_beta = [
-1.10598130, # age
-0.06881673, # oc
2.26887464, # vic
-2.11140816, # vicl
-0.78831694, # vis
3.09601263, # dia
0.12025404 # intercept
]
test_data = _get_test_data(use_offset=False, use_intercept=True)
fit = af.perform_null_firth_fit(test_data.phenotypes,
test_data.covariates,
~np.isnan(test_data.phenotypes),
None,
includes_intercept=True)
assert np.allclose(fit, test_data.covariates @ golden_firth_beta)
def _read_regenie_df(file, trait, num_snps):
df = pd.read_table(file, sep=r'\s+')
df = df[df['ID'] <= num_snps]
df['phenotype'] = trait
return df
def compare_corrections_to_regenie(spark,
pvalue_threshold,
output_prefix,
compare_all_cols,
uncorrected,
corrected,
missing=[]):
(genotype_df, phenotype_df, covariate_df, offset_df) = fx.get_input_dfs(spark,
binary=True,
missing=missing)
glowgr_df = lr.logistic_regression(genotype_df,
phenotype_df,
covariate_df,
offset_df,
correction=lr.correction_approx_firth,
pvalue_threshold=pvalue_threshold,
values_column='values').toPandas()
fx.compare_to_regenie(output_prefix, glowgr_df, compare_all_cols)
correction_counts = glowgr_df.correctionSucceeded.value_counts(dropna=False).to_dict()
if uncorrected > 0:
# null in Spark DataFrame converts to nan in pandas
assert correction_counts[np.nan] == uncorrected
if corrected > 0:
assert correction_counts[True] == corrected
assert False not in correction_counts
return glowgr_df
@pytest.mark.min_spark('3')
def test_correct_all_versus_regenie(spark):
compare_corrections_to_regenie(spark,
0.9999,
'test_bin_out_firth_',
compare_all_cols=True,
uncorrected=0,
corrected=200)
@pytest.mark.min_spark('3')
def test_correct_half_versus_regenie(spark):
compare_corrections_to_regenie(spark,
0.5,
'test_bin_out_half_firth_',
compare_all_cols=False,
uncorrected=103,
corrected=97)
@pytest.mark.min_spark('3')
def test_correct_missing_versus_regenie(spark):
compare_corrections_to_regenie(
spark,
0.9999,
'test_bin_out_missing_firth_',
compare_all_cols=True,
uncorrected=0,
corrected=200,
missing=['35_35', '136_136', '77_77', '100_100', '204_204', '474_474'])
|
[
"numpy.allclose",
"numpy.zeros",
"numpy.isnan",
"functions.compare_to_regenie",
"glow.gwas.approx_firth._fit_firth",
"pandas.read_table",
"glow.gwas.log_reg.logistic_regression",
"pytest.mark.min_spark",
"functions.get_input_dfs"
] |
[((4970, 4996), 'pytest.mark.min_spark', 'pytest.mark.min_spark', (['"""3"""'], {}), "('3')\n", (4991, 4996), False, 'import pytest\n'), ((5345, 5371), 'pytest.mark.min_spark', 'pytest.mark.min_spark', (['"""3"""'], {}), "('3')\n", (5366, 5371), False, 'import pytest\n'), ((5725, 5751), 'pytest.mark.min_spark', 'pytest.mark.min_spark', (['"""3"""'], {}), "('3')\n", (5746, 5751), False, 'import pytest\n'), ((923, 962), 'numpy.zeros', 'np.zeros', (['test_data.covariates.shape[1]'], {}), '(test_data.covariates.shape[1])\n', (931, 962), True, 'import numpy as np\n'), ((1073, 1132), 'glow.gwas.approx_firth._fit_firth', 'af._fit_firth', ([], {'beta_init': 'beta_init', 'X': 'X', 'y': 'y', 'offset': 'offset'}), '(beta_init=beta_init, X=X, y=y, offset=offset)\n', (1086, 1132), True, 'import glow.gwas.approx_firth as af\n'), ((1186, 1233), 'numpy.allclose', 'np.allclose', (['golden_firth_beta', 'test_firth_beta'], {}), '(golden_firth_beta, test_firth_beta)\n', (1197, 1233), True, 'import numpy as np\n'), ((3262, 3320), 'numpy.allclose', 'np.allclose', (['fit', '(test_data.covariates @ golden_firth_beta)'], {}), '(fit, test_data.covariates @ golden_firth_beta)\n', (3273, 3320), True, 'import numpy as np\n'), ((3377, 3408), 'pandas.read_table', 'pd.read_table', (['file'], {'sep': '"""\\\\s+"""'}), "(file, sep='\\\\s+')\n", (3390, 3408), True, 'import pandas as pd\n'), ((3887, 3940), 'functions.get_input_dfs', 'fx.get_input_dfs', (['spark'], {'binary': '(True)', 'missing': 'missing'}), '(spark, binary=True, missing=missing)\n', (3903, 3940), True, 'import functions as fx\n'), ((4531, 4596), 'functions.compare_to_regenie', 'fx.compare_to_regenie', (['output_prefix', 'glowgr_df', 'compare_all_cols'], {}), '(output_prefix, glowgr_df, compare_all_cols)\n', (4552, 4596), True, 'import functions as fx\n'), ((491, 531), 'pandas.read_table', 'pd.read_table', (['test_file'], {'delimiter': '"""\t"""'}), "(test_file, delimiter='\\t')\n", (504, 531), True, 'import pandas as pd\n'), ((3116, 3146), 'numpy.isnan', 'np.isnan', (['test_data.phenotypes'], {}), '(test_data.phenotypes)\n', (3124, 3146), True, 'import numpy as np\n'), ((4109, 4290), 'glow.gwas.log_reg.logistic_regression', 'lr.logistic_regression', (['genotype_df', 'phenotype_df', 'covariate_df', 'offset_df'], {'correction': 'lr.correction_approx_firth', 'pvalue_threshold': 'pvalue_threshold', 'values_column': '"""values"""'}), "(genotype_df, phenotype_df, covariate_df, offset_df,\n correction=lr.correction_approx_firth, pvalue_threshold=\n pvalue_threshold, values_column='values')\n", (4131, 4290), True, 'import glow.gwas.log_reg as lr\n')]
|
"""Support for the SmartWeather weather service."""
import logging
from typing import Dict, List
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
ATTR_WEATHER_HUMIDITY,
ATTR_WEATHER_PRESSURE,
ATTR_WEATHER_TEMPERATURE,
ATTR_WEATHER_WIND_BEARING,
ATTR_WEATHER_WIND_SPEED,
WeatherEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_ID,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.util.dt import utc_from_timestamp
from homeassistant.util.temperature import celsius_to_fahrenheit
from pysmartweatherio import FORECAST_TYPE_DAILY
from .const import (
DOMAIN,
ATTR_CURRENT_ICON,
ATTR_FCST_UV,
ATTR_TEMP_HIGH_TODAY,
ATTR_TEMP_LOW_TODAY,
DEFAULT_ATTRIBUTION,
DEVICE_TYPE_WEATHER,
CONDITION_CLASSES,
)
from .entity import SmartWeatherEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities
) -> None:
"""Add a weather entity from station_id."""
unit_system = "metric" if hass.config.units.is_metric else "imperial"
fcst_coordinator = hass.data[DOMAIN][entry.entry_id]["fcst_coordinator"]
if not fcst_coordinator.data:
return
coordinator = hass.data[DOMAIN][entry.entry_id]["coordinator"]
if not coordinator.data:
return
station_info = hass.data[DOMAIN][entry.entry_id]["station"]
if not station_info:
return
fcst_type = hass.data[DOMAIN][entry.entry_id]["fcst_type"]
if not fcst_type:
return
weather_entity = SmartWeatherWeather(
coordinator,
entry.data,
DEVICE_TYPE_WEATHER,
station_info,
fcst_coordinator,
unit_system,
fcst_type,
)
async_add_entities([weather_entity], True)
class SmartWeatherWeather(SmartWeatherEntity, WeatherEntity):
"""Representation of a weather entity."""
def __init__(
self,
coordinator,
entries,
device_type,
server,
fcst_coordinator,
unit_system,
fcst_type,
) -> None:
"""Initialize the SmartWeather weather entity."""
super().__init__(
coordinator, entries, device_type, server, fcst_coordinator, None
)
self._name = f"{DOMAIN.capitalize()} {entries[CONF_ID]}"
self._unit_system = unit_system
self._forecast_type = fcst_type
@property
def name(self) -> str:
"""Return the name of the sensor."""
return self._name
@property
def temperature(self) -> int:
"""Return the temperature."""
if self._current is not None:
return self._current.air_temperature
return None
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def humidity(self) -> int:
"""Return the humidity."""
if self._current is not None:
return self._current.relative_humidity
return None
@property
def wind_speed(self) -> float:
"""Return the wind speed."""
if self._current is not None:
return self._current.wind_avg
return None
@property
def wind_gust(self) -> float:
"""Return the wind Gust."""
if self._current is not None:
return self._current.wind_gust
return None
@property
def wind_bearing(self) -> int:
"""Return the wind bearing."""
if self._current is not None:
return self._current.wind_bearing
return None
@property
def precipitation(self) -> float:
"""Return the precipitation."""
if self._current is not None:
return round(self._current.precip_accum_local_day, 1)
return None
@property
def pressure(self) -> int:
"""Return the pressure."""
if self._current is not None:
if self._unit_system == "imperial":
return round(self._current.sea_level_pressure, 3)
return round(self._current.sea_level_pressure, 2)
return None
@property
def uv(self) -> int:
"""Return the UV Index."""
if self._current is not None:
return round(self._current.uv, 1)
return None
@property
def current_condition(self) -> int:
"""Return Current Condition Icon."""
if self._forecast is not None:
return self._forecast.current_icon
return None
@property
def condition(self) -> str:
"""Return the weather condition."""
return next(
(k for k, v in CONDITION_CLASSES.items() if self.current_condition in v),
None,
)
@property
def temp_high_today(self) -> float:
"""Return Todays High Temp Forecast."""
if self._forecast is not None:
if self._unit_system == "imperial":
return celsius_to_fahrenheit(self._forecast.temp_high_today)
return self._forecast.temp_high_today
return None
@property
def temp_low_today(self) -> float:
"""Return Todays Low Temp Forecast."""
if self._forecast is not None:
if self._unit_system == "imperial":
return celsius_to_fahrenheit(self._forecast.temp_low_today)
return self._forecast.temp_low_today
return None
@property
def attribution(self) -> str:
"""Return the attribution."""
return DEFAULT_ATTRIBUTION
@property
def device_state_attributes(self) -> Dict:
"""Return SmartWeather specific attributes."""
return {
ATTR_CURRENT_ICON: self.current_condition,
ATTR_FCST_UV: self.uv,
ATTR_WEATHER_HUMIDITY: self.humidity,
ATTR_WEATHER_PRESSURE: self.pressure,
ATTR_WEATHER_TEMPERATURE: self.temperature,
ATTR_WEATHER_WIND_BEARING: self.wind_bearing,
ATTR_WEATHER_WIND_SPEED: self.wind_speed,
ATTR_TEMP_HIGH_TODAY: self.temp_high_today,
ATTR_TEMP_LOW_TODAY: self.temp_low_today,
}
@property
def forecast(self) -> List:
"""Return the forecast."""
if self.fcst_coordinator.data is None or len(self.fcst_coordinator.data) < 2:
return None
data = []
for forecast in self.fcst_coordinator.data:
condition = next(
(k for k, v in CONDITION_CLASSES.items() if forecast.icon in v),
None,
)
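            # Daily forecasts carry a high/low temperature pair; hourly forecasts a single temperature.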
if self._forecast_type == FORECAST_TYPE_DAILY:
data.append(
{
ATTR_FORECAST_TIME: utc_from_timestamp(
forecast.epochtime
).isoformat(),
ATTR_FORECAST_TEMP: forecast.temp_high,
ATTR_FORECAST_TEMP_LOW: forecast.temp_low,
ATTR_FORECAST_PRECIPITATION: round(forecast.precip, 1)
if forecast.precip is not None
else None,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: forecast.precip_probability,
ATTR_FORECAST_CONDITION: condition,
ATTR_FORECAST_WIND_SPEED: forecast.wind_avg,
ATTR_FORECAST_WIND_BEARING: forecast.wind_bearing,
}
)
else:
data.append(
{
ATTR_FORECAST_TIME: utc_from_timestamp(
forecast.epochtime
).isoformat(),
ATTR_FORECAST_TEMP: forecast.temperature,
ATTR_FORECAST_PRECIPITATION: round(forecast.precip, 1)
if forecast.precip is not None
else None,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: forecast.precip_probability,
ATTR_FORECAST_CONDITION: condition,
ATTR_FORECAST_WIND_SPEED: forecast.wind_avg,
ATTR_FORECAST_WIND_BEARING: forecast.wind_bearing,
}
)
return data
|
[
"homeassistant.util.dt.utc_from_timestamp",
"homeassistant.util.temperature.celsius_to_fahrenheit",
"logging.getLogger"
] |
[((1137, 1164), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1154, 1164), False, 'import logging\n'), ((5275, 5328), 'homeassistant.util.temperature.celsius_to_fahrenheit', 'celsius_to_fahrenheit', (['self._forecast.temp_high_today'], {}), '(self._forecast.temp_high_today)\n', (5296, 5328), False, 'from homeassistant.util.temperature import celsius_to_fahrenheit\n'), ((5610, 5662), 'homeassistant.util.temperature.celsius_to_fahrenheit', 'celsius_to_fahrenheit', (['self._forecast.temp_low_today'], {}), '(self._forecast.temp_low_today)\n', (5631, 5662), False, 'from homeassistant.util.temperature import celsius_to_fahrenheit\n'), ((7032, 7070), 'homeassistant.util.dt.utc_from_timestamp', 'utc_from_timestamp', (['forecast.epochtime'], {}), '(forecast.epochtime)\n', (7050, 7070), False, 'from homeassistant.util.dt import utc_from_timestamp\n'), ((7889, 7927), 'homeassistant.util.dt.utc_from_timestamp', 'utc_from_timestamp', (['forecast.epochtime'], {}), '(forecast.epochtime)\n', (7907, 7927), False, 'from homeassistant.util.dt import utc_from_timestamp\n')]
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Helper CGI for POST uploads.
Utility library contains the main logic behind simulating the blobstore
uploading mechanism.
Contents:
  GenerateBlobKey: Function for generating unique blob-keys.
UploadCGIHandler: Main CGI handler class for post uploads.
"""
import base64
import cStringIO
import datetime
import random
import time
import hashlib
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api.blobstore import blobstore
try:
from email.mime import base
from email.mime import multipart
from email import generator
except ImportError:
from email import Generator as generator
from email import MIMEBase as base
from email import MIMEMultipart as multipart
STRIPPED_HEADERS = frozenset(('content-length',
'content-md5',
'content-type',
))
MAX_STRING_NAME_LENGTH = 500
class Error(Exception):
"""Base class for upload processing errors."""
class InvalidMIMETypeFormatError(Error):
"""MIME type was formatted incorrectly."""
class UploadEntityTooLargeError(Error):
"""Entity being uploaded exceeded the allowed size."""
class FilenameOrContentTypeTooLargeError(Error):
"""The filename or content type exceeded the allowed size."""
def __init__(self, invalid_field):
Error.__init__(self,
'The %s exceeds the maximum allowed length of %s.' % (
invalid_field, MAX_STRING_NAME_LENGTH))
class InvalidMetadataError(Error):
"""The filename or content type of the entity was not a valid UTF-8 string."""
def GenerateBlobKey(time_func=time.time, random_func=random.random):
"""Generate a unique BlobKey.
BlobKey is generated using the current time stamp combined with a random
  number. The two values are hashed with md5 and the digest is base64 url-safe
  encoded. The new key is checked for existence in the datastore, and the random
  number is regenerated until there is no match.
Args:
time_func: Function used for generating the timestamp. Used for
dependency injection. Allows for predictable results during tests.
Must return a floating point UTC timestamp.
random_func: Function used for generating the random number. Used for
dependency injection. Allows for predictable results during tests.
Returns:
String version of BlobKey that is unique within the BlobInfo datastore.
None if there are too many name conflicts.
"""
timestamp = str(time_func())
tries = 0
while tries < 10:
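    # Hash the timestamp with a fresh random number; retry if the key collides in the datastore.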
number = str(random_func())
digester = hashlib.md5()
digester.update(timestamp)
digester.update(number)
blob_key = base64.urlsafe_b64encode(digester.digest())
datastore_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
blob_key,
namespace='')
try:
datastore.Get(datastore_key)
tries += 1
except datastore_errors.EntityNotFoundError:
return blob_key
return None
def _SplitMIMEType(mime_type):
"""Split MIME-type in to main and sub type.
Args:
mime_type: full MIME type string.
Returns:
(main, sub):
main: Main part of mime type (application, image, text, etc).
sub: Subtype part of mime type (pdf, png, html, etc).
Raises:
InvalidMIMETypeFormatError: If form item has incorrectly formatted MIME
type.
"""
if mime_type:
mime_type_array = mime_type.split('/')
if len(mime_type_array) == 1:
raise InvalidMIMETypeFormatError('Missing MIME sub-type.')
elif len(mime_type_array) == 2:
main_type, sub_type = mime_type_array
if not(main_type and sub_type):
raise InvalidMIMETypeFormatError(
'Incorrectly formatted MIME type: %s' % mime_type)
return main_type, sub_type
else:
raise InvalidMIMETypeFormatError(
'Incorrectly formatted MIME type: %s' % mime_type)
else:
return 'application', 'octet-stream'
class UploadCGIHandler(object):
"""Class used for handling an upload post.
The main interface to this class is the UploadCGI method. This will receive
the upload form, store the blobs contained in the post and rewrite the blobs
to contain BlobKeys instead of blobs.
"""
def __init__(self,
blob_storage,
generate_blob_key=GenerateBlobKey,
now_func=datetime.datetime.now):
"""Constructor.
Args:
blob_storage: BlobStorage instance where actual blobs are stored.
generate_blob_key: Function used for generating unique blob keys.
now_func: Function that returns the current timestamp.
"""
self.__blob_storage = blob_storage
self.__generate_blob_key = generate_blob_key
self.__now_func = now_func
def StoreBlob(self, form_item, creation):
"""Store form-item to blob storage.
Args:
form_item: FieldStorage instance that represents a specific form field.
This instance should have a non-empty filename attribute, meaning that
it is an uploaded blob rather than a normal form field.
creation: Timestamp to associate with new blobs creation time. This
parameter is provided so that all blobs in the same upload form can have
the same creation date.
Returns:
datastore.Entity('__BlobInfo__') associated with the upload.
"""
main_type, sub_type = _SplitMIMEType(form_item.type)
blob_key = self.__generate_blob_key()
blob_file = form_item.file
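    # Uploads may arrive base64-encoded; decode them before writing to blob storage.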
if 'Content-Transfer-Encoding' in form_item.headers:
if form_item.headers['Content-Transfer-Encoding'] == 'base64':
blob_file = cStringIO.StringIO(
base64.urlsafe_b64decode(blob_file.read()))
self.__blob_storage.StoreBlob(blob_key, blob_file)
content_type_formatter = base.MIMEBase(main_type, sub_type,
**form_item.type_options)
blob_entity = datastore.Entity('__BlobInfo__',
name=str(blob_key),
namespace='')
try:
blob_entity['content_type'] = (
content_type_formatter['content-type'].decode('utf-8'))
blob_entity['creation'] = creation
blob_entity['filename'] = form_item.filename.decode('utf-8')
except UnicodeDecodeError:
raise InvalidMetadataError(
'The uploaded entity contained invalid UTF-8 metadata. This may be '
'because the page containing the upload form was served with a '
'charset other than "utf-8".')
blob_file.seek(0)
digester = hashlib.md5()
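    # Hash the blob in 1 MB chunks so large uploads are never read into memory at once.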
while True:
block = blob_file.read(1 << 20)
if not block:
break
digester.update(block)
blob_entity['md5_hash'] = digester.hexdigest()
blob_entity['size'] = blob_file.tell()
blob_file.seek(0)
datastore.Put(blob_entity)
return blob_entity
def _GenerateMIMEMessage(self,
form,
boundary=None,
max_bytes_per_blob=None,
max_bytes_total=None,
bucket_name=None):
"""Generate a new post from original form.
Also responsible for storing blobs in the datastore.
Args:
form: Instance of cgi.FieldStorage representing the whole form
derived from original post data.
boundary: Boundary to use for resulting form. Used only in tests so
that the boundary is always consistent.
max_bytes_per_blob: The maximum size in bytes that any single blob
in the form is allowed to be.
max_bytes_total: The maximum size in bytes that the total of all blobs
in the form is allowed to be.
      bucket_name: The name of the Google Storage bucket to upload the file to.
Returns:
A MIMEMultipart instance representing the new HTTP post which should be
      forwarded to the developer's actual CGI handler. DO NOT use the return
value of this method to generate a string unless you know what you're
      doing and properly handle folding whitespace (from rfc822).
Raises:
UploadEntityTooLargeError: The upload exceeds either the
max_bytes_per_blob or max_bytes_total limits.
FilenameOrContentTypeTooLargeError: The filename or the content_type of
the upload is larger than the allowed size for a string type in the
datastore.
"""
message = multipart.MIMEMultipart('form-data', boundary)
for name, value in form.headers.items():
if name.lower() not in STRIPPED_HEADERS:
message.add_header(name, value)
def IterateForm():
"""Flattens form in to single sequence of cgi.FieldStorage instances.
The resulting cgi.FieldStorage objects are a little bit irregular in
their structure. A single name can have mulitple sub-items. In this
case, the root FieldStorage object has a list associated with that field
name. Otherwise, the root FieldStorage object just refers to a single
nested instance.
Lists of FieldStorage instances occur when a form has multiple values
for the same name.
Yields:
cgi.FieldStorage irrespective of their nesting level.
"""
for key in sorted(form):
form_item = form[key]
if isinstance(form_item, list):
for list_item in form_item:
yield list_item
else:
yield form_item
creation = self.__now_func()
total_bytes_uploaded = 0
created_blobs = []
upload_too_large = False
filename_too_large = False
content_type_too_large = False
for form_item in IterateForm():
disposition_parameters = {'name': form_item.name}
if form_item.filename is None:
variable = base.MIMEBase('text', 'plain')
variable.set_payload(form_item.value)
else:
if not form_item.filename:
continue
disposition_parameters['filename'] = form_item.filename
main_type, sub_type = _SplitMIMEType(form_item.type)
form_item.file.seek(0, 2)
content_length = form_item.file.tell()
form_item.file.seek(0)
total_bytes_uploaded += content_length
if max_bytes_per_blob is not None:
if max_bytes_per_blob < content_length:
upload_too_large = True
break
if max_bytes_total is not None:
if max_bytes_total < total_bytes_uploaded:
upload_too_large = True
break
if form_item.filename is not None:
if MAX_STRING_NAME_LENGTH < len(form_item.filename):
filename_too_large = True
break
if form_item.type is not None:
if MAX_STRING_NAME_LENGTH < len(form_item.type):
content_type_too_large = True
break
blob_entity = self.StoreBlob(form_item, creation)
created_blobs.append(blob_entity)
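        # Replace the raw payload with a message/external-body part that references the blob key.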
variable = base.MIMEBase('message',
'external-body',
access_type=blobstore.BLOB_KEY_HEADER,
blob_key=blob_entity.key().name())
form_item.file.seek(0)
digester = hashlib.md5()
while True:
block = form_item.file.read(1 << 20)
if not block:
break
digester.update(block)
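        # The base64-encoded hex digest is reused below as the Content-MD5 header value.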
blob_key = base64.urlsafe_b64encode(digester.hexdigest())
form_item.file.seek(0)
external = base.MIMEBase(main_type,
sub_type,
**form_item.type_options)
headers = dict(form_item.headers)
headers['Content-Length'] = str(content_length)
headers[blobstore.UPLOAD_INFO_CREATION_HEADER] = (
blobstore._format_creation(creation))
if bucket_name:
headers[blobstore.CLOUD_STORAGE_OBJECT_HEADER] = (
'/gs/%s/fake-%s-%s' % (bucket_name, blob_entity.key().name(),
blob_key))
headers['Content-MD5'] = blob_key
for key, value in headers.iteritems():
external.add_header(key, value)
external_disposition_parameters = dict(disposition_parameters)
external_disposition_parameters['filename'] = form_item.filename
if not external.get('Content-Disposition'):
external.add_header('Content-Disposition',
'form-data',
**external_disposition_parameters)
variable.set_payload([external])
variable.add_header('Content-Disposition',
'form-data',
**disposition_parameters)
message.attach(variable)
if upload_too_large or filename_too_large or content_type_too_large:
for blob in created_blobs:
datastore.Delete(blob)
if upload_too_large:
raise UploadEntityTooLargeError()
elif filename_too_large:
raise FilenameOrContentTypeTooLargeError('filename')
else:
raise FilenameOrContentTypeTooLargeError('content-type')
return message
def GenerateMIMEMessageString(self,
form,
boundary=None,
max_bytes_per_blob=None,
max_bytes_total=None,
bucket_name=None):
"""Generate a new post string from original form.
Args:
form: Instance of cgi.FieldStorage representing the whole form
derived from original post data.
boundary: Boundary to use for resulting form. Used only in tests so
that the boundary is always consistent.
max_bytes_per_blob: The maximum size in bytes that any single blob
in the form is allowed to be.
max_bytes_total: The maximum size in bytes that the total of all blobs
in the form is allowed to be.
      bucket_name: The name of the Google Storage bucket to upload the file to.
Returns:
A string rendering of a MIMEMultipart instance.
"""
message = self._GenerateMIMEMessage(form,
boundary=boundary,
max_bytes_per_blob=max_bytes_per_blob,
max_bytes_total=max_bytes_total,
bucket_name=bucket_name)
message_out = cStringIO.StringIO()
gen = generator.Generator(message_out, maxheaderlen=0)
gen.flatten(message, unixfrom=False)
return message_out.getvalue()
|
[
"hashlib.md5",
"google.appengine.api.datastore.Get",
"email.Generator.Generator",
"email.MIMEMultipart.MIMEMultipart",
"google.appengine.api.datastore.Key.from_path",
"email.MIMEBase.MIMEBase",
"google.appengine.api.blobstore.blobstore._format_creation",
"google.appengine.api.datastore.Delete",
"cStringIO.StringIO",
"google.appengine.api.datastore.Put"
] |
[((3244, 3257), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (3255, 3257), False, 'import hashlib\n'), ((3396, 3469), 'google.appengine.api.datastore.Key.from_path', 'datastore.Key.from_path', (['blobstore.BLOB_INFO_KIND', 'blob_key'], {'namespace': '""""""'}), "(blobstore.BLOB_INFO_KIND, blob_key, namespace='')\n", (3419, 3469), False, 'from google.appengine.api import datastore\n'), ((6496, 6556), 'email.MIMEBase.MIMEBase', 'base.MIMEBase', (['main_type', 'sub_type'], {}), '(main_type, sub_type, **form_item.type_options)\n', (6509, 6556), True, 'from email import MIMEBase as base\n'), ((7275, 7288), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (7286, 7288), False, 'import hashlib\n'), ((7528, 7554), 'google.appengine.api.datastore.Put', 'datastore.Put', (['blob_entity'], {}), '(blob_entity)\n', (7541, 7554), False, 'from google.appengine.api import datastore\n'), ((9129, 9175), 'email.MIMEMultipart.MIMEMultipart', 'multipart.MIMEMultipart', (['"""form-data"""', 'boundary'], {}), "('form-data', boundary)\n", (9152, 9175), True, 'from email import MIMEMultipart as multipart\n'), ((15139, 15159), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (15157, 15159), False, 'import cStringIO\n'), ((15170, 15218), 'email.Generator.Generator', 'generator.Generator', (['message_out'], {'maxheaderlen': '(0)'}), '(message_out, maxheaderlen=0)\n', (15189, 15218), True, 'from email import Generator as generator\n'), ((3573, 3601), 'google.appengine.api.datastore.Get', 'datastore.Get', (['datastore_key'], {}), '(datastore_key)\n', (3586, 3601), False, 'from google.appengine.api import datastore\n'), ((10477, 10507), 'email.MIMEBase.MIMEBase', 'base.MIMEBase', (['"""text"""', '"""plain"""'], {}), "('text', 'plain')\n", (10490, 10507), True, 'from email import MIMEBase as base\n'), ((11918, 11931), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (11929, 11931), False, 'import hashlib\n'), ((12192, 12252), 'email.MIMEBase.MIMEBase', 'base.MIMEBase', (['main_type', 'sub_type'], {}), '(main_type, sub_type, **form_item.type_options)\n', (12205, 12252), True, 'from email import MIMEBase as base\n'), ((12488, 12524), 'google.appengine.api.blobstore.blobstore._format_creation', 'blobstore._format_creation', (['creation'], {}), '(creation)\n', (12514, 12524), False, 'from google.appengine.api.blobstore import blobstore\n'), ((13556, 13578), 'google.appengine.api.datastore.Delete', 'datastore.Delete', (['blob'], {}), '(blob)\n', (13572, 13578), False, 'from google.appengine.api import datastore\n')]
|
from setuptools import setup
requirements = [
'numpy',
'pandas',
'scikit-learn',
'matplotlib'
]
setup(
name='ds_internship_task2',
version='0.1',
url='https://github.com/acivgin1/M1-DS-internship',
description='Testing standard classifiers on titanic dataset',
entry_points={'console_scripts': ['run-all=ds_internship_task2.command_line:main']},
author='<NAME>',
author_email='<EMAIL>',
packages=['ds_internship_task2'],
install_requires=requirements
)
|
[
"setuptools.setup"
] |
[((114, 492), 'setuptools.setup', 'setup', ([], {'name': '"""ds_internship_task2"""', 'version': '"""0.1"""', 'url': '"""https://github.com/acivgin1/M1-DS-internship"""', 'description': '"""Testing standard classifiers on titanic dataset"""', 'entry_points': "{'console_scripts': ['run-all=ds_internship_task2.command_line:main']}", 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'packages': "['ds_internship_task2']", 'install_requires': 'requirements'}), "(name='ds_internship_task2', version='0.1', url=\n 'https://github.com/acivgin1/M1-DS-internship', description=\n 'Testing standard classifiers on titanic dataset', entry_points={\n 'console_scripts': ['run-all=ds_internship_task2.command_line:main']},\n author='<NAME>', author_email='<EMAIL>', packages=[\n 'ds_internship_task2'], install_requires=requirements)\n", (119, 492), False, 'from setuptools import setup\n')]
|
import io
from setuptools import setup
setup(name='NotSoFastQC',
description='A tool to generate FastQC-like graphs from a FastQC file',
long_description=io.open('README.md', encoding='utf-8').read(),
long_description_content_type='text/markdown',
version='1.1',
url='https://github.com/jamesfox96/NotSoFastQC',
packages=['NotSoFastQC'],
install_requires=['tabulate>=0.8.7',
'pandas>=0.25.0rc0',
'matplotlib>=3.3.2',
'seaborn>=0.11.0',
'numpy>=1.17.0rc1',
'scipy>=1.5.4'],
entry_points={'console_scripts': ['NotSoFastQC=NotSoFastQC.__main__:main']},
)
|
[
"io.open"
] |
[((167, 205), 'io.open', 'io.open', (['"""README.md"""'], {'encoding': '"""utf-8"""'}), "('README.md', encoding='utf-8')\n", (174, 205), False, 'import io\n')]
|
import cv2
import numpy as np
import os
def load_image(path: str) -> np.ndarray:
"""Загрузка ихображения
:param path: путь к файлу с изображением
:return: загруженное изображение
"""
    if not isinstance(path, str):
        raise TypeError(f'The type of path {type(path)} is not a string')
    if not os.path.exists(path):
        raise FileNotFoundError(f'File {path} does not exist.')
image = cv2.imread(path)
return image
def extract_contours(image: np.ndarray) -> np.ndarray:
"""Поиск контуров на изображени
:param image: предварительно обработанное изображение с нанесенными рамками
:return: контуры на изображении
"""
    # A color range that cannot occur in the document itself (blue, in our case)
lower_range = np.array([110, 50, 50])
upper_range = np.array([130, 255, 255])
image_mask = cv2.inRange(image, lower_range, upper_range)
thresh = cv2.Canny(image_mask, 10, 250)
contours_of_frames, _ = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
return contours_of_frames
def extract_frames(contours_of_frames: np.ndarray) -> list:
"""Поиск рамок на изображении
:param contours_of_frames: контуры на изображении
:return: полученные координаты рамок
"""
frames = []
    # iterate over every detected contour
for contours_of_frame in contours_of_frames:
        rect = cv2.minAreaRect(contours_of_frame)  # fit a minimum-area bounding rectangle
        box = cv2.boxPoints(rect)  # get the four vertices of the rectangle
        box = np.int0(box)  # round the coordinates to integers
        area = int(rect[1][0] * rect[1][1])  # compute the rectangle area
if area > 250:
frames.append(box[[1, 3]])
return np.array(frames).tolist()
def save_frames(path_to_image_with_fields: str,
create_debug_form: bool = False, path_to_blank_image: str = None) -> (dict, np.ndarray):
"""По полученному изображению с нанесенными на него рамками (прямоугольники синего цвета по контурам мест, где
пользователь будет вводить свои данные) вывести координаты полей, а также изображение с нанесенными на него заново
рамками для проверки. Полученный в ходе работы данной программы массив значений можно использовать напрямую для
подачи в текущую версию основного скрипта в качестве sogl{number}_fields.json.
:param path_to_image_with_fields: путь к изображению с нанесенными рамками, которые выделяют нужные поля
:param create_debug_form: если True, на загруженное пустое изображение наносятся рамки в соответствии с полученными
их координатами. это действие осуществляется для проверки, что все сработало нормально
:param path_to_blank_image: пусть к исходному пустому изображению без нанесенных рамок
:return: словарь с координатами полей, а также изображение с нанесенными заново рамками
"""
image_with_frames = load_image(path_to_image_with_fields)
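    # Work in HSV so the blue frames can be isolated by a hue threshold.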
image_with_frames = cv2.cvtColor(image_with_frames, cv2.COLOR_BGR2HSV)
contours_of_frames = extract_contours(image_with_frames)
frames = extract_frames(contours_of_frames)
dict_with_values = {str(i): frame for i, frame in enumerate(frames)}
if create_debug_form:
        template = load_image(path_to_blank_image)  # blank form
for location in frames:
y = (location[0][1], location[1][1])
x = (location[0][0], location[1][0])
            # draw the frames back onto the form
cv2.rectangle(template, (x[0], y[0]), (x[1], y[1]), (0, 255, 0), 3)
return dict_with_values, template
return dict_with_values, None
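# A minimal usage sketch (the file names below are hypothetical): extract the
# field coordinates from an annotated form and write a debug overlay image.
if __name__ == '__main__':
    fields, debug_image = save_frames('form_with_frames.png',
                                       create_debug_form=True,
                                       path_to_blank_image='form_blank.png')
    print(fields)
    if debug_image is not None:
        cv2.imwrite('debug_overlay.png', debug_image)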
|
[
"cv2.Canny",
"numpy.int0",
"cv2.cvtColor",
"os.path.exists",
"cv2.rectangle",
"cv2.imread",
"cv2.boxPoints",
"numpy.array",
"cv2.minAreaRect",
"cv2.inRange"
] |
[((420, 436), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (430, 436), False, 'import cv2\n'), ((772, 795), 'numpy.array', 'np.array', (['[110, 50, 50]'], {}), '([110, 50, 50])\n', (780, 795), True, 'import numpy as np\n'), ((814, 839), 'numpy.array', 'np.array', (['[130, 255, 255]'], {}), '([130, 255, 255])\n', (822, 839), True, 'import numpy as np\n'), ((858, 902), 'cv2.inRange', 'cv2.inRange', (['image', 'lower_range', 'upper_range'], {}), '(image, lower_range, upper_range)\n', (869, 902), False, 'import cv2\n'), ((917, 947), 'cv2.Canny', 'cv2.Canny', (['image_mask', '(10)', '(250)'], {}), '(image_mask, 10, 250)\n', (926, 947), False, 'import cv2\n'), ((2981, 3031), 'cv2.cvtColor', 'cv2.cvtColor', (['image_with_frames', 'cv2.COLOR_BGR2HSV'], {}), '(image_with_frames, cv2.COLOR_BGR2HSV)\n', (2993, 3031), False, 'import cv2\n'), ((321, 341), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (335, 341), False, 'import os\n'), ((1411, 1445), 'cv2.minAreaRect', 'cv2.minAreaRect', (['contours_of_frame'], {}), '(contours_of_frame)\n', (1426, 1445), False, 'import cv2\n'), ((1494, 1513), 'cv2.boxPoints', 'cv2.boxPoints', (['rect'], {}), '(rect)\n', (1507, 1513), False, 'import cv2\n'), ((1567, 1579), 'numpy.int0', 'np.int0', (['box'], {}), '(box)\n', (1574, 1579), True, 'import numpy as np\n'), ((1745, 1761), 'numpy.array', 'np.array', (['frames'], {}), '(frames)\n', (1753, 1761), True, 'import numpy as np\n'), ((3507, 3574), 'cv2.rectangle', 'cv2.rectangle', (['template', '(x[0], y[0])', '(x[1], y[1])', '(0, 255, 0)', '(3)'], {}), '(template, (x[0], y[0]), (x[1], y[1]), (0, 255, 0), 3)\n', (3520, 3574), False, 'import cv2\n')]
|
# -*- coding: utf-8 -*-
# test_formats.py
"""
sfftk.formats modules unit tests
"""
from __future__ import division
import os
import shlex
import unittest
import __init__ as tests
from .. import schema
from ..core.parser import parse_args
from ..formats import am, seg, map, mod, stl, surf
__author__ = "<NAME>, PhD"
__email__ = "<EMAIL>, <EMAIL>"
__date__ = "2017-03-28"
__updated__ = '2018-02-14'
class TestFormats(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.segmentations_path = os.path.join(tests.TEST_DATA_PATH, 'segmentations')
# schema version
cls.schema_version = schema.SFFSegmentation().version
# files
cls.am_file = os.path.join(cls.segmentations_path, 'test_data.am')
cls.seg_file = os.path.join(cls.segmentations_path, 'test_data.seg')
cls.map_file = os.path.join(cls.segmentations_path, 'test_data.map')
cls.map_multi0_file = os.path.join(cls.segmentations_path, 'test_data_multi0.map')
cls.map_multi1_file = os.path.join(cls.segmentations_path, 'test_data_multi1.map')
cls.map_multi2_file = os.path.join(cls.segmentations_path, 'test_data_multi2.map')
cls.mod_file = os.path.join(cls.segmentations_path, 'test_data.mod')
cls.stl_file = os.path.join(cls.segmentations_path, 'test_data.stl')
cls.stl_multi0_file = os.path.join(cls.segmentations_path, 'test_data_multi0.stl')
cls.stl_multi1_file = os.path.join(cls.segmentations_path, 'test_data_multi1.stl')
cls.stl_multi2_file = os.path.join(cls.segmentations_path, 'test_data_multi2.stl')
cls.surf_file = os.path.join(cls.segmentations_path, 'test_data.surf')
# am
cls.am_segmentation = am.AmiraMeshSegmentation(cls.am_file)
# seg
cls.seg_segmentation = seg.SeggerSegmentation(cls.seg_file)
# map
cls.map_segmentation = map.MapSegmentation([cls.map_file])
# map multi
cls.map_multi_segmentation = map.MapSegmentation([cls.map_multi0_file, cls.map_multi1_file, cls.map_multi2_file])
# mod
cls.mod_segmentation = mod.IMODSegmentation(cls.mod_file)
# stl
cls.stl_segmentation = stl.STLSegmentation([cls.stl_file])
# stl multi
cls.stl_multi_segmentation = stl.STLSegmentation([cls.stl_multi0_file, cls.stl_multi1_file, cls.stl_multi2_file])
# surf
cls.surf_segmentation = surf.AmiraHyperSurfaceSegmentation(cls.surf_file)
# read
def test_am_read(self):
"""Read an AmiraMesh (.am) segmentation"""
# assertions
self.assertIsInstance(self.am_segmentation.header, am.AmiraMeshHeader)
self.assertIsInstance(self.am_segmentation.segments, list)
self.assertIsInstance(self.am_segmentation.segments[0], am.AmiraMeshSegment)
def test_seg_read(self):
"""Read a Segger (.seg) segmentation"""
# assertions
self.assertIsInstance(self.seg_segmentation.header, seg.SeggerHeader)
self.assertIsInstance(self.seg_segmentation.segments, list)
self.assertIsInstance(self.seg_segmentation.segments[0], seg.SeggerSegment)
def test_map_read(self):
"""Read an EMDB Map mask (.map) segmentation"""
# assertions
self.assertIsInstance(self.map_segmentation.header, map.MapHeader)
self.assertIsInstance(self.map_segmentation.segments, list)
self.assertIsInstance(self.map_segmentation.segments[0], map.MapSegment)
def test_mod_read(self):
"""Read an IMOD (.mod) segmentation"""
# assertions
self.assertIsInstance(self.mod_segmentation.header, mod.IMODHeader)
self.assertIsInstance(self.mod_segmentation.segments, list)
self.assertIsInstance(self.mod_segmentation.segments[0], mod.IMODSegment)
def test_stl_read(self):
"""Read a Stereo Lithography (.stl) segmentation"""
# assertions
self.assertIsInstance(self.stl_segmentation.header, stl.STLHeader)
self.assertIsInstance(self.stl_segmentation.segments, list)
self.assertIsInstance(self.stl_segmentation.segments[0], stl.STLSegment)
def test_surf_read(self):
"""Read a HyperSurface (.surf) segmentation"""
# assertions
self.assertIsInstance(self.surf_segmentation.header, surf.AmiraHyperSurfaceHeader)
self.assertIsInstance(self.surf_segmentation.segments, list)
self.assertIsInstance(self.surf_segmentation.segments[0], surf.AmiraHyperSurfaceSegment)
# convert
def test_am_convert(self):
"""Convert a segmentation from an AmiraMesh file to an SFFSegmentation object"""
args, configs = parse_args(shlex.split('convert {}'.format(self.am_file)))
sff_segmentation = self.am_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'AmiraMesh Segmentation')
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'Amira')
self.assertEqual(sff_segmentation.software.version, self.am_segmentation.header.designation.version)
# self.assertEqual(sff_segmentation.filePath, os.path.dirname(os.path.abspath(self.am_file)))
self.assertEqual(sff_segmentation.primaryDescriptor, 'threeDVolume')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
def test_seg_convert(self):
"""Convert a segmentation from a Segger file to an SFFSegmentation object"""
        args, configs = parse_args(shlex.split('convert {}'.format(self.seg_file)))
sff_segmentation = self.seg_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'Segger Segmentation')
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'segger')
self.assertEqual(sff_segmentation.software.version, self.seg_segmentation.header.version)
# self.assertEqual(sff_segmentation.filePath, os.path.dirname(os.path.abspath(self.seg_file)))
self.assertEqual(sff_segmentation.primaryDescriptor, 'threeDVolume')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
def test_map_convert(self):
"""Convert a segmentation from an EMDB Map mask file to an SFFSegmentation object"""
args, configs = parse_args(shlex.split('convert {}'.format(self.map_file)))
sff_segmentation = self.map_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'CCP4 mask segmentation') # might have an extra space at the end
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'Undefined')
self.assertEqual(sff_segmentation.primaryDescriptor, 'threeDVolume')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
def test_map_multi_convert(self):
"""Convert several EMDB Map mask files to a single SFFSegmentation object"""
args, configs = parse_args(shlex.split(
'convert -m {}'.format(' '.join([self.map_multi0_file, self.map_multi1_file, self.map_multi2_file]))
))
sff_segmentation = self.map_multi_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'CCP4 mask segmentation') # might have an extra space at the end
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'Undefined')
self.assertEqual(sff_segmentation.primaryDescriptor, 'threeDVolume')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
self.assertEqual(len(sff_segmentation.segments), 3)
def test_mod_convert(self):
"""Convert a segmentation from an IMOD file to an SFFSegmentation object"""
args, configs = parse_args(shlex.split('convert {}'.format(self.mod_file)))
sff_segmentation = self.mod_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'IMOD-NewModel')
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'IMOD')
# self.assertEqual(sff_segmentation.filePath, os.path.abspath(self.mod_file))
self.assertEqual(sff_segmentation.primaryDescriptor, 'meshList')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
def test_stl_convert(self):
"""Convert a segmentation from an Stereo Lithography file to an SFFSegmentation object"""
args, configs = parse_args(shlex.split('convert {}'.format(self.stl_file)))
sff_segmentation = self.stl_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'STL Segmentation')
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'Unknown')
self.assertEqual(sff_segmentation.primaryDescriptor, 'meshList')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
def test_stl_multi_convert(self):
"""Convert several STL files into a single SFFSegmentation object"""
args, configs = parse_args(shlex.split(
'convert -m {}'.format(' '.join([self.stl_multi0_file, self.stl_multi1_file, self.stl_multi2_file]))
))
sff_segmentation = self.stl_multi_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'STL Segmentation')
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'Unknown')
self.assertEqual(sff_segmentation.primaryDescriptor, 'meshList')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
self.assertEqual(len(sff_segmentation.segments), 3)
def test_surf_convert(self):
"""Convert a segmentation from a HyperSurface file to an SFFSegmentation object"""
args, configs = parse_args(shlex.split('convert {}'.format(self.surf_file)))
sff_segmentation = self.surf_segmentation.convert(args, configs)
# assertions
self.assertIsInstance(sff_segmentation, schema.SFFSegmentation)
self.assertEqual(sff_segmentation.name, 'Amira HyperSurface Segmentation')
self.assertEqual(sff_segmentation.version, self.schema_version)
self.assertEqual(sff_segmentation.software.name, 'Amira')
self.assertEqual(sff_segmentation.software.version, self.surf_segmentation.header.designation.version)
# self.assertEqual(sff_segmentation.filePath, os.path.abspath(self.surf_file))
self.assertEqual(sff_segmentation.primaryDescriptor, 'meshList')
self.assertEqual(sff_segmentation.transforms[0].id, 0)
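# Allow running the suite directly; because of the relative imports above this
# only works when the file is invoked as a module within its package
# (python -m <package>.test_formats).
if __name__ == "__main__":
    unittest.main()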
|
[
"os.path.join"
] |
[((518, 569), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations')\n", (530, 569), False, 'import os\n'), ((695, 747), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data.am"""'], {}), "(cls.segmentations_path, 'test_data.am')\n", (707, 747), False, 'import os\n'), ((771, 824), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data.seg"""'], {}), "(cls.segmentations_path, 'test_data.seg')\n", (783, 824), False, 'import os\n'), ((848, 901), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data.map"""'], {}), "(cls.segmentations_path, 'test_data.map')\n", (860, 901), False, 'import os\n'), ((932, 992), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data_multi0.map"""'], {}), "(cls.segmentations_path, 'test_data_multi0.map')\n", (944, 992), False, 'import os\n'), ((1023, 1083), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data_multi1.map"""'], {}), "(cls.segmentations_path, 'test_data_multi1.map')\n", (1035, 1083), False, 'import os\n'), ((1114, 1174), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data_multi2.map"""'], {}), "(cls.segmentations_path, 'test_data_multi2.map')\n", (1126, 1174), False, 'import os\n'), ((1198, 1251), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data.mod"""'], {}), "(cls.segmentations_path, 'test_data.mod')\n", (1210, 1251), False, 'import os\n'), ((1275, 1328), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data.stl"""'], {}), "(cls.segmentations_path, 'test_data.stl')\n", (1287, 1328), False, 'import os\n'), ((1359, 1419), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data_multi0.stl"""'], {}), "(cls.segmentations_path, 'test_data_multi0.stl')\n", (1371, 1419), False, 'import os\n'), ((1450, 1510), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data_multi1.stl"""'], {}), "(cls.segmentations_path, 'test_data_multi1.stl')\n", (1462, 1510), False, 'import os\n'), ((1541, 1601), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data_multi2.stl"""'], {}), "(cls.segmentations_path, 'test_data_multi2.stl')\n", (1553, 1601), False, 'import os\n'), ((1626, 1680), 'os.path.join', 'os.path.join', (['cls.segmentations_path', '"""test_data.surf"""'], {}), "(cls.segmentations_path, 'test_data.surf')\n", (1638, 1680), False, 'import os\n')]
|
import collections
from django.shortcuts import render
from django.urls import reverse, resolve
from django.http import HttpResponseRedirect, JsonResponse, HttpResponse
from django.contrib.auth.decorators import login_required
from django.forms import formset_factory
from .models import NOTTTravel, NOTTChaperon, NOTTChild, OvcCasePersons
from .forms import NOTTForm, ChaperonForm, ChildrenForm
from django.forms.models import model_to_dict
from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson
from cpovc_registry.models import (
RegPerson, RegPersonsExternalIds, RegOrgUnit, RegPersonsOrgUnits,
RegOrgUnitGeography)
from cpovc_main.functions import get_dict, convert_date
from django.db.models import Q
from cpovc_reports.forms import CaseLoad
from cpovc_reports.models import RPTCaseLoad
from .functions import (
travel_pdf, handle_integration, get_geo, get_person_geo,
get_person_orgs, generate_document)
from cpovc_main.models import SetupGeography
from .params import PARAMS
# @login_required(login_url='/')
def manage_home(request):
"""Main home method and view."""
try:
return render(request, 'management/home.html',
{'form': {}})
except Exception as e:
raise e
else:
pass
# @login_required(login_url='/')
def home_travel(request):
"""Main home method and view."""
try:
form = CaseLoad(request.user)
if request.method == 'POST':
dts, vals = {}, {}
dtls = ['is_void', 'sync_id', 'id']
item_id = request.POST.get('item_id')
data = NOTTTravel.objects.filter(
is_void=False, pk=item_id).values()[0]
for dt in data:
if data[dt] is not None and data[dt] != '' and dt not in dtls:
dval = vals[data[dt]] if data[dt] in vals else data[dt]
if isinstance(dval, (bool)):
dval = 'Yes' if dval else 'No'
dts[dt.replace('_', ' ').capitalize()] = dval
datas = collections.OrderedDict(sorted(dts.items()))
results = {'message': 'Good', 'status': 0, 'dates': '0000',
'data': datas}
return JsonResponse(results, content_type='application/json',
safe=False)
cases = NOTTTravel.objects.filter(is_void=False)
return render(request, 'management/home_travel.html',
{'form': form, 'cases': cases})
except Exception as e:
raise e
else:
pass
# @login_required(login_url='/')
def new_travel(request):
"""Main home method and view."""
try:
if request.method == 'POST':
item_id = request.POST.get('item_id')
print(item_id)
return render(request, 'management/edit_travel.html',
{'form': {}})
except Exception as e:
raise e
else:
pass
# @login_required(login_url='/')
def view_travel(request, id):
"""Main home method and view."""
try:
if request.method == 'POST':
item_id = request.POST.get('item_id')
print(item_id)
travel = NOTTTravel.objects.get(is_void=False, id=id)
chaperons = NOTTChaperon.objects.filter(travel_id=id)
children = NOTTChild.objects.filter(travel_id=id)
return render(request, 'management/view_travel.html',
{'form': {}, 'travel': travel,
'chaperons': chaperons, 'children': children})
except Exception as e:
raise e
else:
pass
# @login_required(login_url='/')
def travel_report(request, id):
"""Main home method and view."""
try:
file_name = 'National_Travel-Authorization_%s' % (id)
fname = '%s.pdf' % (file_name)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="%s"' % (fname)
travel_pdf(request, response, file_name)
return response
except Exception as e:
raise e
else:
pass
# @login_required(login_url='/')
def edit_travel(request, id):
"""Main home method and view."""
try:
ChaperonFormset = formset_factory(ChaperonForm, extra=0)
ChildrenFormset = formset_factory(ChildrenForm, extra=0)
if request.method == 'POST':
travel = NOTTTravel.objects.get(is_void=False, id=id)
tdate = request.POST.get('travel_date')
return_date = request.POST.get('return_date')
no_applied = request.POST.get('no_applied')
no_cleared = request.POST.get('no_cleared')
no_returned = request.POST.get('no_returned')
comments = request.POST.get('comments')
contacts = request.POST.get('contacts')
sponsor = request.POST.get('sponsor')
reason = request.POST.get('reason')
status_id = request.POST.get('status')
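            # the status checkbox posts the string 'on' when ticked; store it as 1/0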
status = 1 if status_id == 'on' else 0
institution_name = request.POST.get('institution_name')
country_name = request.POST.get('country_name')
travel_date = convert_date(tdate)
if return_date:
return_date = convert_date(return_date)
travel.travel_date = travel_date
travel.return_date = return_date
travel.contacts = contacts
travel.comments = comments
travel.sponsor = sponsor
travel.reason = reason
travel.status = status
travel.institution_name = institution_name
travel.country_name = country_name
# travel.save()
# Chaperon
formset = ChaperonFormset(request.POST, prefix='chap')
cformset = ChildrenFormset(request.POST, prefix='child')
print(request.POST)
clear_count, return_count = 0, 0
if formset.is_valid():
if formset.has_changed():
for echap in formset.cleaned_data:
ops = OvcCasePersons.objects.get(pk=echap['person_id'])
ops.person_sex = echap['sex']
ops.person_first_name = echap['first_name']
ops.person_other_names = echap['other_names']
ops.person_surname = echap['surname']
ops.person_identifier = echap['passport_no']
ops.save()
else:
print(formset.errors)
if cformset.is_valid():
if cformset.has_changed():
no_applied = len(cformset.cleaned_data)
for echild in cformset.cleaned_data:
cid = echild['person_id']
cidc = echild['cleared']
cidr = echild['returned']
                    cid_cleared = cidc == 'True'
                    cid_returned = cidr == 'True'
if cid_cleared:
clear_count += 1
if cid_returned:
return_count += 1
opc = RegPerson.objects.get(pk=cid)
opc.sex_id = echild['sex']
opc.first_name = echild['first_name']
opc.other_names = echild['other_names']
opc.surname = echild['surname']
opc.save()
# Update passport Number
cpp = RegPersonsExternalIds.objects.get(
person_id=cid, is_void=False,
identifier_type_id='IPPN')
cpp.identifier = echild['passport_no']
cpp.save()
# Update Returned / Cleared details
ch = NOTTChild.objects.get(travel_id=id, person_id=cid)
ch.returned = cid_returned
ch.cleared = cid_cleared
ch.save()
print(echild)
no_returned = return_count
no_cleared = clear_count
else:
print(cformset.errors)
travel.no_applied = no_applied
travel.no_cleared = no_cleared
travel.no_returned = no_returned
travel.save()
url = reverse(view_travel, kwargs={'id': id})
return HttpResponseRedirect(url)
travel = NOTTTravel.objects.filter(is_void=False, id=id).values()[0]
travel_date = travel['travel_date'].strftime('%d-%b-%Y')
return_date = None
if travel['return_date']:
return_date = travel['return_date'].strftime('%d-%b-%Y')
travel['travel_date'] = travel_date
travel['return_date'] = return_date
nott_form = NOTTForm(travel)
# Chaperons
chaps = []
chaperons = NOTTChaperon.objects.filter(travel_id=id)
for chap in chaperons:
chap_details = {'first_name': chap.other_person.person_first_name}
chap_details['surname'] = chap.other_person.person_surname
chap_details['other_names'] = chap.other_person.person_other_names
chap_details['sex'] = chap.other_person.person_sex
chap_details['passport_no'] = chap.other_person.person_identifier
chap_details['person_id'] = chap.other_person_id
chap_details['chaperon_id'] = chap.id
chaps.append(chap_details)
chap_formset = ChaperonFormset(initial=chaps, prefix='chap')
# Children
tchildren = []
children = NOTTChild.objects.filter(travel_id=id)
for child in children:
child_details = {'first_name': child.person.first_name}
child_details['surname'] = child.person.surname
child_details['other_names'] = child.person.other_names
child_details['sex'] = child.person.sex_id
child_details['passport_no'] = child.passport
child_details['person_id'] = child.person_id
child_details['child_id'] = child.id
child_details['cleared'] = child.cleared
child_details['returned'] = child.returned
tchildren.append(child_details)
child_formset = ChildrenFormset(initial=tchildren, prefix='child')
return render(request, 'management/edit_travel.html',
{'form': nott_form, 'travel': travel,
'chap_formset': chap_formset,
'child_formset': child_formset})
except Exception as e:
raise e
else:
pass
# Create your views here.
# @login_required(login_url='/')
def integration_home(request):
"""Method to do pivot reports."""
try:
persons = {}
categories = {}
case_data = {}
case_ids = []
user_id = request.user.id
form = CaseLoad(request.user)
user_counties, user_geos = get_person_geo(request)
print('Geos', user_counties, user_geos)
rm_fields = ['is_void', 'account', 'case_serial']
check_fields = ['sex_id', 'case_category_id', 'case_reporter_id',
'family_status_id', 'household_economics',
'risk_level_id', 'mental_condition_id',
'perpetrator_status_id', 'other_condition_id',
'physical_condition_id', 'yesno_id']
vals = get_dict(field_name=check_fields)
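        # vals maps the coded values of the fields above to their display labels
        # (used below to decode the raw case record for the JSON response)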
if request.method == 'POST':
item_id = request.POST.get('item_id')
case = OVCBasicCRS.objects.get(
case_id=item_id, is_void=False)
cdata = model_to_dict(case)
for cd in cdata:
cdt = cdata[cd]
if len(str(cdt)) < 6 and cdt in vals:
cdt = vals[cdt]
if cdt and cd not in rm_fields:
case_data[cd] = cdt
if cdt and (cd == 'county' or cd == 'constituency'):
cid = 'GPRV' if cd == 'county' else 'GDIS'
cd_name = '%s name' % (cd)
geo = get_geo(int(cdt), cid)
if geo:
geo_name = geo.area_name
case_data[cd_name] = geo_name
results = {'status': 0, 'message': 'Successful', 'dates': '',
'data': case_data}
return JsonResponse(results, content_type='application/json',
safe=False)
cases = OVCBasicCRS.objects.filter(
is_void=False).order_by('-timestamp_created')
if not request.user.is_superuser:
if request.user.username == 'vurugumapper':
cases = cases.filter(account_id=user_id)
else:
cases = cases.filter(county__in=user_counties)
for cs in cases:
case_ids.append(cs.case_id)
case_cats = OVCBasicCategory.objects.filter(
is_void=False, case_id__in=case_ids)
case_pers = OVCBasicPerson.objects.filter(
is_void=False, case_id__in=case_ids)
for ccat in case_cats:
categories[ccat.case_id] = ccat
for cpers in case_pers:
pers_type = cpers.person_type
if pers_type == 'PTCH':
persons[cpers.case_id] = cpers
for c in cases:
cid = c.case_id
category = categories[cid] if cid in categories else None
child = persons[cid] if cid in persons else None
setattr(c, 'category', category)
setattr(c, 'child', child)
return render(request, 'management/integration.html',
{'form': form, 'cases': cases, 'vals': vals})
except Exception as e:
print(e)
raise e
else:
pass
# @login_required
def process_integration(request, case_id):
"""Method to process case."""
try:
case = OVCBasicCRS.objects.get(case_id=case_id, is_void=False)
county_code = int(case.county)
const_code = int(case.constituency)
county_id, const_id = 0, 0
crs_id = str(case_id).replace('-', '')
user_counties, user_geos = get_person_geo(request)
# Get person orgs
ou_ids = get_person_orgs(request)
if request.method == 'POST':
response = handle_integration(request, case, case_id)
print(response)
check_fields = ['sex_id', 'case_category_id', 'case_reporter_id',
'family_status_id', 'household_economics',
'risk_level_id', 'mental_condition_id',
'perpetrator_status_id', 'other_condition_id',
'physical_condition_id', 'yesno_id']
vals = get_dict(field_name=check_fields)
category = OVCBasicCategory.objects.filter(
case_id=case_id, is_void=False)
person = OVCBasicPerson.objects.filter(case_id=case_id, is_void=False)
# Attached Geos and Org Units for the user
# ou_ids = []
org_id = request.session.get('ou_primary', 0)
ou_ids.append(org_id)
ou_attached = request.session.get('ou_attached', 0)
user_level = request.session.get('user_level', 0)
user_type = request.session.get('user_type', 0)
print(org_id, ou_attached, user_level, user_type)
# person_id = request.user.reg_person_id
county = SetupGeography.objects.filter(
area_code=county_code, area_type_id='GPRV')
for c in county:
county_id = c.area_id
# Get constituency
constituency = SetupGeography.objects.filter(
area_code=const_code, area_type_id='GDIS')
for c in constituency:
const_id = c.area_id
ous = RegOrgUnit.objects.filter(is_void=False)
counties = SetupGeography.objects.filter(area_type_id='GPRV')
if user_counties:
counties = counties.filter(area_id__in=user_counties)
if request.user.is_superuser:
all_ou_ids = ['TNGD']
ous = ous.filter(org_unit_type_id__in=all_ou_ids)
geos = SetupGeography.objects.filter(
area_type_id='GDIS', parent_area_id=county_id)
else:
ous = ous.filter(id__in=ou_ids)
geos = SetupGeography.objects.filter(
area_type_id='GDIS', parent_area_id=county_id)
return render(request, 'management/integration_process.html',
{'form': {}, 'case': case, 'vals': vals,
'category': category, 'person': person,
'geos': geos, 'ous': ous, 'counties': counties,
'county_id': county_id, 'const_id': const_id,
'crs_id': crs_id})
except Exception as e:
print('Error processing integration - %s' % (e))
else:
pass
# @login_required
def get_document(request, doc_id, case_id):
"""Some default page for reports home page."""
try:
case = OVCBasicCRS.objects.get(case_id=case_id, is_void=False)
person_id = str(1).zfill(6)
ou = case.case_org_unit
params = {}
params['ref_to'] = ou.org_unit_name if ou else ''
params['ref_from'] = 'HELPLINE 116'
# Get the persons attached to this case
child = {'name': '', 'sex': ''}
mum, dad = '', ''
persons = OVCBasicPerson.objects.filter(case_id=case_id, is_void=False)
for person in persons:
print('person', person.person_type, person.first_name, person.sex)
if person.person_type == 'PTCH':
name = '%s %s' % (person.first_name, person.surname)
sex = 'Male' if person.sex == 'SMAL' else 'Female'
child = {'name': name.upper(), 'sex': sex.upper()}
if person.person_type == 'PTCG':
name = '%s %s' % (person.first_name, person.surname)
sex = 'Male' if person.sex == 'SMAL' else 'Female'
if person.sex == 'SMAL':
dad = name.upper()
else:
mum = name.upper()
params['child'] = child
params['mum'] = mum
params['dad'] = dad
response = HttpResponse(content_type='application/pdf')
fname = 'U%s-%s' % (person_id, str(doc_id))
f_name = 'attachment; filename=%s.pdf' % (fname)
response['Content-Disposition'] = f_name
generate_document(request, response, params, case)
return response
except Exception as e:
print('Error writing report - %s' % (str(e)))
raise e
# @login_required(login_url='/')
def dq_home(request):
"""Main home method and view."""
try:
cases = []
sts = {0: 'Pending', 1: 'Open', 2: 'Closed'}
form = CaseLoad(request.user)
if request.method == 'POST':
print('go on....')
acases = RPTCaseLoad.objects.filter(is_void=False)[:100]
for case in acases:
cs = case.case_status
dt = {"cpims_id": case.case.person_id}
dt['age'] = case.age
dt['case_category'] = case.case_category
dt['case_date'] = case.case_date
dt['sex'] = case.sex
dt['case_status'] = sts[cs] if cs in sts else 'Open'
dt['dob'] = case.dob
dt['intervention'] = case.intervention
dt['org_unit'] = case.org_unit_name
dt['names'] = case.case.person.first_name
cases.append(dt)
print('cases', cases)
data = {'message': 'Successful', 'status': 0, 'data': cases}
return JsonResponse(data, content_type='application/json',
safe=False)
return render(request, 'management/dq_home.html',
{'form': form})
except Exception as e:
print('error - %s' % (e))
raise e
else:
pass
def dq_data(request):
"""Main home method and view."""
try:
cases = []
sdate, edate = None, None
sts = {0: 'Pending', 1: 'Open', 2: 'Closed'}
# Conditions
qa = request.GET.get('q_aspect')
va = request.GET.get('variance')
age = request.GET.get('age')
from_date = request.GET.get('from_date')
to_date = request.GET.get('to_date')
org_unit = request.GET.get('org_unit')
if from_date and to_date:
sdate = convert_date(from_date)
edate = convert_date(to_date)
cage = int(age) if age else 0
vid = int(va) if va else 0
qid = int(qa) if qa else 0
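        # reusable filters for category/age combinations flagged as data-quality issues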
q2 = Q(case_category_id__in=('CTRF', 'CCCT'), age__lt=6)
q3 = Q(case_category_id__in=('CSAB', 'CSHV', 'CCCM', 'CORP'),
age__lt=11)
if qa:
acases = RPTCaseLoad.objects.filter(is_void=False)
if qid == 1:
acases = acases.filter(
Q(age__gte=25) | Q(dob__isnull=True) | Q(age__lt=0))
elif qid == 2:
acases = acases.filter(
Q(case_category_id='CDIS',
age__gt=15) | Q(case_category_id='CSIC',
age__gt=18) | q2 | q3)
elif qid == 3:
acases = acases.filter(
case_category_id__in=('CSHV', 'CSCS'), sex_id='SMAL')
elif qid == 4:
acases = acases.filter(
case_status=1, intervention__isnull=True)
else:
acases = RPTCaseLoad.objects.filter(
Q(age__gte=25) | Q(dob__isnull=True))
if vid == 1:
acases = acases.filter(age=cage)
elif vid == 2:
acases = acases.filter(age__gt=cage)
elif vid == 3:
acases = acases.filter(age__lt=cage)
if edate and sdate:
acases = acases.filter(case_date__range=(sdate, edate))
if org_unit:
acases = acases.filter(org_unit_id=org_unit)
else:
if not request.user.is_superuser:
acases = acases.filter(org_unit_id=org_unit)
for case in acases[:1000]:
cs = case.case_status
fname = case.case.person.first_name
sname = case.case.person.surname[0]
o_name = case.case.person.other_names
oname = o_name[0] if o_name else ''
dt = {"cpims_id": case.case.person_id}
dt['age'] = case.age
dt['case_category'] = case.case_category
dt['case_date'] = case.case_date
dt['sex'] = case.sex
dt['case_status'] = sts[cs] if cs in sts else 'Open'
dt['dob'] = case.dob
dt['org_unit'] = case.org_unit_name
dt['intervention'] = case.intervention
dt['org_unit'] = case.org_unit_name
dt['names'] = '%s %s%s' % (fname, sname, oname)
cases.append(dt)
result = {"data": cases}
return JsonResponse(result, content_type='application/json',
safe=False)
except Exception as e:
print('error - %s' % (e))
raise e
else:
pass
# @login_required(login_url='/')
def se_home(request):
"""Main home method and view."""
try:
form = CaseLoad(request.user)
return render(request, 'management/se_home.html',
{'form': form})
except Exception as e:
raise e
else:
pass
def se_data(request):
"""Main home method and view."""
try:
cases = []
ou_ids = []
org_unit = request.GET.get('org_unit')
county = request.GET.get('county')
persons = RegPersonsOrgUnits.objects.filter(
is_void=False, date_delinked__isnull=True)
check_fields = ['wdn_cadre_type_id', 'vol_cadre_type',
'sw_cadre_type_id', 'scc_cadre_type_id',
'po_cadre_type_id', 'pm_cadre_type_id',
'pa_cadre_type_id', 'cle_cadre_type_id',
'ogo_cadre_type_id', 'nct_cadre_type_id',
'mng_cadre_type_id', 'me_cadre_type_id',
'ict_cadre_type_id', 'hsm_cadre_type_id',
'hou_cadre_type_id', 'hos_cadre_type_id',
'dir_cadre_type_id', 'ddr_cadre_type_id',
'cc_cadre_type_id', 'cadre_type_id',
'adm_cadre_type_id']
vals = get_dict(field_name=check_fields)
county_id = int(county) if county else 0
if org_unit:
print('OU', org_unit)
persons = persons.filter(org_unit_id=org_unit)
# Get Geo Locations
for pers in persons:
if pers.org_unit_id not in ou_ids:
ou_ids.append(pers.org_unit_id)
ous = {}
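        # map each org unit to the parent area (county) of its attached geography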
geos = RegOrgUnitGeography.objects.filter(
is_void=False, org_unit_id__in=ou_ids, area_id__lt=338)
for geo in geos:
if county_id > 0:
if county_id == int(geo.area.parent_area_id):
ous[geo.org_unit_id] = geo.area.parent_area_id
else:
ous[geo.org_unit_id] = geo.area.parent_area_id
if county_id > 0:
persons = persons.filter(org_unit_id__in=ous)
for person in persons:
fname = person.person.first_name
sname = person.person.surname
o_name = person.person.other_names
oname = ' %s' % o_name if o_name else ''
sex = 'Male' if person.person.sex_id == 'SMAL' else 'Female'
did = person.person.designation
ou_id = person.org_unit_id
cid = ous[ou_id] if ou_id in ous else None
ccd = str(cid).zfill(3) if cid else None
cname = PARAMS[ccd] if cid else None
des = vals[did] if did in vals else 'N/A'
age = person.person.years
dob = str(person.person.date_of_birth)
dt = {"cpims_id": person.person_id}
dt['age'] = 'N/A' if dob == '1900-01-01' else age
dt['designation'] = des
dt['sex'] = sex
dt['dob'] = dob
dt['county'] = cname if cname else 'N/A'
dt['org_unit'] = person.org_unit.org_unit_name
dt['names'] = '%s %s%s' % (fname, sname, oname)
cases.append(dt)
result = {"data": cases}
return JsonResponse(result, content_type='application/json',
safe=False)
except Exception as e:
print('error - %s' % (e))
raise e
else:
pass
|
[
"cpovc_forms.models.OVCBasicPerson.objects.filter",
"django.http.JsonResponse",
"cpovc_forms.models.OVCBasicCRS.objects.filter",
"cpovc_registry.models.RegPersonsExternalIds.objects.get",
"cpovc_main.functions.get_dict",
"django.forms.formset_factory",
"django.http.HttpResponseRedirect",
"django.http.HttpResponse",
"cpovc_reports.models.RPTCaseLoad.objects.filter",
"cpovc_registry.models.RegOrgUnit.objects.filter",
"cpovc_forms.models.OVCBasicCategory.objects.filter",
"django.shortcuts.render",
"cpovc_main.functions.convert_date",
"django.urls.reverse",
"cpovc_registry.models.RegPerson.objects.get",
"cpovc_forms.models.OVCBasicCRS.objects.get",
"django.forms.models.model_to_dict",
"cpovc_reports.forms.CaseLoad",
"cpovc_registry.models.RegOrgUnitGeography.objects.filter",
"django.db.models.Q",
"cpovc_registry.models.RegPersonsOrgUnits.objects.filter",
"cpovc_main.models.SetupGeography.objects.filter"
] |
[((1147, 1200), 'django.shortcuts.render', 'render', (['request', '"""management/home.html"""', "{'form': {}}"], {}), "(request, 'management/home.html', {'form': {}})\n", (1153, 1200), False, 'from django.shortcuts import render\n'), ((1411, 1433), 'cpovc_reports.forms.CaseLoad', 'CaseLoad', (['request.user'], {}), '(request.user)\n', (1419, 1433), False, 'from cpovc_reports.forms import CaseLoad\n'), ((2419, 2497), 'django.shortcuts.render', 'render', (['request', '"""management/home_travel.html"""', "{'form': form, 'cases': cases}"], {}), "(request, 'management/home_travel.html', {'form': form, 'cases': cases})\n", (2425, 2497), False, 'from django.shortcuts import render\n'), ((2821, 2881), 'django.shortcuts.render', 'render', (['request', '"""management/edit_travel.html"""', "{'form': {}}"], {}), "(request, 'management/edit_travel.html', {'form': {}})\n", (2827, 2881), False, 'from django.shortcuts import render\n'), ((3392, 3520), 'django.shortcuts.render', 'render', (['request', '"""management/view_travel.html"""', "{'form': {}, 'travel': travel, 'chaperons': chaperons, 'children': children}"], {}), "(request, 'management/view_travel.html', {'form': {}, 'travel':\n travel, 'chaperons': chaperons, 'children': children})\n", (3398, 3520), False, 'from django.shortcuts import render\n'), ((3861, 3905), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""application/pdf"""'}), "(content_type='application/pdf')\n", (3873, 3905), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((4262, 4300), 'django.forms.formset_factory', 'formset_factory', (['ChaperonForm'], {'extra': '(0)'}), '(ChaperonForm, extra=0)\n', (4277, 4300), False, 'from django.forms import formset_factory\n'), ((4327, 4365), 'django.forms.formset_factory', 'formset_factory', (['ChildrenForm'], {'extra': '(0)'}), '(ChildrenForm, extra=0)\n', (4342, 4365), False, 'from django.forms import formset_factory\n'), ((10520, 10671), 'django.shortcuts.render', 'render', (['request', '"""management/edit_travel.html"""', "{'form': nott_form, 'travel': travel, 'chap_formset': chap_formset,\n 'child_formset': child_formset}"], {}), "(request, 'management/edit_travel.html', {'form': nott_form, 'travel':\n travel, 'chap_formset': chap_formset, 'child_formset': child_formset})\n", (10526, 10671), False, 'from django.shortcuts import render\n'), ((11080, 11102), 'cpovc_reports.forms.CaseLoad', 'CaseLoad', (['request.user'], {}), '(request.user)\n', (11088, 11102), False, 'from cpovc_reports.forms import CaseLoad\n'), ((11620, 11653), 'cpovc_main.functions.get_dict', 'get_dict', ([], {'field_name': 'check_fields'}), '(field_name=check_fields)\n', (11628, 11653), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((13128, 13196), 'cpovc_forms.models.OVCBasicCategory.objects.filter', 'OVCBasicCategory.objects.filter', ([], {'is_void': '(False)', 'case_id__in': 'case_ids'}), '(is_void=False, case_id__in=case_ids)\n', (13159, 13196), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((13230, 13296), 'cpovc_forms.models.OVCBasicPerson.objects.filter', 'OVCBasicPerson.objects.filter', ([], {'is_void': '(False)', 'case_id__in': 'case_ids'}), '(is_void=False, case_id__in=case_ids)\n', (13259, 13296), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((13824, 13920), 'django.shortcuts.render', 'render', (['request', '"""management/integration.html"""', "{'form': form, 'cases': cases, 'vals': vals}"], {}), "(request, 'management/integration.html', {'form': form, 'cases':\n cases, 'vals': vals})\n", (13830, 13920), False, 'from django.shortcuts import render\n'), ((14143, 14198), 'cpovc_forms.models.OVCBasicCRS.objects.get', 'OVCBasicCRS.objects.get', ([], {'case_id': 'case_id', 'is_void': '(False)'}), '(case_id=case_id, is_void=False)\n', (14166, 14198), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((14974, 15007), 'cpovc_main.functions.get_dict', 'get_dict', ([], {'field_name': 'check_fields'}), '(field_name=check_fields)\n', (14982, 15007), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((15027, 15090), 'cpovc_forms.models.OVCBasicCategory.objects.filter', 'OVCBasicCategory.objects.filter', ([], {'case_id': 'case_id', 'is_void': '(False)'}), '(case_id=case_id, is_void=False)\n', (15058, 15090), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((15121, 15182), 'cpovc_forms.models.OVCBasicPerson.objects.filter', 'OVCBasicPerson.objects.filter', ([], {'case_id': 'case_id', 'is_void': '(False)'}), '(case_id=case_id, is_void=False)\n', (15150, 15182), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((15638, 15711), 'cpovc_main.models.SetupGeography.objects.filter', 'SetupGeography.objects.filter', ([], {'area_code': 'county_code', 'area_type_id': '"""GPRV"""'}), "(area_code=county_code, area_type_id='GPRV')\n", (15667, 15711), False, 'from cpovc_main.models import SetupGeography\n'), ((15834, 15906), 'cpovc_main.models.SetupGeography.objects.filter', 'SetupGeography.objects.filter', ([], {'area_code': 'const_code', 'area_type_id': '"""GDIS"""'}), "(area_code=const_code, area_type_id='GDIS')\n", (15863, 15906), False, 'from cpovc_main.models import SetupGeography\n'), ((15998, 16038), 'cpovc_registry.models.RegOrgUnit.objects.filter', 'RegOrgUnit.objects.filter', ([], {'is_void': '(False)'}), '(is_void=False)\n', (16023, 16038), False, 'from cpovc_registry.models import RegPerson, RegPersonsExternalIds, RegOrgUnit, RegPersonsOrgUnits, RegOrgUnitGeography\n'), ((16058, 16108), 'cpovc_main.models.SetupGeography.objects.filter', 'SetupGeography.objects.filter', ([], {'area_type_id': '"""GPRV"""'}), "(area_type_id='GPRV')\n", (16087, 16108), False, 'from cpovc_main.models import SetupGeography\n'), ((16634, 16894), 'django.shortcuts.render', 'render', (['request', '"""management/integration_process.html"""', "{'form': {}, 'case': case, 'vals': vals, 'category': category, 'person':\n person, 'geos': geos, 'ous': ous, 'counties': counties, 'county_id':\n county_id, 'const_id': const_id, 'crs_id': crs_id}"], {}), "(request, 'management/integration_process.html', {'form': {}, 'case':\n case, 'vals': vals, 'category': category, 'person': person, 'geos':\n geos, 'ous': ous, 'counties': counties, 'county_id': county_id,\n 'const_id': const_id, 'crs_id': crs_id})\n", (16640, 16894), False, 'from django.shortcuts import render\n'), ((17243, 17298), 'cpovc_forms.models.OVCBasicCRS.objects.get', 'OVCBasicCRS.objects.get', ([], {'case_id': 'case_id', 'is_void': '(False)'}), '(case_id=case_id, is_void=False)\n', (17266, 17298), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((17621, 17682), 'cpovc_forms.models.OVCBasicPerson.objects.filter', 'OVCBasicPerson.objects.filter', ([], {'case_id': 'case_id', 'is_void': '(False)'}), '(case_id=case_id, is_void=False)\n', (17650, 17682), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((18470, 18514), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""application/pdf"""'}), "(content_type='application/pdf')\n", (18482, 18514), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((19043, 19065), 'cpovc_reports.forms.CaseLoad', 'CaseLoad', (['request.user'], {}), '(request.user)\n', (19051, 19065), False, 'from cpovc_reports.forms import CaseLoad\n'), ((20049, 20107), 'django.shortcuts.render', 'render', (['request', '"""management/dq_home.html"""', "{'form': form}"], {}), "(request, 'management/dq_home.html', {'form': form})\n", (20055, 20107), False, 'from django.shortcuts import render\n'), ((20928, 20979), 'django.db.models.Q', 'Q', ([], {'case_category_id__in': "('CTRF', 'CCCT')", 'age__lt': '(6)'}), "(case_category_id__in=('CTRF', 'CCCT'), age__lt=6)\n", (20929, 20979), False, 'from django.db.models import Q\n'), ((20993, 21061), 'django.db.models.Q', 'Q', ([], {'case_category_id__in': "('CSAB', 'CSHV', 'CCCM', 'CORP')", 'age__lt': '(11)'}), "(case_category_id__in=('CSAB', 'CSHV', 'CCCM', 'CORP'), age__lt=11)\n", (20994, 21061), False, 'from django.db.models import Q\n'), ((23283, 23348), 'django.http.JsonResponse', 'JsonResponse', (['result'], {'content_type': '"""application/json"""', 'safe': '(False)'}), "(result, content_type='application/json', safe=False)\n", (23295, 23348), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((23595, 23617), 'cpovc_reports.forms.CaseLoad', 'CaseLoad', (['request.user'], {}), '(request.user)\n', (23603, 23617), False, 'from cpovc_reports.forms import CaseLoad\n'), ((23633, 23691), 'django.shortcuts.render', 'render', (['request', '"""management/se_home.html"""', "{'form': form}"], {}), "(request, 'management/se_home.html', {'form': form})\n", (23639, 23691), False, 'from django.shortcuts import render\n'), ((23997, 24073), 'cpovc_registry.models.RegPersonsOrgUnits.objects.filter', 'RegPersonsOrgUnits.objects.filter', ([], {'is_void': '(False)', 'date_delinked__isnull': '(True)'}), '(is_void=False, date_delinked__isnull=True)\n', (24030, 24073), False, 'from cpovc_registry.models import RegPerson, RegPersonsExternalIds, RegOrgUnit, RegPersonsOrgUnits, RegOrgUnitGeography\n'), ((24794, 24827), 'cpovc_main.functions.get_dict', 'get_dict', ([], {'field_name': 'check_fields'}), '(field_name=check_fields)\n', (24802, 24827), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((25175, 25269), 'cpovc_registry.models.RegOrgUnitGeography.objects.filter', 'RegOrgUnitGeography.objects.filter', ([], {'is_void': '(False)', 'org_unit_id__in': 'ou_ids', 'area_id__lt': '(338)'}), '(is_void=False, org_unit_id__in=ou_ids,\n area_id__lt=338)\n', (25209, 25269), False, 'from cpovc_registry.models import RegPerson, RegPersonsExternalIds, RegOrgUnit, RegPersonsOrgUnits, RegOrgUnitGeography\n'), ((26753, 26818), 'django.http.JsonResponse', 'JsonResponse', (['result'], {'content_type': '"""application/json"""', 'safe': '(False)'}), "(result, content_type='application/json', safe=False)\n", (26765, 26818), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((2248, 2314), 'django.http.JsonResponse', 'JsonResponse', (['results'], {'content_type': '"""application/json"""', 'safe': '(False)'}), "(results, content_type='application/json', safe=False)\n", (2260, 2314), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((5207, 5226), 'cpovc_main.functions.convert_date', 'convert_date', (['tdate'], {}), '(tdate)\n', (5219, 5226), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((8529, 8568), 'django.urls.reverse', 'reverse', (['view_travel'], {'kwargs': "{'id': id}"}), "(view_travel, kwargs={'id': id})\n", (8536, 8568), False, 'from django.urls import reverse, resolve\n'), ((8588, 8613), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['url'], {}), '(url)\n', (8608, 8613), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((11760, 11815), 'cpovc_forms.models.OVCBasicCRS.objects.get', 'OVCBasicCRS.objects.get', ([], {'case_id': 'item_id', 'is_void': '(False)'}), '(case_id=item_id, is_void=False)\n', (11783, 11815), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((11853, 11872), 'django.forms.models.model_to_dict', 'model_to_dict', (['case'], {}), '(case)\n', (11866, 11872), False, 'from django.forms.models import model_to_dict\n'), ((12606, 12672), 'django.http.JsonResponse', 'JsonResponse', (['results'], {'content_type': '"""application/json"""', 'safe': '(False)'}), "(results, content_type='application/json', safe=False)\n", (12618, 12672), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((16354, 16430), 'cpovc_main.models.SetupGeography.objects.filter', 'SetupGeography.objects.filter', ([], {'area_type_id': '"""GDIS"""', 'parent_area_id': 'county_id'}), "(area_type_id='GDIS', parent_area_id=county_id)\n", (16383, 16430), False, 'from cpovc_main.models import SetupGeography\n'), ((16525, 16601), 'cpovc_main.models.SetupGeography.objects.filter', 'SetupGeography.objects.filter', ([], {'area_type_id': '"""GDIS"""', 'parent_area_id': 'county_id'}), "(area_type_id='GDIS', parent_area_id=county_id)\n", (16554, 16601), False, 'from cpovc_main.models import SetupGeography\n'), ((19938, 20001), 'django.http.JsonResponse', 'JsonResponse', (['data'], {'content_type': '"""application/json"""', 'safe': '(False)'}), "(data, content_type='application/json', safe=False)\n", (19950, 20001), False, 'from django.http import HttpResponseRedirect, JsonResponse, HttpResponse\n'), ((20741, 20764), 'cpovc_main.functions.convert_date', 'convert_date', (['from_date'], {}), '(from_date)\n', (20753, 20764), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((20785, 20806), 'cpovc_main.functions.convert_date', 'convert_date', (['to_date'], {}), '(to_date)\n', (20797, 20806), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((21113, 21154), 'cpovc_reports.models.RPTCaseLoad.objects.filter', 'RPTCaseLoad.objects.filter', ([], {'is_void': '(False)'}), '(is_void=False)\n', (21139, 21154), False, 'from cpovc_reports.models import RPTCaseLoad\n'), ((5285, 5310), 'cpovc_main.functions.convert_date', 'convert_date', (['return_date'], {}), '(return_date)\n', (5297, 5310), False, 'from cpovc_main.functions import get_dict, convert_date\n'), ((12721, 12762), 'cpovc_forms.models.OVCBasicCRS.objects.filter', 'OVCBasicCRS.objects.filter', ([], {'is_void': '(False)'}), '(is_void=False)\n', (12747, 12762), False, 'from cpovc_forms.models import OVCBasicCRS, OVCBasicCategory, OVCBasicPerson\n'), ((19155, 19196), 'cpovc_reports.models.RPTCaseLoad.objects.filter', 'RPTCaseLoad.objects.filter', ([], {'is_void': '(False)'}), '(is_void=False)\n', (19181, 19196), False, 'from cpovc_reports.models import RPTCaseLoad\n'), ((21880, 21894), 'django.db.models.Q', 'Q', ([], {'age__gte': '(25)'}), '(age__gte=25)\n', (21881, 21894), False, 'from django.db.models import Q\n'), ((21897, 21916), 'django.db.models.Q', 'Q', ([], {'dob__isnull': '(True)'}), '(dob__isnull=True)\n', (21898, 21916), False, 'from django.db.models import Q\n'), ((7270, 7299), 'cpovc_registry.models.RegPerson.objects.get', 'RegPerson.objects.get', ([], {'pk': 'cid'}), '(pk=cid)\n', (7291, 7299), False, 'from cpovc_registry.models import RegPerson, RegPersonsExternalIds, RegOrgUnit, RegPersonsOrgUnits, RegOrgUnitGeography\n'), ((7647, 7741), 'cpovc_registry.models.RegPersonsExternalIds.objects.get', 'RegPersonsExternalIds.objects.get', ([], {'person_id': 'cid', 'is_void': '(False)', 'identifier_type_id': '"""IPPN"""'}), "(person_id=cid, is_void=False,\n identifier_type_id='IPPN')\n", (7680, 7741), False, 'from cpovc_registry.models import RegPerson, RegPersonsExternalIds, RegOrgUnit, RegPersonsOrgUnits, RegOrgUnitGeography\n'), ((21279, 21291), 'django.db.models.Q', 'Q', ([], {'age__lt': '(0)'}), '(age__lt=0)\n', (21280, 21291), False, 'from django.db.models import Q\n'), ((21240, 21254), 'django.db.models.Q', 'Q', ([], {'age__gte': '(25)'}), '(age__gte=25)\n', (21241, 21254), False, 'from django.db.models import Q\n'), ((21257, 21276), 'django.db.models.Q', 'Q', ([], {'dob__isnull': '(True)'}), '(dob__isnull=True)\n', (21258, 21276), False, 'from django.db.models import Q\n'), ((21380, 21418), 'django.db.models.Q', 'Q', ([], {'case_category_id': '"""CDIS"""', 'age__gt': '(15)'}), "(case_category_id='CDIS', age__gt=15)\n", (21381, 21418), False, 'from django.db.models import Q\n'), ((21443, 21481), 'django.db.models.Q', 'Q', ([], {'case_category_id': '"""CSIC"""', 'age__gt': '(18)'}), "(case_category_id='CSIC', age__gt=18)\n", (21444, 21481), False, 'from django.db.models import Q\n')]
|
from sqlalchemy.ext.declarative import declarative_base, declared_attr
class CustomBase(object):
""" Generates __tablename__ automatically """
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
Base = declarative_base(cls=CustomBase)
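# A minimal sketch (hypothetical model) showing the effect: any class derived
# from Base gets its lowercased class name as the table name automatically.
from sqlalchemy import Column, Integer

class UserAccount(Base):
    id = Column(Integer, primary_key=True)

assert UserAccount.__tablename__ == 'useraccount'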
|
[
"sqlalchemy.ext.declarative.declarative_base"
] |
[((242, 274), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {'cls': 'CustomBase'}), '(cls=CustomBase)\n', (258, 274), False, 'from sqlalchemy.ext.declarative import declarative_base, declared_attr\n')]
|
doctests = """
########### Tests mostly copied from test_listcomps.py ############
Test simple loop with conditional
>>> sum({i*i for i in range(100) if i&1 == 1})
166650
Test simple case
>>> {2*y + x + 1 for x in (0,) for y in (1,)}
set([3])
Test simple nesting
>>> list(sorted({(i,j) for i in range(3) for j in range(4)}))
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Test nesting with the inner expression dependent on the outer
>>> list(sorted({(i,j) for i in range(4) for j in range(i)}))
[(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
Make sure the induction variable is not exposed
>>> i = 20
>>> sum({i*i for i in range(100)})
328350
>>> i
20
Verify that syntax errors are raised for setcomps used as lvalues
>>> {y for y in (1,2)} = 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
>>> {y for y in (1,2)} += 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
Make a nested set comprehension that acts like set(range())
>>> def srange(n):
... return {i for i in range(n)}
>>> list(sorted(srange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Same again, only as a lambda expression instead of a function definition
>>> lrange = lambda n: {i for i in range(n)}
>>> list(sorted(lrange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Generators can call other generators:
>>> def grange(n):
... for x in {i for i in range(n)}:
... yield x
>>> list(sorted(grange(5)))
[0, 1, 2, 3, 4]
Make sure that None is a valid return value
>>> {None for i in range(10)}
set([None])
########### Tests for various scoping corner cases ############
Return lambdas that use the iteration variable as a default argument
>>> items = {(lambda i=i: i) for i in range(5)}
>>> {x() for x in items} == set(range(5))
True
Same again, only this time as a closure variable
>>> items = {(lambda: i) for i in range(5)}
>>> {x() for x in items}
set([4])
Another way to test that the iteration variable is local to the list comp
>>> items = {(lambda: i) for i in range(5)}
>>> i = 20
>>> {x() for x in items}
set([4])
And confirm that a closure can jump over the list comp scope
>>> items = {(lambda: y) for i in range(5)}
>>> y = 2
>>> {x() for x in items}
set([2])
We also repeat each of the above scoping tests inside a function
>>> def test_func():
... items = {(lambda i=i: i) for i in range(5)}
... return {x() for x in items}
>>> test_func() == set(range(5))
True
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... i = 20
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: y) for i in range(5)}
... y = 2
... return {x() for x in items}
>>> test_func()
set([2])
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=None):
import sys
from test import test_support
from test import test_setcomps
test_support.run_doctest(test_setcomps, verbose)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
test_support.run_doctest(test_setcomps, verbose)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
|
[
"gc.collect",
"sys.gettotalrefcount",
"test.test_support.run_doctest"
] |
[((3418, 3466), 'test.test_support.run_doctest', 'test_support.run_doctest', (['test_setcomps', 'verbose'], {}), '(test_setcomps, verbose)\n', (3442, 3466), False, 'from test import test_support\n'), ((3648, 3696), 'test.test_support.run_doctest', 'test_support.run_doctest', (['test_setcomps', 'verbose'], {}), '(test_setcomps, verbose)\n', (3672, 3696), False, 'from test import test_support\n'), ((3709, 3721), 'gc.collect', 'gc.collect', ([], {}), '()\n', (3719, 3721), False, 'import gc\n'), ((3746, 3768), 'sys.gettotalrefcount', 'sys.gettotalrefcount', ([], {}), '()\n', (3766, 3768), False, 'import sys\n')]
|
import numpy as np
import random
import pickle
class Loader:
@staticmethod
def load_train_set_and_test_set(path):
# loading training set features
with open(path + "/new/train_set_features.pkl", "rb") as f:
train_set_features2 = pickle.load(f)
# reducing feature vector length
features_STDs = np.std(a=train_set_features2, axis=0)
train_set_features = train_set_features2[:, features_STDs > 52.3]
# changing the range of data between 0 and 1
train_set_features = np.divide(train_set_features, train_set_features.max())
# loading training set labels
with open(path + "/new/train_set_labels.pkl", "rb") as f:
train_set_labels = pickle.load(f)
# ------------
# loading test set features
with open(path + "/new/test_set_features.pkl", "rb") as f:
test_set_features2 = pickle.load(f)
# reducing feature vector length
features_STDs = np.std(a=test_set_features2, axis=0)
test_set_features = test_set_features2[:, features_STDs > 47.7]
# changing the range of data between 0 and 1
test_set_features = np.divide(test_set_features, test_set_features.max())
# loading test set labels
with open(path + "/new/test_set_labels.pkl", "rb") as f:
test_set_labels = pickle.load(f)
# ------------
        # preparing our training and test sets - joining datasets and labels
train_set = []
test_set = []
for i in range(len(train_set_features)):
label = np.array([0, 0, 0, 0, 0, 0])
label[int(train_set_labels[i])] = 1
label = label.reshape(6, 1)
train_set.append((train_set_features[i].reshape(119, 1), label))
for i in range(len(test_set_features)):
label = np.array([0, 0, 0, 0, 0, 0])
label[int(test_set_labels[i])] = 1
label = label.reshape(6, 1)
test_set.append((test_set_features[i].reshape(119, 1), label))
# shuffle
random.shuffle(train_set)
random.shuffle(test_set)
# print size
# print(len(train_set), np.shape(train_set)) # 1962
# print(len(test_set)) # 662
train_set = np.array(train_set)
test_set = np.array(test_set)
return train_set, test_set
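# Hedged usage sketch (not part of the original file); the "dataset" directory
# name below is a placeholder for wherever the pickled feature files live:
#
#   train_set, test_set = Loader.load_train_set_and_test_set("dataset")
#   features, label = train_set[0]
#   print(features.shape, label.shape)  # -> (119, 1) (6, 1)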
|
[
"numpy.std",
"random.shuffle",
"pickle.load",
"numpy.array"
] |
[((348, 385), 'numpy.std', 'np.std', ([], {'a': 'train_set_features2', 'axis': '(0)'}), '(a=train_set_features2, axis=0)\n', (354, 385), True, 'import numpy as np\n'), ((991, 1027), 'numpy.std', 'np.std', ([], {'a': 'test_set_features2', 'axis': '(0)'}), '(a=test_set_features2, axis=0)\n', (997, 1027), True, 'import numpy as np\n'), ((2078, 2103), 'random.shuffle', 'random.shuffle', (['train_set'], {}), '(train_set)\n', (2092, 2103), False, 'import random\n'), ((2112, 2136), 'random.shuffle', 'random.shuffle', (['test_set'], {}), '(test_set)\n', (2126, 2136), False, 'import random\n'), ((2278, 2297), 'numpy.array', 'np.array', (['train_set'], {}), '(train_set)\n', (2286, 2297), True, 'import numpy as np\n'), ((2317, 2335), 'numpy.array', 'np.array', (['test_set'], {}), '(test_set)\n', (2325, 2335), True, 'import numpy as np\n'), ((267, 281), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (278, 281), False, 'import pickle\n'), ((735, 749), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (746, 749), False, 'import pickle\n'), ((910, 924), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (921, 924), False, 'import pickle\n'), ((1366, 1380), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1377, 1380), False, 'import pickle\n'), ((1597, 1625), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0])\n', (1605, 1625), True, 'import numpy as np\n'), ((1860, 1888), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0])\n', (1868, 1888), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# example without using Qt Designer; the whole GUI is defined right here
from PyQt4 import QtCore
from PyQt4 import QtGui
# create an "alias" for this method so we can write less code
fromUtf8 = QtCore.QString.fromUtf8
class Ui(QtGui.QWidget):
    def __init__(self, parent=None):
        super(Ui,self).__init__(parent)
        # Change the window title; note that Cyrillic text cannot be
        # passed in directly, only through QString, which is where the
        # fromUtf8 alias comes in
        self.setWindowTitle(fromUtf8('Example'))
        # Define a layout manager to keep the application tidy; in this
        # case a grid layout is used, into which the interface widgets
        # will be placed later
        self.layout = QtGui.QGridLayout()
        # create two text-edit widgets, left and right
        self.leftText = QtGui.QTextEdit()
        self.rightText = QtGui.QTextEdit()
        # Create a button widget
        self.buttonForward = QtGui.QPushButton()
        # change its text
        self.buttonForward.setText(fromUtf8('there ->'))
        self.buttonBack = QtGui.QPushButton()
        self.buttonBack.setText(fromUtf8('<- back'))
        # Place the widgets in the layout: picture a 2x2 table;
        # leftText goes into the first cell and occupies one cell
        self.layout.addWidget(self.leftText, 0, 0, 1, 1)
        self.layout.addWidget(self.rightText, 0, 1, 1, 1)
        self.layout.addWidget(self.buttonForward, 1, 0, 1, 1)
        self.layout.addWidget(self.buttonBack, 1, 1, 1, 1)
        # apply the layout
        self.setLayout(self.layout)
        # connect signals to slots; in this particular case we defined
        # the forward and back slots ourselves
        self.connect(self.buttonForward, QtCore.SIGNAL("clicked()"), self.forward)
        self.connect(self.buttonBack, QtCore.SIGNAL("clicked()"), self.back)
    # in forward() and back() we simply use Qt's "native" methods
def forward(self):
self.leftText.selectAll()
self.leftText.cut()
self.rightText.clear()
self.rightText.paste()
def back(self):
self.rightText.selectAll()
self.rightText.cut()
self.leftText.clear()
self.leftText.paste()
if __name__=='__main__':
import sys
app = QtGui.QApplication(sys.argv)
exampleWindow = Ui()
exampleWindow.show()
sys.exit(app.exec_())
|
[
"PyQt4.QtGui.QGridLayout",
"PyQt4.QtGui.QTextEdit",
"PyQt4.QtGui.QApplication",
"PyQt4.QtGui.QPushButton",
"PyQt4.QtCore.SIGNAL"
] |
[((2412, 2440), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (2430, 2440), False, 'from PyQt4 import QtGui\n'), ((794, 813), 'PyQt4.QtGui.QGridLayout', 'QtGui.QGridLayout', ([], {}), '()\n', (811, 813), False, 'from PyQt4 import QtGui\n'), ((919, 936), 'PyQt4.QtGui.QTextEdit', 'QtGui.QTextEdit', ([], {}), '()\n', (934, 936), False, 'from PyQt4 import QtGui\n'), ((962, 979), 'PyQt4.QtGui.QTextEdit', 'QtGui.QTextEdit', ([], {}), '()\n', (977, 979), False, 'from PyQt4 import QtGui\n'), ((1043, 1062), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', ([], {}), '()\n', (1060, 1062), False, 'from PyQt4 import QtGui\n'), ((1171, 1190), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', ([], {}), '()\n', (1188, 1190), False, 'from PyQt4 import QtGui\n'), ((1871, 1897), 'PyQt4.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (1884, 1897), False, 'from PyQt4 import QtCore\n'), ((1951, 1977), 'PyQt4.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (1964, 1977), False, 'from PyQt4 import QtCore\n')]
|
import itertools
import six
from chainer import testing
def pooling_patches(dims, ksize, stride, pad, cover_all):
"""Return tuples of slices that indicate pooling patches."""
# Left-top indexes of each pooling patch.
if cover_all:
xss = itertools.product(
*[six.moves.range(-p, d + p - k + s, s)
for (d, k, s, p) in six.moves.zip(dims, ksize, stride, pad)])
else:
xss = itertools.product(
*[six.moves.range(-p, d + p - k + 1, s)
for (d, k, s, p) in six.moves.zip(dims, ksize, stride, pad)])
# Tuples of slices for pooling patches.
return [tuple(slice(max(x, 0), min(x + k, d))
for (x, d, k) in six.moves.zip(xs, dims, ksize))
for xs in xss]
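# Hedged worked example (added for illustration; the values are made up): with
# dims=(4,), ksize=(2,), stride=(2,), pad=(0,) and cover_all=False, the
# left-top indexes are x in {0, 2}, so the function returns
#     [(slice(0, 2),), (slice(2, 4),)]
# i.e. two non-overlapping windows covering a length-4 axis.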
testing.run_module(__name__, __file__)
|
[
"chainer.testing.run_module",
"six.moves.zip",
"six.moves.range"
] |
[((768, 806), 'chainer.testing.run_module', 'testing.run_module', (['__name__', '__file__'], {}), '(__name__, __file__)\n', (786, 806), False, 'from chainer import testing\n'), ((293, 330), 'six.moves.range', 'six.moves.range', (['(-p)', '(d + p - k + s)', 's'], {}), '(-p, d + p - k + s, s)\n', (308, 330), False, 'import six\n'), ((464, 501), 'six.moves.range', 'six.moves.range', (['(-p)', '(d + p - k + 1)', 's'], {}), '(-p, d + p - k + 1, s)\n', (479, 501), False, 'import six\n'), ((707, 737), 'six.moves.zip', 'six.moves.zip', (['xs', 'dims', 'ksize'], {}), '(xs, dims, ksize)\n', (720, 737), False, 'import six\n'), ((365, 404), 'six.moves.zip', 'six.moves.zip', (['dims', 'ksize', 'stride', 'pad'], {}), '(dims, ksize, stride, pad)\n', (378, 404), False, 'import six\n'), ((536, 575), 'six.moves.zip', 'six.moves.zip', (['dims', 'ksize', 'stride', 'pad'], {}), '(dims, ksize, stride, pad)\n', (549, 575), False, 'import six\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging.config
from logging.handlers import RotatingFileHandler
from .. import config
DEBUG_PLUS = 15
logging.addLevelName(DEBUG_PLUS, "DEBUG_PLUS")
LOG_FORMAT = '[%(asctime)s - %(levelname)s] %(pathname)s...%(funcName)s, line %(lineno)s > %(message)s'
def deplus(self, message, *args, **kws):
if self.isEnabledFor(DEBUG_PLUS):
self._log(DEBUG_PLUS, message, args, **kws)
logging.Logger.deplus = deplus
def get_logger():
logger = logging.getLogger()
if logger.level == DEBUG_PLUS:
return logger
logger.setLevel(DEBUG_PLUS)
debug_handler = RotatingFileHandler(
config.log_path,
maxBytes=1024 * 1024 * 5,
backupCount=0,
)
debug_handler.setLevel(DEBUG_PLUS)
formatter = logging.Formatter(LOG_FORMAT)
debug_handler.setFormatter(formatter)
logger.handlers = []
logger.addHandler(debug_handler)
return logger
logger = get_logger()
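# Hedged usage sketch (not part of the original module); assumes config.log_path
# points at a writable location:
#
#   logger.deplus("logged at the custom DEBUG_PLUS level (15)")
#   logger.info("also emitted, since INFO (20) >= DEBUG_PLUS (15)")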
|
[
"logging.handlers.RotatingFileHandler"
] |
[((638, 715), 'logging.handlers.RotatingFileHandler', 'RotatingFileHandler', (['config.log_path'], {'maxBytes': '(1024 * 1024 * 5)', 'backupCount': '(0)'}), '(config.log_path, maxBytes=1024 * 1024 * 5, backupCount=0)\n', (657, 715), False, 'from logging.handlers import RotatingFileHandler\n')]
|
import ctcsound
def init_csound_with_orc(args_list, orc_file, silent, string_values):
with open(orc_file, "r") as f:
orc_string = f.read()
if silent:
args_list.append("-m0")
args_list.append("-d")
cs = ctcsound.Csound()
cs.compileOrc(orc_string)
for x in args_list:
cs.setOption(x)
if string_values:
for k in string_values.keys():
cs.setStringChannel(k, string_values[k])
return cs
def init_csound_with_csd(args_list, csd_file, silent, string_values):
with open(csd_file, "r") as f:
csd_string = f.read()
if silent:
args_list.append("-m0")
args_list.append("-d")
cs = ctcsound.Csound()
cs.compileCsd(csd_string)
for x in args_list:
cs.setOption(x)
if string_values:
for k in string_values.keys():
cs.setStringChannel(k, string_values[k])
return cs
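# Hedged usage sketch (not part of the original module): "example.orc" and the
# channel name "mode" are placeholders, not files or channels this code ships.
#
#   cs = init_csound_with_orc(["-odac"], "example.orc", silent=True,
#                             string_values={"mode": "demo"})
#   cs.start()
#   cs.perform()
#   cs.cleanup()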
|
[
"ctcsound.Csound"
] |
[((239, 256), 'ctcsound.Csound', 'ctcsound.Csound', ([], {}), '()\n', (254, 256), False, 'import ctcsound\n'), ((687, 704), 'ctcsound.Csound', 'ctcsound.Csound', ([], {}), '()\n', (702, 704), False, 'import ctcsound\n')]
|
import os
import sys
import re
import sublime
import sublime_plugin
from sbot_common import *
#-----------------------------------------------------------------------------------
def _do_sub(view, edit, reo, sub):
# Generic substitution function.
    # `get_sel_regions` is pulled in by the star import above; the bare module
    # name `sbot_common` is not bound here, so qualifying the call would fail
    regions = get_sel_regions(view)
for reg in regions:
orig = view.substr(reg)
new = reo.sub(sub, orig)
view.replace(edit, reg, new)
#-----------------------------------------------------------------------------------
class SbotTrimCommand(sublime_plugin.TextCommand):
def run(self, edit, how):
if how == 'leading':
reo = re.compile('^[ \t]+', re.MULTILINE)
sub = ''
elif how == 'trailing':
reo = re.compile('[\t ]+$', re.MULTILINE)
sub = ''
else: # both
reo = re.compile('^[ \t]+|[\t ]+$', re.MULTILINE)
sub = ''
_do_sub(self.view, edit, reo, sub)
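# Hedged usage note (not part of the original plugin): Sublime Text derives the
# command name 'sbot_trim' from SbotTrimCommand, so the command can be run as
#
#   view.run_command('sbot_trim', {'how': 'both'})  # or 'leading' / 'trailing'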
|
[
"re.compile"
] |
[((643, 678), 're.compile', 're.compile', (['"""^[ \t]+"""', 're.MULTILINE'], {}), "('^[ \\t]+', re.MULTILINE)\n", (653, 678), False, 'import re\n'), ((750, 785), 're.compile', 're.compile', (['"""[\t ]+$"""', 're.MULTILINE'], {}), "('[\\t ]+$', re.MULTILINE)\n", (760, 785), False, 'import re\n'), ((846, 889), 're.compile', 're.compile', (['"""^[ \t]+|[\t ]+$"""', 're.MULTILINE'], {}), "('^[ \\t]+|[\\t ]+$', re.MULTILINE)\n", (856, 889), False, 'import re\n')]
|
# GENERATED FILE - DO NOT EDIT THIS FILE UNLESS YOU ARE A WIZZARD
#pylint: skip-file
from heat.engine import properties
from heat.engine import constraints
from heat.engine import attributes
from heat.common.i18n import _
from avi.heat.avi_resource import AviResource
from avi.heat.avi_resource import AviNestedResource
from options import *
from common import *
from options import *
class ClusterNode(object):
# all schemas
name_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
ip_schema = properties.Schema(
properties.Schema.MAP,
_("IP address of controller VM."),
schema=IpAddr.properties_schema,
required=True,
update_allowed=True,
)
vm_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("UUID on the controller VM"),
required=False,
update_allowed=True,
)
vm_name_schema = properties.Schema(
properties.Schema.STRING,
_("Name of the controller VM"),
required=False,
update_allowed=True,
)
vm_mor_schema = properties.Schema(
properties.Schema.STRING,
_("Managed object reference of this controller VM"),
required=False,
update_allowed=True,
)
vm_hostname_schema = properties.Schema(
properties.Schema.STRING,
_("Hostname assigned to this controller VM"),
required=False,
update_allowed=True,
)
public_ip_or_name_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.2.3) Public IP address or hostname of the controller VM"),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
categories_item_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 18.1.1) Optional service categories that a node can be assigned (e.g. SYSTEM, INFRASTRUCTURE or ANALYTICS)"),
required=True,
update_allowed=False,
)
categories_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 18.1.1) Optional service categories that a node can be assigned (e.g. SYSTEM, INFRASTRUCTURE or ANALYTICS)"),
schema=categories_item_schema,
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'name',
'ip',
'vm_uuid',
'vm_name',
'vm_mor',
'vm_hostname',
'public_ip_or_name',
'categories',
)
# mapping of properties to their schemas
properties_schema = {
'name': name_schema,
'ip': ip_schema,
'vm_uuid': vm_uuid_schema,
'vm_name': vm_name_schema,
'vm_mor': vm_mor_schema,
'vm_hostname': vm_hostname_schema,
'public_ip_or_name': public_ip_or_name_schema,
'categories': categories_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'ip': getattr(IpAddr, 'field_references', {}),
'public_ip_or_name': getattr(IpAddr, 'field_references', {}),
}
unique_keys = {
'ip': getattr(IpAddr, 'unique_keys', {}),
'public_ip_or_name': getattr(IpAddr, 'unique_keys', {}),
}
class AzureClusterInfo(object):
# all schemas
subscription_id_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.5) "),
required=True,
update_allowed=True,
)
cloud_credential_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.5) You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=True,
update_allowed=True,
)
# properties list
PROPERTIES = (
'subscription_id',
'cloud_credential_uuid',
)
# mapping of properties to their schemas
properties_schema = {
'subscription_id': subscription_id_schema,
'cloud_credential_uuid': cloud_credential_uuid_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'cloud_credential_uuid': 'cloudconnectoruser',
}
class ClusterCloudDetails(AviResource):
resource_name = "clusterclouddetails"
# all schemas
avi_version_schema = properties.Schema(
properties.Schema.STRING,
_("Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."),
required=False,
update_allowed=True,
)
name_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.5) "),
required=True,
update_allowed=True,
)
azure_info_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.2.5) Azure info to configure cluster_vip on the controller"),
schema=AzureClusterInfo.properties_schema,
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'avi_version',
'name',
'azure_info',
)
# mapping of properties to their schemas
properties_schema = {
'avi_version': avi_version_schema,
'name': name_schema,
'azure_info': azure_info_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'azure_info': getattr(AzureClusterInfo, 'field_references', {}),
}
unique_keys = {
'azure_info': getattr(AzureClusterInfo, 'unique_keys', {}),
}
class Cluster(AviResource):
resource_name = "cluster"
# all schemas
avi_version_schema = properties.Schema(
properties.Schema.STRING,
_("Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."),
required=False,
update_allowed=True,
)
name_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
virtual_ip_schema = properties.Schema(
properties.Schema.MAP,
_("A virtual IP address. This IP address will be dynamically reconfigured so that it always is the IP of the cluster leader."),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
nodes_item_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=ClusterNode.properties_schema,
required=True,
update_allowed=False,
)
nodes_schema = properties.Schema(
properties.Schema.LIST,
_(""),
schema=nodes_item_schema,
required=False,
update_allowed=True,
)
rejoin_nodes_automatically_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Re-join cluster nodes automatically in the event one of the node is reset to factory. (Default: True)"),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'avi_version',
'name',
'virtual_ip',
'nodes',
'rejoin_nodes_automatically',
)
# mapping of properties to their schemas
properties_schema = {
'avi_version': avi_version_schema,
'name': name_schema,
'virtual_ip': virtual_ip_schema,
'nodes': nodes_schema,
'rejoin_nodes_automatically': rejoin_nodes_automatically_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'nodes': getattr(ClusterNode, 'field_references', {}),
'virtual_ip': getattr(IpAddr, 'field_references', {}),
}
unique_keys = {
'nodes': getattr(ClusterNode, 'unique_keys', {}),
'virtual_ip': getattr(IpAddr, 'unique_keys', {}),
}
def resource_mapping():
return {
'Avi::LBaaS::Cluster': Cluster,
'Avi::LBaaS::ClusterCloudDetails': ClusterCloudDetails,
}
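# Hedged usage sketch (not part of the generated file): once the mapping above
# is registered as a Heat plugin, a template can declare the resource; every
# property value below is an illustrative placeholder.
#
#   resources:
#     controller_cluster:
#       type: Avi::LBaaS::Cluster
#       properties:
#         name: cluster-01
#         nodes:
#           - name: node-1
#             ip: {addr: 10.0.0.11, type: V4}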
|
[
"heat.common.i18n._"
] |
[((513, 518), 'heat.common.i18n._', '_', (['""""""'], {}), "('')\n", (514, 518), False, 'from heat.common.i18n import _\n'), ((653, 686), 'heat.common.i18n._', '_', (['"""IP address of controller VM."""'], {}), "('IP address of controller VM.')\n", (654, 686), False, 'from heat.common.i18n import _\n'), ((869, 899), 'heat.common.i18n._', '_', (['"""UUID on the controller VM"""'], {}), "('UUID on the controller VM')\n", (870, 899), False, 'from heat.common.i18n import _\n'), ((1042, 1072), 'heat.common.i18n._', '_', (['"""Name of the controller VM"""'], {}), "('Name of the controller VM')\n", (1043, 1072), False, 'from heat.common.i18n import _\n'), ((1214, 1265), 'heat.common.i18n._', '_', (['"""Managed object reference of this controller VM"""'], {}), "('Managed object reference of this controller VM')\n", (1215, 1265), False, 'from heat.common.i18n import _\n'), ((1412, 1456), 'heat.common.i18n._', '_', (['"""Hostname assigned to this controller VM"""'], {}), "('Hostname assigned to this controller VM')\n", (1413, 1456), False, 'from heat.common.i18n import _\n'), ((1606, 1685), 'heat.common.i18n._', '_', (['"""(Introduced in: 17.2.3) Public IP address or hostname of the controller VM"""'], {}), "('(Introduced in: 17.2.3) Public IP address or hostname of the controller VM')\n", (1607, 1685), False, 'from heat.common.i18n import _\n'), ((1877, 2009), 'heat.common.i18n._', '_', (['"""(Introduced in: 18.1.1) Optional service categories that a node can be assigned (e.g. SYSTEM, INFRASTRUCTURE or ANALYTICS)"""'], {}), "('(Introduced in: 18.1.1) Optional service categories that a node can be assigned (e.g. SYSTEM, INFRASTRUCTURE or ANALYTICS)'\n )\n", (1878, 2009), False, 'from heat.common.i18n import _\n'), ((2148, 2280), 'heat.common.i18n._', '_', (['"""(Introduced in: 18.1.1) Optional service categories that a node can be assigned (e.g. SYSTEM, INFRASTRUCTURE or ANALYTICS)"""'], {}), "('(Introduced in: 18.1.1) Optional service categories that a node can be assigned (e.g. SYSTEM, INFRASTRUCTURE or ANALYTICS)'\n )\n", (2149, 2280), False, 'from heat.common.i18n import _\n'), ((3455, 3484), 'heat.common.i18n._', '_', (['"""(Introduced in: 17.2.5) """'], {}), "('(Introduced in: 17.2.5) ')\n", (3456, 3484), False, 'from heat.common.i18n import _\n'), ((3640, 3803), 'heat.common.i18n._', '_', (['"""(Introduced in: 17.2.5) You can either provide UUID or provide a name with the prefix \'get_avi_uuid_by_name:\', e.g., \'get_avi_uuid_by_name:my_obj_name\'."""'], {}), '("(Introduced in: 17.2.5) You can either provide UUID or provide a name with the prefix \'get_avi_uuid_by_name:\', e.g., \'get_avi_uuid_by_name:my_obj_name\'."\n )\n', (3641, 3803), False, 'from heat.common.i18n import _\n'), ((4490, 4646), 'heat.common.i18n._', '_', (['"""Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."""'], {}), "('Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set.'\n )\n", (4491, 4646), False, 'from heat.common.i18n import _\n'), ((4781, 4810), 'heat.common.i18n._', '_', (['"""(Introduced in: 17.2.5) """'], {}), "('(Introduced in: 17.2.5) ')\n", (4782, 4810), False, 'from heat.common.i18n import _\n'), ((4952, 5039), 'heat.common.i18n._', '_', (['"""(Introduced in: 17.2.5) Azure info to configure cluster_vip on the controller"""'], {}), "('(Introduced in: 17.2.5) Azure info to configure cluster_vip on the controller'\n )\n", (4953, 5039), False, 'from heat.common.i18n import _\n'), ((5867, 6023), 'heat.common.i18n._', '_', (['"""Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."""'], {}), "('Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set.'\n )\n", (5868, 6023), False, 'from heat.common.i18n import _\n'), ((6158, 6163), 'heat.common.i18n._', '_', (['""""""'], {}), "('')\n", (6159, 6163), False, 'from heat.common.i18n import _\n'), ((6305, 6436), 'heat.common.i18n._', '_', (['"""A virtual IP address. This IP address will be dynamically reconfigured so that it always is the IP of the cluster leader."""'], {}), "('A virtual IP address. This IP address will be dynamically reconfigured so that it always is the IP of the cluster leader.'\n )\n", (6306, 6436), False, 'from heat.common.i18n import _\n'), ((6615, 6620), 'heat.common.i18n._', '_', (['""""""'], {}), "('')\n", (6616, 6620), False, 'from heat.common.i18n import _\n'), ((6805, 6810), 'heat.common.i18n._', '_', (['""""""'], {}), "('')\n", (6806, 6810), False, 'from heat.common.i18n import _\n'), ((7007, 7118), 'heat.common.i18n._', '_', (['"""Re-join cluster nodes automatically in the event one of the node is reset to factory. (Default: True)"""'], {}), "('Re-join cluster nodes automatically in the event one of the node is reset to factory. (Default: True)'\n )\n", (7008, 7118), False, 'from heat.common.i18n import _\n')]
|
#!/usr/bin/env python3
import rospy
from bitbots_msgs.msg import FootPressure
import argparse
import random
import time
import tkinter
import threading
parser = argparse.ArgumentParser()
parser.add_argument("-r", "--rate", help="Publish rate", dest="rate", type=int, default=200)
parser.add_argument("-n", "--noise", help="Amount of noise on the signal", type=float, default=1e6)
parser.add_argument("-s", "--seed", help="random seed for zero and scale", dest="seed")
args = parser.parse_args()
if args.seed is not None:
random.seed(args.seed)
zeroes = [2e8 * random.random() - 1e8 for i in range(8)]
scales = []
for i in range(8):
if random.random() < 0.5:
m = 1
else:
m = -1
scales.append((random.random() * 2e7 + 1e7) * m) # random value between 1e7 and 3e7 or -1e7 and -3e7
force_values = [0] * 8
# this is ugly, but I don't have the patience to do it properly
def update_force_0(force):
force_values[0] = float(force)
def update_force_1(force):
force_values[1] = float(force)
def update_force_2(force):
force_values[2] = float(force)
def update_force_3(force):
force_values[3] = float(force)
def update_force_4(force):
force_values[4] = float(force)
def update_force_5(force):
force_values[5] = float(force)
def update_force_6(force):
force_values[6] = float(force)
def update_force_7(force):
force_values[7] = float(force)
force_functions = [update_force_0, update_force_1, update_force_2, update_force_3,
update_force_4, update_force_5, update_force_6, update_force_7]
master = tkinter.Tk()
master.title = "Foot Pressure test gui"
labels = ["l_l_back", "l_l_front", "l_r_back", "l_r_front", "r_l_back", "r_l_front", "r_r_back", "r_r_front", ]
scalers = []
for i in range(8):
scalers.append(tkinter.Scale(master,
from_=-0.2,
to=10,
orient=tkinter.HORIZONTAL,
resolution=0.05,
label=labels[i],
length=300,
width=30,
command=force_functions[i]))
scalers[i].pack()
def zero():
for s in scalers:
s.set(0.0)
b = tkinter.Button(master, command=zero, text="Zero")
b.pack()
rospy.init_node("foot_pressure_tester")
pub_r = rospy.Publisher("/foot_pressure_right/raw", FootPressure, queue_size=1, tcp_nodelay=True)
pub_l = rospy.Publisher("/foot_pressure_left/raw", FootPressure, queue_size=1, tcp_nodelay=True)
rate = rospy.Rate(args.rate)
msg_l = FootPressure()
msg_r = FootPressure()
def publish(timer):
msg_l.header.stamp = msg_r.header.stamp = rospy.get_rostime()
msg_l.left_back = zeroes[0] + scales[0] * force_values[0] + random.random() * args.noise
msg_l.left_front = zeroes[1] + scales[1] * force_values[1] + random.random() * args.noise
msg_l.right_back = zeroes[2] + scales[2] * force_values[2] + random.random() * args.noise
msg_l.right_front = zeroes[3] + scales[3] * force_values[3] + random.random() * args.noise
msg_r.left_back = zeroes[4] + scales[4] * force_values[4] + random.random() * args.noise
msg_r.left_front = zeroes[5] + scales[5] * force_values[5] + random.random() * args.noise
msg_r.right_back = zeroes[6] + scales[6] * force_values[6] + random.random() * args.noise
msg_r.right_front = zeroes[7] + scales[7] * force_values[7] + random.random() * args.noise
pub_l.publish(msg_l)
pub_r.publish(msg_r)
rospy.Timer(rospy.Duration(1) / args.rate, publish)
tkinter.mainloop()
rospy.signal_shutdown("gui closed")
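# Hedged usage note (not part of the original script): with a roscore running,
# the GUI can be started e.g. as
#
#   ./foot_pressure_test_gui.py --rate 100 --noise 1e5 --seed 42
#
# (the script file name is a placeholder for the actual path)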
|
[
"argparse.ArgumentParser",
"tkinter.mainloop",
"tkinter.Button",
"rospy.get_rostime",
"rospy.Publisher",
"rospy.Rate",
"rospy.signal_shutdown",
"bitbots_msgs.msg.FootPressure",
"random.random",
"random.seed",
"rospy.init_node",
"tkinter.Scale",
"tkinter.Tk",
"rospy.Duration"
] |
[((163, 188), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (186, 188), False, 'import argparse\n'), ((1592, 1604), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (1602, 1604), False, 'import tkinter\n'), ((2308, 2357), 'tkinter.Button', 'tkinter.Button', (['master'], {'command': 'zero', 'text': '"""Zero"""'}), "(master, command=zero, text='Zero')\n", (2322, 2357), False, 'import tkinter\n'), ((2368, 2407), 'rospy.init_node', 'rospy.init_node', (['"""foot_pressure_tester"""'], {}), "('foot_pressure_tester')\n", (2383, 2407), False, 'import rospy\n'), ((2416, 2509), 'rospy.Publisher', 'rospy.Publisher', (['"""/foot_pressure_right/raw"""', 'FootPressure'], {'queue_size': '(1)', 'tcp_nodelay': '(True)'}), "('/foot_pressure_right/raw', FootPressure, queue_size=1,\n tcp_nodelay=True)\n", (2431, 2509), False, 'import rospy\n'), ((2514, 2606), 'rospy.Publisher', 'rospy.Publisher', (['"""/foot_pressure_left/raw"""', 'FootPressure'], {'queue_size': '(1)', 'tcp_nodelay': '(True)'}), "('/foot_pressure_left/raw', FootPressure, queue_size=1,\n tcp_nodelay=True)\n", (2529, 2606), False, 'import rospy\n'), ((2611, 2632), 'rospy.Rate', 'rospy.Rate', (['args.rate'], {}), '(args.rate)\n', (2621, 2632), False, 'import rospy\n'), ((2641, 2655), 'bitbots_msgs.msg.FootPressure', 'FootPressure', ([], {}), '()\n', (2653, 2655), False, 'from bitbots_msgs.msg import FootPressure\n'), ((2664, 2678), 'bitbots_msgs.msg.FootPressure', 'FootPressure', ([], {}), '()\n', (2676, 2678), False, 'from bitbots_msgs.msg import FootPressure\n'), ((3624, 3642), 'tkinter.mainloop', 'tkinter.mainloop', ([], {}), '()\n', (3640, 3642), False, 'import tkinter\n'), ((3643, 3678), 'rospy.signal_shutdown', 'rospy.signal_shutdown', (['"""gui closed"""'], {}), "('gui closed')\n", (3664, 3678), False, 'import rospy\n'), ((530, 552), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (541, 552), False, 'import random\n'), ((2747, 2766), 'rospy.get_rostime', 'rospy.get_rostime', ([], {}), '()\n', (2764, 2766), False, 'import rospy\n'), ((649, 664), 'random.random', 'random.random', ([], {}), '()\n', (662, 664), False, 'import random\n'), ((1808, 1968), 'tkinter.Scale', 'tkinter.Scale', (['master'], {'from_': '(-0.2)', 'to': '(10)', 'orient': 'tkinter.HORIZONTAL', 'resolution': '(0.05)', 'label': 'labels[i]', 'length': '(300)', 'width': '(30)', 'command': 'force_functions[i]'}), '(master, from_=-0.2, to=10, orient=tkinter.HORIZONTAL,\n resolution=0.05, label=labels[i], length=300, width=30, command=\n force_functions[i])\n', (1821, 1968), False, 'import tkinter\n'), ((3584, 3601), 'rospy.Duration', 'rospy.Duration', (['(1)'], {}), '(1)\n', (3598, 3601), False, 'import rospy\n'), ((570, 585), 'random.random', 'random.random', ([], {}), '()\n', (583, 585), False, 'import random\n'), ((2831, 2846), 'random.random', 'random.random', ([], {}), '()\n', (2844, 2846), False, 'import random\n'), ((2925, 2940), 'random.random', 'random.random', ([], {}), '()\n', (2938, 2940), False, 'import random\n'), ((3019, 3034), 'random.random', 'random.random', ([], {}), '()\n', (3032, 3034), False, 'import random\n'), ((3114, 3129), 'random.random', 'random.random', ([], {}), '()\n', (3127, 3129), False, 'import random\n'), ((3208, 3223), 'random.random', 'random.random', ([], {}), '()\n', (3221, 3223), False, 'import random\n'), ((3302, 3317), 'random.random', 'random.random', ([], {}), '()\n', (3315, 3317), False, 'import random\n'), ((3396, 3411), 'random.random', 'random.random', ([], {}), '()\n', (3409, 3411), False, 'import random\n'), ((3491, 3506), 'random.random', 'random.random', ([], {}), '()\n', (3504, 3506), False, 'import random\n'), ((730, 745), 'random.random', 'random.random', ([], {}), '()\n', (743, 745), False, 'import random\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_virtual_address
short_description: Manage LTM virtual addresses on a BIG-IP
description:
- Manage LTM virtual addresses on a BIG-IP.
version_added: 2.4
options:
name:
description:
- Name of the virtual address.
- If this parameter is not provided, then the value of C(address) will
be used.
version_added: 2.6
address:
description:
- Virtual address. This value cannot be modified after it is set.
- If you never created a virtual address, but did create virtual servers, then
a virtual address for each virtual server was created automatically. The name
of this virtual address is its IP address value.
netmask:
description:
- Netmask of the provided virtual address. This value cannot be
modified after it is set.
default: 255.255.255.255
connection_limit:
description:
- Specifies the number of concurrent connections that the system
allows on this virtual address.
arp_state:
description:
      - Specifies whether the system accepts ARP requests. When C(disabled),
specifies that the system does not accept ARP requests. Note that
both ARP and ICMP Echo must be disabled in order for forwarding
virtual servers using that virtual address to forward ICMP packets.
        If C(enabled), then the packets are dropped.
choices:
- enabled
- disabled
auto_delete:
description:
- Specifies whether the system automatically deletes the virtual
address with the deletion of the last associated virtual server.
When C(disabled), specifies that the system leaves the virtual
address even when all associated virtual servers have been deleted.
When creating the virtual address, the default value is C(enabled).
choices:
- enabled
- disabled
icmp_echo:
description:
- Specifies how the systems sends responses to (ICMP) echo requests
on a per-virtual address basis for enabling route advertisement.
When C(enabled), the BIG-IP system intercepts ICMP echo request
packets and responds to them directly. When C(disabled), the BIG-IP
system passes ICMP echo requests through to the backend servers.
        When C(selective), causes the BIG-IP system to internally enable or
disable responses based on virtual server state; C(when_any_available),
        C(when_all_available), or C(always), regardless of the state of any
virtual servers.
choices:
- enabled
- disabled
- selective
state:
description:
- The virtual address state. If C(absent), an attempt to delete the
virtual address will be made. This will only succeed if this
virtual address is not in use by a virtual server. C(present) creates
the virtual address and enables it. If C(enabled), enable the virtual
address if it exists. If C(disabled), create the virtual address if
needed, and set state to C(disabled).
default: present
choices:
- present
- absent
- enabled
- disabled
availability_calculation:
description:
- Specifies what routes of the virtual address the system advertises.
When C(when_any_available), advertises the route when any virtual
server is available. When C(when_all_available), advertises the
        route when all virtual servers are available. When C(always), always
advertises the route regardless of the virtual servers available.
choices:
- always
- when_all_available
- when_any_available
aliases: ['advertise_route']
version_added: 2.6
use_route_advertisement:
description:
- Specifies whether the system uses route advertisement for this
virtual address.
- When disabled, the system does not advertise routes for this virtual address.
- Deprecated. Use the C(route_advertisement) parameter instead.
type: bool
route_advertisement:
description:
- Specifies whether the system uses route advertisement for this
virtual address.
- When disabled, the system does not advertise routes for this virtual address.
- The majority of these options are only supported on versions 13.0.0-HF1 or
        higher. On versions less than this, all choices except C(disabled) will
translate to C(enabled).
- When C(always), the BIG-IP system will always advertise the route for the
virtual address, regardless of availability status. This requires an C(enabled)
virtual address.
- When C(enabled), the BIG-IP system will advertise the route for the available
virtual address, based on the calculation method in the availability calculation.
- When C(disabled), the BIG-IP system will not advertise the route for the virtual
address, regardless of the availability status.
- When C(selective), you can also selectively enable ICMP echo responses, which
causes the BIG-IP system to internally enable or disable responses based on
virtual server state. Either C(any) virtual server, C(all) virtual servers, or
C(always), regardless of the state of any virtual server.
- When C(any), the BIG-IP system will advertise the route for the virtual address
when any virtual server is available.
- When C(all), the BIG-IP system will advertise the route for the virtual address
when all virtual servers are available.
choices:
- disabled
- enabled
- always
- selective
- any
- all
version_added: 2.6
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
traffic_group:
description:
- The traffic group for the virtual address. When creating a new address,
if this value is not specified, the default of C(/Common/traffic-group-1)
will be used.
version_added: 2.5
route_domain:
description:
- The route domain of the C(address) that you want to use.
- This value cannot be modified after it is set.
version_added: 2.6
notes:
- Requires the netaddr Python package on the host. This is as easy as pip
install netaddr.
extends_documentation_fragment: f5
requirements:
- netaddr
author:
- <NAME> (@caphrim007)
'''
EXAMPLES = r'''
- name: Add virtual address
bigip_virtual_address:
server: lb.mydomain.net
user: admin
password: <PASSWORD>
state: present
partition: Common
address: 10.10.10.10
delegate_to: localhost
- name: Enable route advertisement on the virtual address
bigip_virtual_address:
server: lb.mydomain.net
user: admin
password: <PASSWORD>
state: present
address: 10.10.10.10
use_route_advertisement: yes
delegate_to: localhost
'''
RETURN = r'''
use_route_advertisement:
description: The new setting for whether to use route advertising or not.
returned: changed
type: bool
sample: true
auto_delete:
description: New setting for auto deleting virtual address.
returned: changed
type: string
sample: enabled
icmp_echo:
description: New ICMP echo setting applied to virtual address.
returned: changed
type: string
sample: disabled
connection_limit:
description: The new connection limit of the virtual address.
returned: changed
type: int
sample: 1000
netmask:
description: The netmask of the virtual address.
returned: created
type: int
sample: 2345
arp_state:
description: The new way the virtual address handles ARP requests.
returned: changed
type: string
sample: disabled
address:
description: The address of the virtual address.
returned: created
type: int
sample: 2345
state:
description: The new state of the virtual address.
returned: changed
type: string
sample: disabled
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.parsing.convert_bool import BOOLEANS_TRUE
from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
api_map = {
'routeAdvertisement': 'route_advertisement_type',
'autoDelete': 'auto_delete',
'icmpEcho': 'icmp_echo',
'connectionLimit': 'connection_limit',
'serverScope': 'availability_calculation',
'mask': 'netmask',
'arp': 'arp_state',
'trafficGroup': 'traffic_group',
}
updatables = [
'route_advertisement_type', 'auto_delete', 'icmp_echo', 'connection_limit',
'arp_state', 'enabled', 'availability_calculation', 'traffic_group'
]
returnables = [
'route_advertisement_type', 'auto_delete', 'icmp_echo', 'connection_limit',
'netmask', 'arp_state', 'address', 'state', 'traffic_group', 'route_domain'
]
api_attributes = [
'routeAdvertisement', 'autoDelete', 'icmpEcho', 'connectionLimit',
'advertiseRoute', 'arp', 'mask', 'enabled', 'serverScope', 'trafficGroup'
]
@property
def availability_calculation(self):
if self._values['availability_calculation'] is None:
return None
elif self._values['availability_calculation'] in ['any', 'when_any_available']:
return 'any'
elif self._values['availability_calculation'] in ['all', 'when_all_available']:
return 'all'
elif self._values['availability_calculation'] in ['none', 'always']:
return 'none'
@property
def connection_limit(self):
if self._values['connection_limit'] is None:
return None
return int(self._values['connection_limit'])
@property
def enabled(self):
if self._values['state'] in ['enabled', 'present']:
return 'yes'
elif self._values['enabled'] in BOOLEANS_TRUE:
return 'yes'
elif self._values['state'] == 'disabled':
return 'no'
elif self._values['enabled'] in BOOLEANS_FALSE:
return 'no'
else:
return None
@property
def netmask(self):
if self._values['netmask'] is None:
return None
try:
ip = netaddr.IPAddress(self._values['netmask'])
return str(ip)
except netaddr.core.AddrFormatError:
raise F5ModuleError(
"The provided 'netmask' is not a valid IP address"
)
@property
def auto_delete(self):
if self._values['auto_delete'] is None:
return None
elif self._values['auto_delete'] in BOOLEANS_TRUE:
return True
elif self._values['auto_delete'] == 'enabled':
return True
else:
return False
@property
def state(self):
if self.enabled == 'yes' and self._values['state'] != 'present':
return 'enabled'
elif self.enabled == 'no':
return 'disabled'
else:
return self._values['state']
@property
def traffic_group(self):
if self._values['traffic_group'] is None:
return None
else:
result = fq_name(self.partition, self._values['traffic_group'])
if result.startswith('/Common/'):
return result
else:
raise F5ModuleError(
"Traffic groups can only exist in /Common"
)
@property
def route_advertisement_type(self):
if self.use_route_advertisement:
return self.use_route_advertisement
elif self.route_advertisement:
return self.route_advertisement
else:
return self._values['route_advertisement_type']
@property
def use_route_advertisement(self):
if self._values['use_route_advertisement'] is None:
return None
if self._values['use_route_advertisement'] in BOOLEANS_TRUE:
return 'enabled'
elif self._values['use_route_advertisement'] == 'enabled':
return 'enabled'
else:
return 'disabled'
@property
def route_advertisement(self):
if self._values['route_advertisement'] is None:
return None
version = self.client.api.tmos_version
if LooseVersion(version) <= LooseVersion('13.0.0'):
if self._values['route_advertisement'] == 'disabled':
return 'disabled'
else:
return 'enabled'
else:
return self._values['route_advertisement']
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def address(self):
if self._values['address'] is None:
return None
try:
ip = netaddr.IPAddress(self._values['address'])
return str(ip)
except netaddr.core.AddrFormatError:
raise F5ModuleError(
"The provided 'address' is not a valid IP address"
)
@property
def route_domain(self):
if self._values['route_domain'] is None:
return None
try:
return int(self._values['route_domain'])
except ValueError:
try:
rd = self.client.api.tm.net.route_domains.route_domain.load(
name=self._values['route_domain'],
partition=self.partition
)
return int(rd.id)
except iControlUnexpectedHTTPError:
raise F5ModuleError(
"The specified 'route_domain' was not found."
)
@property
def full_address(self):
if self.route_domain is not None:
return '{0}%{1}'.format(self.address, self.route_domain)
return self.address
@property
def name(self):
if self._values['name'] is None:
result = str(self.address)
if self.route_domain:
result = "{0}%{1}".format(result, self.route_domain)
else:
result = self._values['name']
return result
class Changes(Parameters):
pass
class UsableChanges(Changes):
@property
def address(self):
if self._values['address'] is None:
return None
if self._values['route_domain'] is None:
return self._values['address']
result = "{0}%{1}".format(self._values['address'], self._values['route_domain'])
return result
class ReportableChanges(Changes):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def traffic_group(self):
if self.want.traffic_group != self.have.traffic_group:
return self.want.traffic_group
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = ModuleParameters(client=self.client, params=self.module.params)
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state in ['present', 'enabled', 'disabled']:
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
return result
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
changed = False
if self.exists():
changed = self.remove()
return changed
def read_current_from_device(self):
name = self.want.name
name = name.replace('%', '%25')
resource = self.client.api.tm.ltm.virtual_address_s.virtual_address.load(
name=name,
partition=self.want.partition
)
result = resource.attrs
return ApiParameters(params=result)
def exists(self):
# This addresses cases where the name includes a % sign. The URL in the REST
# API escapes a % sign as %25. If you don't do this, you will get errors in
# the exists() method.
name = self.want.name
name = name.replace('%', '%25')
result = self.client.api.tm.ltm.virtual_address_s.virtual_address.exists(
name=name,
partition=self.want.partition
)
return result
def update(self):
self.have = self.read_current_from_device()
if self.want.netmask is not None:
if self.have.netmask != self.want.netmask:
raise F5ModuleError(
"The netmask cannot be changed. Delete and recreate "
"the virtual address if you need to do this."
)
if self.want.address is not None:
if self.have.address != self.want.full_address:
raise F5ModuleError(
"The address cannot be changed. Delete and recreate "
"the virtual address if you need to do this."
)
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def update_on_device(self):
params = self.changes.api_params()
name = self.want.name
name = name.replace('%', '%25')
resource = self.client.api.tm.ltm.virtual_address_s.virtual_address.load(
name=name,
partition=self.want.partition
)
resource.modify(**params)
def create(self):
self._set_changed_options()
if self.want.traffic_group is None:
self.want.update({'traffic_group': '/Common/traffic-group-1'})
if self.module.check_mode:
return True
self.create_on_device()
if self.exists():
return True
else:
raise F5ModuleError("Failed to create the virtual address")
def create_on_device(self):
params = self.changes.api_params()
self.client.api.tm.ltm.virtual_address_s.virtual_address.create(
name=self.want.name,
partition=self.want.partition,
address=self.changes.address,
**params
)
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the virtual address")
return True
def remove_from_device(self):
name = self.want.name
name = name.replace('%', '%25')
resource = self.client.api.tm.ltm.virtual_address_s.virtual_address.load(
name=name,
partition=self.want.partition
)
resource.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
state=dict(
default='present',
choices=['present', 'absent', 'disabled', 'enabled']
),
name=dict(),
address=dict(),
netmask=dict(
type='str',
default='255.255.255.255',
),
connection_limit=dict(
type='int'
),
arp_state=dict(
choices=['enabled', 'disabled'],
),
auto_delete=dict(
choices=['enabled', 'disabled'],
),
icmp_echo=dict(
choices=['enabled', 'disabled', 'selective'],
),
availability_calculation=dict(
choices=['always', 'when_all_available', 'when_any_available'],
aliases=['advertise_route']
),
use_route_advertisement=dict(
type='bool',
removed_in_version=2.9,
),
route_advertisement=dict(
choices=[
'disabled',
'enabled',
'always',
'selective',
'any',
'all',
]
),
traffic_group=dict(),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
route_domain=dict()
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.required_one_of = [
['name', 'address']
]
self.mutually_exclusive = [
['use_route_advertisement', 'route_advertisement']
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
if not HAS_NETADDR:
module.fail_json(msg="The python netaddr module is required")
try:
client = F5Client(**module.params)
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
module.exit_json(**results)
except F5ModuleError as ex:
cleanup_tokens(client)
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
[
"ansible.module_utils.network.f5.common.fq_name",
"ansible.module_utils.network.f5.bigip.F5Client",
"distutils.version.LooseVersion",
"ansible.module_utils.network.f5.common.cleanup_tokens",
"netaddr.IPAddress",
"ansible.module_utils.network.f5.common.F5ModuleError",
"ansible.module_utils.basic.AnsibleModule"
] |
[((24609, 24707), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'spec.argument_spec', 'supports_check_mode': 'spec.supports_check_mode'}), '(argument_spec=spec.argument_spec, supports_check_mode=spec.\n supports_check_mode)\n', (24622, 24707), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((24937, 24962), 'ansible.module_utils.network.f5.bigip.F5Client', 'F5Client', ([], {}), '(**module.params)\n', (24945, 24962), False, 'from ansible.module_utils.network.f5.bigip import F5Client\n'), ((25063, 25085), 'ansible.module_utils.network.f5.common.cleanup_tokens', 'cleanup_tokens', (['client'], {}), '(client)\n', (25077, 25085), False, 'from ansible.module_utils.network.f5.common import cleanup_tokens\n'), ((12098, 12140), 'netaddr.IPAddress', 'netaddr.IPAddress', (["self._values['netmask']"], {}), "(self._values['netmask'])\n", (12115, 12140), False, 'import netaddr\n'), ((13053, 13107), 'ansible.module_utils.network.f5.common.fq_name', 'fq_name', (['self.partition', "self._values['traffic_group']"], {}), "(self.partition, self._values['traffic_group'])\n", (13060, 13107), False, 'from ansible.module_utils.network.f5.common import fq_name\n'), ((13208, 13265), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""Traffic groups can only exist in /Common"""'], {}), "('Traffic groups can only exist in /Common')\n", (13221, 13265), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((14161, 14182), 'distutils.version.LooseVersion', 'LooseVersion', (['version'], {}), '(version)\n', (14173, 14182), False, 'from distutils.version import LooseVersion\n'), ((14186, 14208), 'distutils.version.LooseVersion', 'LooseVersion', (['"""13.0.0"""'], {}), "('13.0.0')\n", (14198, 14208), False, 'from distutils.version import LooseVersion\n'), ((14863, 14905), 'netaddr.IPAddress', 'netaddr.IPAddress', (["self._values['address']"], {}), "(self._values['address'])\n", (14880, 14905), False, 'import netaddr\n'), ((21768, 21821), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""Failed to create the virtual address"""'], {}), "('Failed to create the virtual address')\n", (21781, 21821), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((22280, 22333), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""Failed to delete the virtual address"""'], {}), "('Failed to delete the virtual address')\n", (22293, 22333), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((25162, 25184), 'ansible.module_utils.network.f5.common.cleanup_tokens', 'cleanup_tokens', (['client'], {}), '(client)\n', (25176, 25184), False, 'from ansible.module_utils.network.f5.common import cleanup_tokens\n'), ((12231, 12296), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""The provided \'netmask\' is not a valid IP address"""'], {}), '("The provided \'netmask\' is not a valid IP address")\n', (12244, 12296), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((14996, 15061), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""The provided \'address\' is not a valid IP address"""'], {}), '("The provided \'address\' is not a valid IP address")\n', (15009, 15061), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((20437, 20558), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""The netmask cannot be changed. Delete and recreate the virtual address if you need to do this."""'], {}), "(\n 'The netmask cannot be changed. Delete and recreate the virtual address if you need to do this.'\n )\n", (20450, 20558), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((20734, 20855), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""The address cannot be changed. Delete and recreate the virtual address if you need to do this."""'], {}), "(\n 'The address cannot be changed. Delete and recreate the virtual address if you need to do this.'\n )\n", (20747, 20855), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n'), ((15617, 15677), 'ansible.module_utils.network.f5.common.F5ModuleError', 'F5ModuleError', (['"""The specified \'route_domain\' was not found."""'], {}), '("The specified \'route_domain\' was not found.")\n', (15630, 15677), False, 'from ansible.module_utils.network.f5.common import F5ModuleError\n')]
|
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
class DataAugmentor:
"""
A class used for data augmentation (partially taken from : https://www.wouterbulten.nl/blog/tech/data-augmentation-using-tensorflow-data-dataset/)
Attributes
----------
batch : tf.Tensor, optional
The batch to augment
batchSize: int
The batch size
seed: int, optional
Random seed
Methods
-------
flip
Flip Augmentation
color
Color Augmentation
gaussian
Gaussian Noise
brightness
Custom Brightness Augmentation
zoom
Crop Augmentation
kerasAug
Inbuilt Keras Augmentations
augment
Wrapper Augmentation Function
"""
def __init__(self, batch=None, batchSize=50, seed=0):
if batch is not None:
self.dataset = batch
self.seed = seed
tf.random.set_seed(self.seed)
np.random.seed(self.seed)
self.batchSize = batchSize
def flip(self, x: tf.Tensor) -> tf.Tensor:
"""Flip augmentation
Args:
x: Image to flip
Returns:
Augmented image
"""
x = tf.image.random_flip_left_right(x, seed=self.seed)
return x
def color(self, x: tf.Tensor) -> tf.Tensor:
"""Color augmentation
Args:
x: Image
Returns:
Augmented image
#"""
x = tf.image.random_hue(x, 0.05, seed=self.seed)
x = tf.image.random_saturation(x, 0.6, 1.2, seed=self.seed)
x = tf.image.random_brightness(x, 0.05, seed=self.seed)
x = tf.image.random_contrast(x, 0.7, 1.0, seed=self.seed)
return x
    def gaussian(self, x: tf.Tensor) -> tf.Tensor:
        """Additive Gaussian noise augmentation"""
        mean = tf.keras.backend.mean(x)
        std = tf.keras.backend.std(x)
        max_ = tf.keras.backend.max(x)
        min_ = tf.keras.backend.min(x)
        ptp = max_ - min_
        noise = tf.random.normal(
            shape=tf.shape(x),
            mean=0,
            # the original referenced `self.var`, which is never defined
            # anywhere in the class; scale by the per-image std instead
            stddev=0.3 * std,
            dtype=tf.float32,
            seed=self.seed,
        )
        # noise_img = tf.clip_by_value(((x - mean)/std + noise)*std + mean,
        #                              clip_value_min = min_, clip_value_max=max_)
        noise_img = x + noise
        return noise_img
def brightness(self, x: tf.Tensor) -> tf.Tensor:
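        """Custom brightness augmentation
        Args:
            x: Image
        Returns:
            Augmented image
        """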
max_ = tf.keras.backend.max(x)
min_ = tf.keras.backend.min(x)
brightness_val = 0.1 * np.random.random_sample() - 0.05
noise = tf.constant(brightness_val, shape=x.shape)
noise_img = x + noise
        noise_img = tf.clip_by_value(noise_img, clip_value_min=min_, clip_value_max=max_)
return noise_img
def zoom(self, x: tf.Tensor) -> tf.Tensor:
"""Zoom augmentation
Args:
x: Image
Returns:
Augmented image
"""
        # Generate crop settings, ranging from a 1% to 15% crop.
scales = list(np.arange(0.85, 1.0, 0.01))
boxes = np.zeros((len(scales), 4))
for i, scale in enumerate(scales):
x1 = y1 = 0.5 - (0.5 * scale)
x2 = y2 = 0.5 + (0.5 * scale)
boxes[i] = [x1, y1, x2, y2]
def random_crop(img):
# Create different crops for an image
crops = tf.image.crop_and_resize(
[img],
boxes=boxes,
box_indices=np.zeros(len(scales)),
crop_size=(x.shape[0], x.shape[1]),
)
# Return a random crop
return crops[
tf.random.uniform(
shape=[],
minval=0,
maxval=len(scales),
dtype=tf.int32,
seed=self.seed,
)
]
choice = tf.random.uniform(
shape=[], minval=0.0, maxval=1.0, dtype=tf.float32, seed=self.seed
)
# Only apply cropping 50% of the time
return tf.cond(choice < 0.5, lambda: x, lambda: random_crop(x))
def kerasAug(self, x: tf.Tensor) -> tf.Tensor:
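        """Inbuilt Keras augmentations
        Args:
            x: Image batch
        Returns:
            Augmented batch
        """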
datagen = tf.keras.preprocessing.image.ImageDataGenerator(
rotation_range=2,
width_shift_range=0,
height_shift_range=0,
horizontal_flip=False,
shear_range=0,
fill_mode="nearest",
dtype=tf.float32,
)
return datagen.flow(
x, batch_size=self.batchSize, shuffle=False, seed=self.seed
).next()
def augment(self, batch=None):
if batch is not None:
self.dataset = batch
self.dataset = tf.data.Dataset.from_tensor_slices(self.dataset.numpy())
# Add augmentations
augmentations = [self.flip, self.color, self.zoom]
# Add the augmentations to the dataset
for f in augmentations:
# Apply the augmentation, run 4 jobs in parallel.
self.dataset = self.dataset.map(f)
self.dataset = next(iter(self.dataset.batch(self.batchSize)))
return self.dataset
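# Minimal usage sketch (illustrative only; the batch shape and values below
# are assumptions, not taken from the original code):
if __name__ == "__main__":
    augmentor = DataAugmentor(batchSize=8, seed=0)
    images = tf.random.uniform((8, 64, 64, 3))  # fake batch of RGB images in [0, 1]
    augmented = augmentor.augment(images)
    print(augmented.shape)  # expected: (8, 64, 64, 3)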
|
[
"tensorflow.random.set_seed",
"tensorflow.keras.backend.min",
"tensorflow.keras.preprocessing.image.ImageDataGenerator",
"numpy.random.seed",
"numpy.random.random_sample",
"tensorflow.clip_by_value",
"tensorflow.random.uniform",
"tensorflow.image.random_contrast",
"tensorflow.keras.backend.mean",
"tensorflow.keras.backend.max",
"tensorflow.image.random_hue",
"tensorflow.image.random_flip_left_right",
"tensorflow.keras.backend.std",
"tensorflow.constant",
"tensorflow.shape",
"numpy.arange",
"tensorflow.image.random_saturation",
"tensorflow.image.random_brightness"
] |
[((1028, 1057), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['self.seed'], {}), '(self.seed)\n', (1046, 1057), True, 'import tensorflow as tf\n'), ((1066, 1091), 'numpy.random.seed', 'np.random.seed', (['self.seed'], {}), '(self.seed)\n', (1080, 1091), True, 'import numpy as np\n'), ((1318, 1368), 'tensorflow.image.random_flip_left_right', 'tf.image.random_flip_left_right', (['x'], {'seed': 'self.seed'}), '(x, seed=self.seed)\n', (1349, 1368), True, 'import tensorflow as tf\n'), ((1573, 1617), 'tensorflow.image.random_hue', 'tf.image.random_hue', (['x', '(0.05)'], {'seed': 'self.seed'}), '(x, 0.05, seed=self.seed)\n', (1592, 1617), True, 'import tensorflow as tf\n'), ((1630, 1685), 'tensorflow.image.random_saturation', 'tf.image.random_saturation', (['x', '(0.6)', '(1.2)'], {'seed': 'self.seed'}), '(x, 0.6, 1.2, seed=self.seed)\n', (1656, 1685), True, 'import tensorflow as tf\n'), ((1698, 1749), 'tensorflow.image.random_brightness', 'tf.image.random_brightness', (['x', '(0.05)'], {'seed': 'self.seed'}), '(x, 0.05, seed=self.seed)\n', (1724, 1749), True, 'import tensorflow as tf\n'), ((1762, 1815), 'tensorflow.image.random_contrast', 'tf.image.random_contrast', (['x', '(0.7)', '(1.0)'], {'seed': 'self.seed'}), '(x, 0.7, 1.0, seed=self.seed)\n', (1786, 1815), True, 'import tensorflow as tf\n'), ((1902, 1926), 'tensorflow.keras.backend.mean', 'tf.keras.backend.mean', (['x'], {}), '(x)\n', (1923, 1926), True, 'import tensorflow as tf\n'), ((1941, 1964), 'tensorflow.keras.backend.std', 'tf.keras.backend.std', (['x'], {}), '(x)\n', (1961, 1964), True, 'import tensorflow as tf\n'), ((1980, 2003), 'tensorflow.keras.backend.max', 'tf.keras.backend.max', (['x'], {}), '(x)\n', (2000, 2003), True, 'import tensorflow as tf\n'), ((2019, 2042), 'tensorflow.keras.backend.min', 'tf.keras.backend.min', (['x'], {}), '(x)\n', (2039, 2042), True, 'import tensorflow as tf\n'), ((2514, 2537), 'tensorflow.keras.backend.max', 'tf.keras.backend.max', (['x'], {}), '(x)\n', (2534, 2537), True, 'import tensorflow as tf\n'), ((2553, 2576), 'tensorflow.keras.backend.min', 'tf.keras.backend.min', (['x'], {}), '(x)\n', (2573, 2576), True, 'import tensorflow as tf\n'), ((2657, 2699), 'tensorflow.constant', 'tf.constant', (['brightness_val'], {'shape': 'x.shape'}), '(brightness_val, shape=x.shape)\n', (2668, 2699), True, 'import tensorflow as tf\n'), ((2750, 2811), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['x'], {'clip_value_min': 'min_', 'clip_value_max': 'max_'}), '(x, clip_value_min=min_, clip_value_max=max_)\n', (2766, 2811), True, 'import tensorflow as tf\n'), ((3952, 4042), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '[]', 'minval': '(0.0)', 'maxval': '(1.0)', 'dtype': 'tf.float32', 'seed': 'self.seed'}), '(shape=[], minval=0.0, maxval=1.0, dtype=tf.float32, seed=\n self.seed)\n', (3969, 4042), True, 'import tensorflow as tf\n'), ((4250, 4443), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'tf.keras.preprocessing.image.ImageDataGenerator', ([], {'rotation_range': '(2)', 'width_shift_range': '(0)', 'height_shift_range': '(0)', 'horizontal_flip': '(False)', 'shear_range': '(0)', 'fill_mode': '"""nearest"""', 'dtype': 'tf.float32'}), "(rotation_range=2,\n width_shift_range=0, height_shift_range=0, horizontal_flip=False,\n shear_range=0, fill_mode='nearest', dtype=tf.float32)\n", (4297, 4443), True, 'import tensorflow as tf\n'), ((3099, 3125), 'numpy.arange', 'np.arange', (['(0.85)', '(1.0)', '(0.01)'], {}), '(0.85, 1.0, 0.01)\n', (3108, 3125), True, 'import numpy as np\n'), ((2121, 2132), 'tensorflow.shape', 'tf.shape', (['x'], {}), '(x)\n', (2129, 2132), True, 'import tensorflow as tf\n'), ((2608, 2633), 'numpy.random.random_sample', 'np.random.random_sample', ([], {}), '()\n', (2631, 2633), True, 'import numpy as np\n')]
|
# Generated by Django 2.0.9 on 2019-04-24 20:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('movement', '0004_auto_20190423_1633'),
]
operations = [
migrations.AlterField(
model_name='movementdailysquare',
name='movement_cd',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='movement.MovementDailySquare', verbose_name='Movimiento CD Contrapartida'),
),
migrations.AlterField(
model_name='movementdailysquare',
name='movement_don_juan',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='movement.MovementDonJuan', verbose_name='Movimiento Don Juan Contrapartida'),
),
migrations.AlterField(
model_name='movementdailysquare',
name='movement_don_juan_usd',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='movement.MovementDonJuanUsd', verbose_name='Movimiento Don Juan Dolares Contrapartida'),
),
migrations.AlterField(
model_name='movementdailysquare',
name='movement_office',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='movement.MovementOffice', verbose_name='Movimiento Oficina Contrapartida'),
),
migrations.AlterField(
model_name='movementdailysquare',
name='movement_partner',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='movement.MovementPartner', verbose_name='Movimiento Socio Contrapartida'),
),
]
|
[
"django.db.models.OneToOneField"
] |
[((388, 565), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""movement.MovementDailySquare"""', 'verbose_name': '"""Movimiento CD Contrapartida"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='movement.MovementDailySquare', verbose_name=\n 'Movimiento CD Contrapartida')\n", (408, 565), False, 'from django.db import migrations, models\n'), ((701, 880), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""movement.MovementDonJuan"""', 'verbose_name': '"""Movimiento Don Juan Contrapartida"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='movement.MovementDonJuan', verbose_name=\n 'Movimiento Don Juan Contrapartida')\n", (721, 880), False, 'from django.db import migrations, models\n'), ((1020, 1210), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""movement.MovementDonJuanUsd"""', 'verbose_name': '"""Movimiento Don Juan Dolares Contrapartida"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='movement.MovementDonJuanUsd', verbose_name=\n 'Movimiento Don Juan Dolares Contrapartida')\n", (1040, 1210), False, 'from django.db import migrations, models\n'), ((1344, 1521), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""movement.MovementOffice"""', 'verbose_name': '"""Movimiento Oficina Contrapartida"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='movement.MovementOffice', verbose_name=\n 'Movimiento Oficina Contrapartida')\n", (1364, 1521), False, 'from django.db import migrations, models\n'), ((1656, 1832), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""movement.MovementPartner"""', 'verbose_name': '"""Movimiento Socio Contrapartida"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='movement.MovementPartner', verbose_name=\n 'Movimiento Socio Contrapartida')\n", (1676, 1832), False, 'from django.db import migrations, models\n')]
|
import unittest
from quickbooks import QuickBooks
from quickbooks.objects.vendor import Vendor, ContactInfo
class VendorTests(unittest.TestCase):
def test_unicode(self):
vendor = Vendor()
vendor.DisplayName = "test"
        self.assertEqual(str(vendor), "test")
def test_to_ref(self):
vendor = Vendor()
vendor.DisplayName = "test"
vendor.Id = 100
ref = vendor.to_ref()
        self.assertEqual(ref.name, "test")
        self.assertEqual(ref.type, "Vendor")
        self.assertEqual(ref.value, 100)
def test_valid_object_name(self):
obj = Vendor()
client = QuickBooks()
result = client.isvalid_object_name(obj.qbo_object_name)
self.assertTrue(result)
class ContactInfoTests(unittest.TestCase):
def test_init(self):
contact_info = ContactInfo()
        self.assertEqual(contact_info.Type, "")
        self.assertEqual(contact_info.Telephone, None)
|
[
"quickbooks.objects.vendor.ContactInfo",
"quickbooks.objects.vendor.Vendor",
"quickbooks.QuickBooks"
] |
[((194, 202), 'quickbooks.objects.vendor.Vendor', 'Vendor', ([], {}), '()\n', (200, 202), False, 'from quickbooks.objects.vendor import Vendor, ContactInfo\n'), ((332, 340), 'quickbooks.objects.vendor.Vendor', 'Vendor', ([], {}), '()\n', (338, 340), False, 'from quickbooks.objects.vendor import Vendor, ContactInfo\n'), ((618, 626), 'quickbooks.objects.vendor.Vendor', 'Vendor', ([], {}), '()\n', (624, 626), False, 'from quickbooks.objects.vendor import Vendor, ContactInfo\n'), ((644, 656), 'quickbooks.QuickBooks', 'QuickBooks', ([], {}), '()\n', (654, 656), False, 'from quickbooks import QuickBooks\n'), ((848, 861), 'quickbooks.objects.vendor.ContactInfo', 'ContactInfo', ([], {}), '()\n', (859, 861), False, 'from quickbooks.objects.vendor import Vendor, ContactInfo\n')]
|
from cone.app import compat
from cone.app.browser import RelatedViewConsumer
from cone.app.browser.batch import Batch
from cone.app.browser.utils import format_date
from cone.app.browser.utils import make_query
from cone.app.browser.utils import make_url
from cone.app.utils import node_path
from cone.tile import Tile
from node.utils import safe_decode
from plumber import plumbing
class RowData(dict):
def __init__(self, selectable=False, target=None, css=''):
self.selectable = selectable
self.target = target
self.css = css
@plumbing(RelatedViewConsumer)
class Table(Tile):
"""Abstract table tile. Provides rendering of sortable, batched tables.
A subclass of this tile must be registered under the same name as defined
at ``self.table_tile_name``, normally bound to template
``cone.app:browser/templates/table.pt``
"""
wrapper_binding = 'batchclicked sortclicked'
table_id = 'table'
table_css = ''
table_tile_name = 'table'
col_defs = []
default_sort = None
default_order = None
default_slicesize = 15
query_whitelist = []
show_title = True
show_filter = False
show_slicesize = True
head_additional = None
display_table_header = True
display_table_footer = True
ajax_path = None
ajax_path_event = None
table_length_size = 'col-xs-4 col-sm3'
table_filter_size = 'col-xs-3'
@property
def slice(self):
return TableSlice(self, self.model, self.request)
@property
def batch(self):
return TableBatch(self)(self.model, self.request)
@property
def slicesize(self):
return int(self.request.params.get('size', self.default_slicesize))
@property
def slicesizes(self):
return [i * self.default_slicesize for i in range(1, 5)]
@property
def table_title(self):
return self.model.metadata.title
@property
def slice_target(self):
return self.make_url({
'sort': self.sort_column,
'order': self.sort_order,
'term': self.filter_term,
})
@property
def filter_target(self):
return self.make_url({
'sort': self.sort_column,
'order': self.sort_order,
'size': self.slicesize,
})
@property
def filter_term(self):
term = self.request.params.get('term')
if term:
term = term.encode('utf-8') if compat.IS_PY2 else term
term = compat.unquote(term)
term = term.decode('utf-8') if compat.IS_PY2 else term
return term
@property
def sort_column(self):
return self.request.params.get('sort', self.default_sort)
@property
def sort_order(self):
return self.request.params.get('order', self.default_order)
@property
def sort_index(self):
"""Index of recent sort column.
"""
col = self.sort_column
idx = 0
for col_def in self.col_defs:
key = col_def.get('sort_key')
if key == col:
return idx
idx += 1
def make_query(self, params):
"""Create query considering ``query_whitelist``.
:param params: Dictionary with query parameters.
:return: Query as string.
"""
p = dict()
for param in self.query_whitelist:
p[param] = self.request.params.get(param, '')
p.update(params)
return make_query(**p)
def make_url(self, params, path=None, include_view=False):
"""Create URL considering ``query_whitelist``.
:param params: Dictionary with query parameters.
:param path: Optional model path, if ``None``, path gets taken from
``self.model``
:param include_view: Boolean whether to include
``self.related_view`` to URL.
:return: URL as string.
"""
return safe_decode(make_url(
self.request,
path=path,
node=None if path else self.model,
resource=self.related_view if include_view else None,
query=self.make_query(params)))
def format_date(self, dt):
return format_date(dt)
def th_defs(self, sortkey):
cur_sort = self.sort_column
cur_order = self.sort_order
selected = cur_sort == sortkey
alter = selected and cur_order == 'desc'
order = alter and 'asc' or 'desc'
params = {
'b_page': self.request.params.get('b_page', '0'),
'sort': sortkey,
'order': order,
'size': self.slicesize,
'term': self.filter_term,
}
url = self.make_url(params)
css = selected and order or ''
return css, url
@property
def item_count(self):
raise NotImplementedError("Abstract table does not implement "
"``item_count``.")
def sorted_rows(self, start, end, sort, order):
raise NotImplementedError("Abstract table does not implement "
"``sorted_rows``.")
class TableSlice(object):
def __init__(self, table_tile, model, request):
self.table_tile = table_tile
self.model = model
self.request = request
@property
def slice(self):
current = int(self.request.params.get('b_page', '0'))
start = current * self.table_tile.slicesize
end = start + self.table_tile.slicesize
return start, end
@property
def rows(self):
start, end = self.slice
return self.table_tile.sorted_rows(
start, end,
self.table_tile.sort_column,
self.table_tile.sort_order)
class TableBatch(Batch):
def __init__(self, table_tile):
self.table_tile = table_tile
self.name = table_tile.table_id + 'batch'
self.related_view = table_tile.related_view
        self.ajax_path = table_tile.ajax_path
        self.ajax_path_event = table_tile.ajax_path_event
@property
def display(self):
return len(self.vocab) > 1
@property
def vocab(self):
ret = list()
path = node_path(self.model)
count = self.table_tile.item_count
slicesize = self.table_tile.slicesize
pages = count // slicesize
if count % slicesize != 0:
pages += 1
current = self.request.params.get('b_page', '0')
params = {
'sort': self.table_tile.sort_column,
'order': self.table_tile.sort_order,
'size': slicesize,
'term': self.table_tile.filter_term,
}
for term in self.table_tile.query_whitelist:
params[term] = self.request.params.get(term, '')
for i in range(pages):
params['b_page'] = str(i)
query = make_query(**params)
url = make_url(
self.request,
path=path,
# resource=self.related_view,
query=query
)
ret.append({
'page': '%i' % (i + 1),
'current': current == str(i),
'visible': True,
'url': url,
})
return ret
|
[
"cone.app.browser.utils.make_url",
"cone.app.browser.utils.format_date",
"cone.app.browser.utils.make_query",
"plumber.plumbing",
"cone.app.compat.unquote",
"cone.app.utils.node_path"
] |
[((562, 591), 'plumber.plumbing', 'plumbing', (['RelatedViewConsumer'], {}), '(RelatedViewConsumer)\n', (570, 591), False, 'from plumber import plumbing\n'), ((3466, 3481), 'cone.app.browser.utils.make_query', 'make_query', ([], {}), '(**p)\n', (3476, 3481), False, 'from cone.app.browser.utils import make_query\n'), ((4194, 4209), 'cone.app.browser.utils.format_date', 'format_date', (['dt'], {}), '(dt)\n', (4205, 4209), False, 'from cone.app.browser.utils import format_date\n'), ((6165, 6186), 'cone.app.utils.node_path', 'node_path', (['self.model'], {}), '(self.model)\n', (6174, 6186), False, 'from cone.app.utils import node_path\n'), ((2490, 2510), 'cone.app.compat.unquote', 'compat.unquote', (['term'], {}), '(term)\n', (2504, 2510), False, 'from cone.app import compat\n'), ((6836, 6856), 'cone.app.browser.utils.make_query', 'make_query', ([], {}), '(**params)\n', (6846, 6856), False, 'from cone.app.browser.utils import make_query\n'), ((6875, 6921), 'cone.app.browser.utils.make_url', 'make_url', (['self.request'], {'path': 'path', 'query': 'query'}), '(self.request, path=path, query=query)\n', (6883, 6921), False, 'from cone.app.browser.utils import make_url\n')]
|
import inspect
import os.path
from six import reraise as raise_
import pytest
try:
from py.io import saferepr
except ImportError:
saferepr = repr
_FAILED_ASSUMPTIONS = []
_ASSUMPTION_LOCALS = []
class FailedAssumption(Exception):
pass
def assume(expr, msg=''):
"""
Checks the expression, if it's false, add it to the
list of failed assumptions. Also, add the locals at each failed
assumption, if showlocals is set.
:param expr: Expression to 'assert' on.
:param msg: Message to display if the assertion fails.
:return: None
"""
if not expr:
(frame, filename, line, funcname, contextlist) = inspect.stack()[1][0:5]
# get filename, line, and context
filename = os.path.relpath(filename)
context = contextlist[0].lstrip() if not msg else msg
# format entry
entry = u"{filename}:{line}: AssumptionFailure\n>>\t{context}".format(**locals())
# add entry
_FAILED_ASSUMPTIONS.append(entry)
if getattr(pytest, "_showlocals", None):
# Debatable whether we should display locals for
# every failed assertion, or just the final one.
# I'm defaulting to per-assumption, just because vars
# can easily change between assumptions.
pretty_locals = ["\t%-10s = %s" % (name, saferepr(val))
for name, val in frame.f_locals.items()]
_ASSUMPTION_LOCALS.append(pretty_locals)
return False
else:
return True
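# Illustrative usage only (hypothetical test body; ``assume`` is also exposed
# as ``pytest.assume`` by ``pytest_configure`` below):
def _example_usage():
    assume(1 + 1 == 2, "arithmetic sanity check")
    assume('x' in 'xyz')  # a falsy expression is recorded, not raised immediately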
def pytest_configure(config):
"""
Add tracking lists to the pytest namespace, so we can
always access it, as well as the 'assume' function itself.
:return: Dictionary of name: values added to the pytest namespace.
"""
pytest.assume = assume
pytest._showlocals = config.getoption("showlocals")
@pytest.hookimpl(hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem):
"""
Using pyfunc_call to be as 'close' to the actual call of the test as possible.
This is executed immediately after the test itself is called.
Note: I'm not happy with exception handling in here.
"""
__tracebackhide__ = True
outcome = None
try:
outcome = yield
finally:
failed_assumptions = _FAILED_ASSUMPTIONS
assumption_locals = _ASSUMPTION_LOCALS
if failed_assumptions:
failed_count = len(failed_assumptions)
root_msg = "\n%s Failed Assumptions:\n" % failed_count
if assumption_locals:
assume_data = zip(failed_assumptions, assumption_locals)
longrepr = ["{0}\nLocals:\n{1}\n\n".format(assumption, "\n".join(flocals))
for assumption, flocals in assume_data]
else:
longrepr = ["\n\n".join(failed_assumptions)]
del _FAILED_ASSUMPTIONS[:]
del _ASSUMPTION_LOCALS[:]
if outcome and outcome.excinfo:
root_msg = "\nOriginal Failure: \n>> %s\n" % repr(outcome.excinfo[1]) + root_msg
raise_(FailedAssumption, FailedAssumption(root_msg + "".join(longrepr)), outcome.excinfo[2])
else:
raise FailedAssumption(root_msg + "".join(longrepr))
|
[
"py.io.saferepr",
"pytest.hookimpl",
"inspect.stack"
] |
[((1862, 1895), 'pytest.hookimpl', 'pytest.hookimpl', ([], {'hookwrapper': '(True)'}), '(hookwrapper=True)\n', (1877, 1895), False, 'import pytest\n'), ((654, 669), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (667, 669), False, 'import inspect\n'), ((1345, 1358), 'py.io.saferepr', 'saferepr', (['val'], {}), '(val)\n', (1353, 1358), False, 'from py.io import saferepr\n')]
|
import pandas as pd
import pytest
import xwrf
from . import importorskip
@importorskip('cf_xarray')
@pytest.mark.parametrize(
'name, cf_grid_mapping_name', [('lambert_conformal', 'lambert_conformal_conic')]
)
def test_postprocess(name, cf_grid_mapping_name):
# Verify initial/raw state
raw_ds = xwrf.tutorial.open_dataset(name)
assert pd.api.types.is_string_dtype(raw_ds.Times.dtype)
assert pd.api.types.is_numeric_dtype(raw_ds.Time.dtype)
assert 'time' not in raw_ds.cf.coordinates
assert raw_ds.cf.standard_names == {}
# Postprocess without decoding times
dsa = raw_ds.xwrf.postprocess(decode_times=False)
assert pd.api.types.is_numeric_dtype(dsa.Time.dtype)
# Postprocess
ds = raw_ds.xwrf.postprocess()
# Check for time coordinate handling
assert pd.api.types.is_datetime64_dtype(ds.Time.dtype)
assert 'time' in ds.cf.coordinates
# Check for projection handling
assert ds['wrf_projection'].attrs['grid_mapping_name'] == cf_grid_mapping_name
# Check for standard name and variable handling
standard_names = ds.cf.standard_names
assert 'x' in standard_names['projection_x_coordinate']
assert 'y' in standard_names['projection_y_coordinate']
assert 'z' in standard_names['atmosphere_hybrid_sigma_pressure_coordinate']
assert standard_names['time'] == ['Time']
assert standard_names['humidity_mixing_ratio'] == ['Q2', 'QVAPOR']
assert standard_names['air_temperature'] == ['T2']
# Check for time dimension reduction
assert ds['z'].shape == (39,)
assert ds['z_stag'].shape == (40,)
assert ds['XLAT'].shape == ds['XLONG'].shape == (29, 31)
assert ds['XLAT_U'].shape == ds['XLONG_U'].shape == (29, 32)
assert ds['XLAT_V'].shape == ds['XLONG_V'].shape == (30, 31)
# Check for diagnostic variable calculation
assert 'air_potential_temperature' in ds.data_vars
assert 'air_pressure' in ds.data_vars
assert 'geopotential' in ds.data_vars
assert 'geopotential_height' in ds.data_vars
assert 'T' not in ds.data_vars
assert 'P' not in ds.data_vars
assert 'PB' not in ds.data_vars
assert 'PH' not in ds.data_vars
assert 'PHB' not in ds.data_vars
|
[
"xwrf.tutorial.open_dataset",
"pandas.api.types.is_datetime64_dtype",
"pandas.api.types.is_string_dtype",
"pandas.api.types.is_numeric_dtype",
"pytest.mark.parametrize"
] |
[((105, 214), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""name, cf_grid_mapping_name"""', "[('lambert_conformal', 'lambert_conformal_conic')]"], {}), "('name, cf_grid_mapping_name', [('lambert_conformal',\n 'lambert_conformal_conic')])\n", (128, 214), False, 'import pytest\n'), ((312, 344), 'xwrf.tutorial.open_dataset', 'xwrf.tutorial.open_dataset', (['name'], {}), '(name)\n', (338, 344), False, 'import xwrf\n'), ((356, 404), 'pandas.api.types.is_string_dtype', 'pd.api.types.is_string_dtype', (['raw_ds.Times.dtype'], {}), '(raw_ds.Times.dtype)\n', (384, 404), True, 'import pandas as pd\n'), ((416, 464), 'pandas.api.types.is_numeric_dtype', 'pd.api.types.is_numeric_dtype', (['raw_ds.Time.dtype'], {}), '(raw_ds.Time.dtype)\n', (445, 464), True, 'import pandas as pd\n'), ((661, 706), 'pandas.api.types.is_numeric_dtype', 'pd.api.types.is_numeric_dtype', (['dsa.Time.dtype'], {}), '(dsa.Time.dtype)\n', (690, 706), True, 'import pandas as pd\n'), ((814, 861), 'pandas.api.types.is_datetime64_dtype', 'pd.api.types.is_datetime64_dtype', (['ds.Time.dtype'], {}), '(ds.Time.dtype)\n', (846, 861), True, 'import pandas as pd\n')]
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import fsspec
import pytest
import torch
from fsspec.implementations.local import LocalFileSystem
from pytorch_lightning.utilities.cloud_io import get_filesystem
from tests.helpers import BoringModel
from tests.helpers.advanced_models import BasicGAN, ParityModuleRNN
from tests.helpers.runif import RunIf
@pytest.mark.parametrize("modelclass", [BoringModel, ParityModuleRNN, BasicGAN])
def test_torchscript_input_output(modelclass):
"""Test that scripted LightningModule forward works."""
model = modelclass()
if isinstance(model, BoringModel):
model.example_input_array = torch.randn(5, 32)
script = model.to_torchscript()
assert isinstance(script, torch.jit.ScriptModule)
model.eval()
with torch.no_grad():
model_output = model(model.example_input_array)
script_output = script(model.example_input_array)
assert torch.allclose(script_output, model_output)
@pytest.mark.parametrize("modelclass", [BoringModel, ParityModuleRNN, BasicGAN])
def test_torchscript_example_input_output_trace(modelclass):
"""Test that traced LightningModule forward works with example_input_array"""
model = modelclass()
if isinstance(model, BoringModel):
model.example_input_array = torch.randn(5, 32)
script = model.to_torchscript(method="trace")
assert isinstance(script, torch.jit.ScriptModule)
model.eval()
with torch.no_grad():
model_output = model(model.example_input_array)
script_output = script(model.example_input_array)
assert torch.allclose(script_output, model_output)
def test_torchscript_input_output_trace():
"""Test that traced LightningModule forward works with example_inputs"""
model = BoringModel()
example_inputs = torch.randn(1, 32)
script = model.to_torchscript(example_inputs=example_inputs, method="trace")
assert isinstance(script, torch.jit.ScriptModule)
model.eval()
with torch.no_grad():
model_output = model(example_inputs)
script_output = script(example_inputs)
assert torch.allclose(script_output, model_output)
@RunIf(min_gpus=1)
@pytest.mark.parametrize("device", [torch.device("cpu"), torch.device("cuda", 0)])
def test_torchscript_device(device):
"""Test that scripted module is on the correct device."""
model = BoringModel().to(device)
model.example_input_array = torch.randn(5, 32)
script = model.to_torchscript()
assert next(script.parameters()).device == device
script_output = script(model.example_input_array.to(device))
assert script_output.device == device
def test_torchscript_retain_training_state():
"""Test that torchscript export does not alter the training mode of original model."""
model = BoringModel()
model.train(True)
script = model.to_torchscript()
assert model.training
assert not script.training
model.train(False)
_ = model.to_torchscript()
assert not model.training
assert not script.training
@pytest.mark.parametrize("modelclass", [BoringModel, ParityModuleRNN, BasicGAN])
def test_torchscript_properties(modelclass):
"""Test that scripted LightningModule has unnecessary methods removed."""
model = modelclass()
script = model.to_torchscript()
assert not hasattr(model, "batch_size") or hasattr(script, "batch_size")
assert not hasattr(model, "learning_rate") or hasattr(script, "learning_rate")
assert not callable(getattr(script, "training_step", None))
@pytest.mark.parametrize("modelclass", [BoringModel, ParityModuleRNN, BasicGAN])
def test_torchscript_save_load(tmpdir, modelclass):
"""Test that scripted LightningModule is correctly saved and can be loaded."""
model = modelclass()
output_file = str(tmpdir / "model.pt")
script = model.to_torchscript(file_path=output_file)
loaded_script = torch.jit.load(output_file)
assert torch.allclose(next(script.parameters()), next(loaded_script.parameters()))
@pytest.mark.parametrize("modelclass", [BoringModel, ParityModuleRNN, BasicGAN])
def test_torchscript_save_load_custom_filesystem(tmpdir, modelclass):
"""Test that scripted LightningModule is correctly saved and can be loaded with custom filesystems."""
_DUMMY_PRFEIX = "dummy"
_PREFIX_SEPARATOR = "://"
class DummyFileSystem(LocalFileSystem):
...
fsspec.register_implementation(_DUMMY_PRFEIX, DummyFileSystem, clobber=True)
model = modelclass()
output_file = os.path.join(_DUMMY_PRFEIX, _PREFIX_SEPARATOR, tmpdir, "model.pt")
script = model.to_torchscript(file_path=output_file)
fs = get_filesystem(output_file)
with fs.open(output_file, "rb") as f:
loaded_script = torch.jit.load(f)
assert torch.allclose(next(script.parameters()), next(loaded_script.parameters()))
def test_torchscript_invalid_method(tmpdir):
"""Test that an error is thrown with invalid torchscript method"""
model = BoringModel()
model.train(True)
with pytest.raises(ValueError, match="only supports 'script' or 'trace'"):
model.to_torchscript(method="temp")
def test_torchscript_with_no_input(tmpdir):
"""Test that an error is thrown when there is no input tensor"""
model = BoringModel()
model.example_input_array = None
with pytest.raises(ValueError, match="requires either `example_inputs` or `model.example_input_array`"):
model.to_torchscript(method="trace")
|
[
"os.path.join",
"pytorch_lightning.utilities.cloud_io.get_filesystem",
"fsspec.register_implementation",
"torch.randn",
"tests.helpers.BoringModel",
"torch.jit.load",
"pytest.raises",
"tests.helpers.runif.RunIf",
"torch.device",
"pytest.mark.parametrize",
"torch.no_grad",
"torch.allclose"
] |
[((908, 987), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""modelclass"""', '[BoringModel, ParityModuleRNN, BasicGAN]'], {}), "('modelclass', [BoringModel, ParityModuleRNN, BasicGAN])\n", (931, 987), False, 'import pytest\n'), ((1519, 1598), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""modelclass"""', '[BoringModel, ParityModuleRNN, BasicGAN]'], {}), "('modelclass', [BoringModel, ParityModuleRNN, BasicGAN])\n", (1542, 1598), False, 'import pytest\n'), ((2691, 2708), 'tests.helpers.runif.RunIf', 'RunIf', ([], {'min_gpus': '(1)'}), '(min_gpus=1)\n', (2696, 2708), False, 'from tests.helpers.runif import RunIf\n'), ((3575, 3654), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""modelclass"""', '[BoringModel, ParityModuleRNN, BasicGAN]'], {}), "('modelclass', [BoringModel, ParityModuleRNN, BasicGAN])\n", (3598, 3654), False, 'import pytest\n'), ((4066, 4145), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""modelclass"""', '[BoringModel, ParityModuleRNN, BasicGAN]'], {}), "('modelclass', [BoringModel, ParityModuleRNN, BasicGAN])\n", (4089, 4145), False, 'import pytest\n'), ((4544, 4623), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""modelclass"""', '[BoringModel, ParityModuleRNN, BasicGAN]'], {}), "('modelclass', [BoringModel, ParityModuleRNN, BasicGAN])\n", (4567, 4623), False, 'import pytest\n'), ((1472, 1515), 'torch.allclose', 'torch.allclose', (['script_output', 'model_output'], {}), '(script_output, model_output)\n', (1486, 1515), False, 'import torch\n'), ((2133, 2176), 'torch.allclose', 'torch.allclose', (['script_output', 'model_output'], {}), '(script_output, model_output)\n', (2147, 2176), False, 'import torch\n'), ((2311, 2324), 'tests.helpers.BoringModel', 'BoringModel', ([], {}), '()\n', (2322, 2324), False, 'from tests.helpers import BoringModel\n'), ((2346, 2364), 'torch.randn', 'torch.randn', (['(1)', '(32)'], {}), '(1, 32)\n', (2357, 2364), False, 'import torch\n'), ((2644, 2687), 'torch.allclose', 'torch.allclose', (['script_output', 'model_output'], {}), '(script_output, model_output)\n', (2658, 2687), False, 'import torch\n'), ((2960, 2978), 'torch.randn', 'torch.randn', (['(5)', '(32)'], {}), '(5, 32)\n', (2971, 2978), False, 'import torch\n'), ((3328, 3341), 'tests.helpers.BoringModel', 'BoringModel', ([], {}), '()\n', (3339, 3341), False, 'from tests.helpers import BoringModel\n'), ((4426, 4453), 'torch.jit.load', 'torch.jit.load', (['output_file'], {}), '(output_file)\n', (4440, 4453), False, 'import torch\n'), ((4922, 4998), 'fsspec.register_implementation', 'fsspec.register_implementation', (['_DUMMY_PRFEIX', 'DummyFileSystem'], {'clobber': '(True)'}), '(_DUMMY_PRFEIX, DummyFileSystem, clobber=True)\n', (4952, 4998), False, 'import fsspec\n'), ((5043, 5109), 'os.path.join', 'os.path.join', (['_DUMMY_PRFEIX', '_PREFIX_SEPARATOR', 'tmpdir', '"""model.pt"""'], {}), "(_DUMMY_PRFEIX, _PREFIX_SEPARATOR, tmpdir, 'model.pt')\n", (5055, 5109), False, 'import os\n'), ((5177, 5204), 'pytorch_lightning.utilities.cloud_io.get_filesystem', 'get_filesystem', (['output_file'], {}), '(output_file)\n', (5191, 5204), False, 'from pytorch_lightning.utilities.cloud_io import get_filesystem\n'), ((5506, 5519), 'tests.helpers.BoringModel', 'BoringModel', ([], {}), '()\n', (5517, 5519), False, 'from tests.helpers import BoringModel\n'), ((5793, 5806), 'tests.helpers.BoringModel', 'BoringModel', ([], {}), '()\n', (5804, 5806), False, 'from tests.helpers import BoringModel\n'), ((1196, 1214), 'torch.randn', 'torch.randn', (['(5)', '(32)'], {}), '(5, 32)\n', (1207, 1214), False, 'import torch\n'), ((1333, 1348), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1346, 1348), False, 'import torch\n'), ((1843, 1861), 'torch.randn', 'torch.randn', (['(5)', '(32)'], {}), '(5, 32)\n', (1854, 1861), False, 'import torch\n'), ((1994, 2009), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2007, 2009), False, 'import torch\n'), ((2527, 2542), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2540, 2542), False, 'import torch\n'), ((2745, 2764), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (2757, 2764), False, 'import torch\n'), ((2766, 2789), 'torch.device', 'torch.device', (['"""cuda"""', '(0)'], {}), "('cuda', 0)\n", (2778, 2789), False, 'import torch\n'), ((5271, 5288), 'torch.jit.load', 'torch.jit.load', (['f'], {}), '(f)\n', (5285, 5288), False, 'import torch\n'), ((5552, 5620), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""only supports \'script\' or \'trace\'"""'}), '(ValueError, match="only supports \'script\' or \'trace\'")\n', (5565, 5620), False, 'import pytest\n'), ((5854, 5957), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""requires either `example_inputs` or `model.example_input_array`"""'}), "(ValueError, match=\n 'requires either `example_inputs` or `model.example_input_array`')\n", (5867, 5957), False, 'import pytest\n'), ((2903, 2916), 'tests.helpers.BoringModel', 'BoringModel', ([], {}), '()\n', (2914, 2916), False, 'from tests.helpers import BoringModel\n')]
|
import clr
import math
clr.AddReference('RevitAPI')
clr.AddReference('RevitAPIUI')
from Autodesk.Revit.DB import *
doc = __revit__.ActiveUIDocument.Document
app = __revit__.Application
t = Transaction(doc, 'Create Line')
t.Start()
#Create a plane by normal and origin
origin = XYZ.Zero
normal = XYZ.BasisZ
plane = Plane.CreateByNormalAndOrigin(normal, origin)
#Create a sketch plane
skplane = SketchPlane.Create(doc,plane)
#Create line vertices
lnStart = XYZ(0,0,0)
lnEnd = XYZ(20,20,0)
#create NewLine()
line = Line.CreateBound(lnStart, lnEnd)
#create NewModelCurve()
crv = doc.FamilyCreate.NewModelCurve(line, skplane)
t.Commit()
__window__.Close()
|
[
"clr.AddReference"
] |
[((23, 51), 'clr.AddReference', 'clr.AddReference', (['"""RevitAPI"""'], {}), "('RevitAPI')\n", (39, 51), False, 'import clr\n'), ((53, 83), 'clr.AddReference', 'clr.AddReference', (['"""RevitAPIUI"""'], {}), "('RevitAPIUI')\n", (69, 83), False, 'import clr\n')]
|
from behave import given, then, when
import validate_signature as vs
import common as c
import json
@given('The user validates a "{file}"')
def validation_file(context, file):
encoded = c.encode_file(file)
json = c.add_bytes_json(encoded)
context.response = vs.validate_signature(json)
@given("The user prepares the post")
def ready_post(context):
encoded = c.encode_file("/Signed_ok.xml")
context.json_file = c.add_bytes_json(encoded)
@then('The indication is "{Indication}"')
def validation_Indication(context, Indication):
response_dict = json.loads(context.response.content)
assert context.response.status_code == 200
assert response_dict["indication"] == Indication
@then('The subindication is "{SubIndication}"')
def validation_subconclusion(context, SubIndication):
response_dict = json.loads(context.response.content)
if response_dict["subIndication"] is None:
return True
else:
assert response_dict["subIndication"]
@when('Add {naughtystring} to the "{value}"')
def replace_signatureid(context, value, naughtystring):
json_post = c.change_property(context.json_file, value, naughtystring)
context.response = vs.validate_signature(json_post)
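# Hypothetical feature-file steps these definitions would match (the concrete
# values below are illustrative assumptions, except the file name taken from
# the code above):
#   Given The user validates a "/Signed_ok.xml"
#   Then The indication is "TOTAL-PASSED"
#   Given The user prepares the post
#   When Add <naughtystring> to the "signatureId"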
|
[
"behave.when",
"behave.then",
"common.change_property",
"json.loads",
"common.add_bytes_json",
"validate_signature.validate_signature",
"common.encode_file",
"behave.given"
] |
[((103, 141), 'behave.given', 'given', (['"""The user validates a "{file}\\""""'], {}), '(\'The user validates a "{file}"\')\n', (108, 141), False, 'from behave import given, then, when\n'), ((303, 338), 'behave.given', 'given', (['"""The user prepares the post"""'], {}), "('The user prepares the post')\n", (308, 338), False, 'from behave import given, then, when\n'), ((463, 503), 'behave.then', 'then', (['"""The indication is "{Indication}\\""""'], {}), '(\'The indication is "{Indication}"\')\n', (467, 503), False, 'from behave import given, then, when\n'), ((712, 758), 'behave.then', 'then', (['"""The subindication is "{SubIndication}\\""""'], {}), '(\'The subindication is "{SubIndication}"\')\n', (716, 758), False, 'from behave import given, then, when\n'), ((996, 1040), 'behave.when', 'when', (['"""Add {naughtystring} to the "{value}\\""""'], {}), '(\'Add {naughtystring} to the "{value}"\')\n', (1000, 1040), False, 'from behave import given, then, when\n'), ((192, 211), 'common.encode_file', 'c.encode_file', (['file'], {}), '(file)\n', (205, 211), True, 'import common as c\n'), ((223, 248), 'common.add_bytes_json', 'c.add_bytes_json', (['encoded'], {}), '(encoded)\n', (239, 248), True, 'import common as c\n'), ((272, 299), 'validate_signature.validate_signature', 'vs.validate_signature', (['json'], {}), '(json)\n', (293, 299), True, 'import validate_signature as vs\n'), ((378, 409), 'common.encode_file', 'c.encode_file', (['"""/Signed_ok.xml"""'], {}), "('/Signed_ok.xml')\n", (391, 409), True, 'import common as c\n'), ((434, 459), 'common.add_bytes_json', 'c.add_bytes_json', (['encoded'], {}), '(encoded)\n', (450, 459), True, 'import common as c\n'), ((572, 608), 'json.loads', 'json.loads', (['context.response.content'], {}), '(context.response.content)\n', (582, 608), False, 'import json\n'), ((833, 869), 'json.loads', 'json.loads', (['context.response.content'], {}), '(context.response.content)\n', (843, 869), False, 'import json\n'), ((1113, 1171), 'common.change_property', 'c.change_property', (['context.json_file', 'value', 'naughtystring'], {}), '(context.json_file, value, naughtystring)\n', (1130, 1171), True, 'import common as c\n'), ((1195, 1227), 'validate_signature.validate_signature', 'vs.validate_signature', (['json_post'], {}), '(json_post)\n', (1216, 1227), True, 'import validate_signature as vs\n')]
|
import copy
from scrapy import Item
from scrapy.http import Request, Response
from scrapy.spiders import CrawlSpider
from scrapy.utils.reqser import request_to_dict
class Parser:
def _clean_headers(self, headers):
# Use the new setting, if empty, try the deprecated one
excluded = self.spider.settings.get('AUTOUNIT_DONT_RECORD_HEADERS', [])
if not excluded:
excluded = self.spider.settings.get('AUTOUNIT_EXCLUDED_HEADERS', [])
auth_headers = ['Authorization', 'Proxy-Authorization']
# Use the new setting, if empty, try the deprecated one
included = self.spider.settings.get('AUTOUNIT_RECORD_AUTH_HEADERS', [])
if not included:
included = self.spider.settings.get('AUTOUNIT_INCLUDED_AUTH_HEADERS', [])
excluded.extend([h for h in auth_headers if h not in included])
for header in excluded:
headers.pop(header, None)
headers.pop(header.encode(), None)
def _request_to_dict(self, request):
_request = request_to_dict(request, spider=self.spider)
if not _request['callback']:
_request['callback'] = 'parse'
elif isinstance(self.spider, CrawlSpider):
rule = request.meta.get('rule')
if rule is not None:
_request['callback'] = self.spider.rules[rule].callback
self._clean_headers(_request['headers'])
_meta = {}
for key, value in _request.get('meta').items():
if key != '_autounit_cassette':
_meta[key] = self.parse_object(value)
_request['meta'] = _meta
return _request
def _response_to_dict(self, response):
return {
'cls': '{}.{}'.format(
type(response).__module__,
getattr(type(response), '__qualname__', None) or
getattr(type(response), '__name__', None)
),
'url': response.url,
'status': response.status,
'body': response.body,
'headers': dict(response.headers),
'flags': response.flags,
'encoding': response.encoding,
}
def spider_attrs(self):
to_filter = {'crawler', 'settings', 'start_urls'}
if isinstance(self.spider, CrawlSpider):
to_filter |= {'rules', '_rules'}
dont_record_attrs = set(
self.spider.settings.get('AUTOUNIT_DONT_RECORD_SPIDER_ATTRS', []))
to_filter |= dont_record_attrs
return {
k: v for k, v in self.spider.__dict__.items()
if k not in to_filter
}
def parse_response(self, response_obj):
request = self._request_to_dict(response_obj.request)
response = self._response_to_dict(response_obj)
return request, response
def parse_object(self, _object):
if isinstance(_object, Request):
return self._request_to_dict(_object)
elif isinstance(_object, Response):
return self.parse_object(self._response_to_dict(_object))
elif isinstance(_object, (dict, Item)):
for k, v in _object.items():
_object[k] = self.parse_object(v)
elif isinstance(_object, list):
for i, v in enumerate(_object):
_object[i] = self.parse_object(v)
elif isinstance(_object, tuple):
_object = tuple([self.parse_object(o) for o in _object])
return _object
def parse_callback_output(self, output):
parsed = []
original = []
for elem in output:
original.append(elem)
is_request = isinstance(elem, Request)
if is_request:
data = self._request_to_dict(elem)
else:
data = self.parse_object(copy.deepcopy(elem))
parsed.append({
'type': 'request' if is_request else 'item',
'data': data
})
return iter(original), parsed
def deprecated_settings(self):
mapping = {
'AUTOUNIT_SKIPPED_FIELDS': 'AUTOUNIT_DONT_TEST_OUTPUT_FIELDS',
'AUTOUNIT_REQUEST_SKIPPED_FIELDS': 'AUTOUNIT_DONT_TEST_REQUEST_ATTRS',
'AUTOUNIT_EXCLUDED_HEADERS': 'AUTOUNIT_DONT_RECORD_HEADERS',
'AUTOUNIT_INCLUDED_AUTH_HEADERS': 'AUTOUNIT_RECORD_AUTH_HEADERS',
'AUTOUNIT_INCLUDED_SETTINGS': 'AUTOUNIT_RECORD_SETTINGS',
}
message = "DEPRECATED: '{}' is going to be removed soon. Please use '{}' instead."
warnings = []
for old, new in mapping.items():
if not self.spider.settings.get(old):
continue
warnings.append(message.format(old, new))
return warnings
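# Hypothetical project settings this parser consults (the setting names come
# from the code above; the values are illustrative assumptions):
#
# AUTOUNIT_DONT_RECORD_HEADERS = ['Cookie', 'User-Agent']
# AUTOUNIT_RECORD_AUTH_HEADERS = ['Authorization']
# AUTOUNIT_DONT_RECORD_SPIDER_ATTRS = ['session_token']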
|
[
"copy.deepcopy",
"scrapy.utils.reqser.request_to_dict"
] |
[((1040, 1084), 'scrapy.utils.reqser.request_to_dict', 'request_to_dict', (['request'], {'spider': 'self.spider'}), '(request, spider=self.spider)\n', (1055, 1084), False, 'from scrapy.utils.reqser import request_to_dict\n'), ((3802, 3821), 'copy.deepcopy', 'copy.deepcopy', (['elem'], {}), '(elem)\n', (3815, 3821), False, 'import copy\n')]
|
# Generated by Django 4.0 on 2021-12-15 18:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0003_alter_job_summary'),
]
operations = [
migrations.AlterField(
model_name='job',
name='video',
field=models.FileField(null=True, upload_to='video/%y'),
),
]
|
[
"django.db.models.FileField"
] |
[((326, 375), 'django.db.models.FileField', 'models.FileField', ([], {'null': '(True)', 'upload_to': '"""video/%y"""'}), "(null=True, upload_to='video/%y')\n", (342, 375), False, 'from django.db import migrations, models\n')]
|
from django.urls import path
from . import views
from . import api
app_name = 'hcat'
urlpatterns = [
path('', views.index, name='index'),
path('project/<int:pk>/', views.ProjectDetailView.as_view(), name='project_detail'),
path('project', views.ProjectListView.as_view(), name='project_list'),
path('project/', views.ProjectListView.as_view(), name='project_list'),
]
|
[
"django.urls.path"
] |
[((107, 142), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (111, 142), False, 'from django.urls import path\n')]
|
from __future__ import print_function
import unittest, os, sys
import cStringIO
import ConfigParser
# setup system library path
pathname = os.path.realpath('../')
sys.path.insert(0, pathname)
sys.path.insert(0, '.')
try:
from osg_configure.modules import resourcecatalog
from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry
from osg_configure.modules import subcluster
except ImportError:
resourcecatalog = None
subcluster = None
print("resourcecatalog and/or subcluster not found -- skipping resourcecatalog tests")
from osg_configure.modules import exceptions
from osg_configure.modules.utilities import get_test_config
class TestResourceCatalog(unittest.TestCase):
def assertDoesNotRaise(self, exception, function, *args, **kwargs):
try:
function(*args, **kwargs)
except exception:
self.fail('%s called with %r and %r raised %s' % (function.__name__, args, kwargs, exception.__name__))
def setUp(self):
if not resourcecatalog: return
self.rc = ResourceCatalog()
def testEmpty(self):
if not resourcecatalog: return
self.assertEqual(self.rc.compose_text().strip(), "OSG_ResourceCatalog = {}")
def testSingle(self):
if not resourcecatalog: return
self.rc.add_rcentry(RCEntry(name='sc1', cpus=1, memory=2000))
self.assertEqual(self.rc.compose_text().strip(), r"""OSG_ResourceCatalog = { \
[ \
CPUs = 1; \
Memory = 2000; \
Name = "sc1"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory; \
Transform = [ set_MaxMemory = RequestMemory; set_xcount = RequestCPUs; ]; \
] \
}""")
def testMulti(self):
if not resourcecatalog: return
(self.rc
.add_rcentry(RCEntry(name='sc1', cpus=1, memory=2000))
.add_rcentry(RCEntry(name='sc2', cpus=2, memory=4000))
.add_rcentry(RCEntry(name='sc3', cpus=4, memory=8000, allowed_vos='osg ,,,atlas')))
self.assertEqual(self.rc.compose_text().strip(), r"""OSG_ResourceCatalog = { \
[ \
CPUs = 1; \
Memory = 2000; \
Name = "sc1"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory; \
Transform = [ set_MaxMemory = RequestMemory; set_xcount = RequestCPUs; ]; \
], \
[ \
CPUs = 2; \
Memory = 4000; \
Name = "sc2"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory; \
Transform = [ set_MaxMemory = RequestMemory; set_xcount = RequestCPUs; ]; \
], \
[ \
AllowedVOs = { "osg", "atlas" }; \
CPUs = 4; \
Memory = 8000; \
Name = "sc3"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory && member(TARGET.VO, AllowedVOs); \
Transform = [ set_MaxMemory = RequestMemory; set_xcount = RequestCPUs; ]; \
] \
}""")
def testNoName(self):
if not resourcecatalog: return
rce = RCEntry(name='', cpus=1, memory=1)
self.assertRaises(ValueError, self.rc.add_rcentry, rce)
def testOutOfRange(self):
if not resourcecatalog: return
rce = RCEntry(name='sc', cpus=-1, memory=1)
self.assertRaises(ValueError, self.rc.add_rcentry, rce)
rce.cpus = 1
rce.memory = 0
self.assertRaises(ValueError, self.rc.add_rcentry, rce)
def testZeroMaxWallTime(self):
if not resourcecatalog: return
rce = RCEntry(name='sc', cpus=1, memory=1, max_wall_time=0)
self.assertDoesNotRaise(ValueError, self.rc.add_rcentry, rce)
def testExtraRequirements(self):
if not resourcecatalog: return
rce = RCEntry(name='sc', cpus=1, memory=2000, extra_requirements='TARGET.WantGPUs =?= 1')
self.rc.add_rcentry(rce)
self.assertEqual(self.rc.compose_text().strip(), r"""OSG_ResourceCatalog = { \
[ \
CPUs = 1; \
Memory = 2000; \
Name = "sc"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory && TARGET.WantGPUs =?= 1; \
Transform = [ set_MaxMemory = RequestMemory; set_xcount = RequestCPUs; ]; \
] \
}""")
def testExtraTransforms(self):
if not resourcecatalog: return
rce = RCEntry(name='sc', cpus=1, memory=2000, extra_transforms='set_WantRHEL6 = 1')
self.rc.add_rcentry(rce)
self.assertEqual(self.rc.compose_text().strip(), r"""OSG_ResourceCatalog = { \
[ \
CPUs = 1; \
Memory = 2000; \
Name = "sc"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory; \
Transform = [ set_MaxMemory = RequestMemory; set_WantRHEL6 = 1; set_xcount = RequestCPUs; ]; \
] \
}""")
def testFull(self):
if not resourcecatalog: return
config = ConfigParser.SafeConfigParser()
config_io = cStringIO.StringIO(r"""
[Subcluster Valid]
name = red.unl.edu
node_count = 60
ram_mb = 4000
cpu_model = Opteron 275
cpu_vendor = AMD
cpu_speed_mhz = 2200
cpu_platform = x86_64
cpus_per_node = 2
cores_per_node = 4
inbound_network = FALSE
outbound_network = TRUE
HEPSPEC = 10
allowed_vos = osg, atlas
""")
config.readfp(config_io)
self.assertEqual(subcluster.resource_catalog_from_config(config).compose_text(),
r"""OSG_ResourceCatalog = { \
[ \
AllowedVOs = { "osg", "atlas" }; \
CPUs = 4; \
MaxWallTime = 1440; \
Memory = 4000; \
Name = "red.unl.edu"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory && member(TARGET.VO, AllowedVOs); \
Transform = [ set_MaxMemory = RequestMemory; set_xcount = RequestCPUs; ]; \
] \
}""")
def testResourceEntry(self):
if not resourcecatalog: return
# Test using the "Resource Entry" section name instead of "Subcluster"
# and also using some of the attributes ATLAS requested
config = ConfigParser.SafeConfigParser()
config_io = cStringIO.StringIO(r"""
[Resource Entry Valid]
name = red.unl.edu
maxmemory = 4000
cpucount = 4
queue = red
vo_tag = ANALYSIS
allowed_vos = osg, atlas
""")
config.readfp(config_io)
self.assertEqual(subcluster.resource_catalog_from_config(config).compose_text(),
r"""OSG_ResourceCatalog = { \
[ \
AllowedVOs = { "osg", "atlas" }; \
CPUs = 4; \
MaxWallTime = 1440; \
Memory = 4000; \
Name = "red.unl.edu"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory && member(TARGET.VO, AllowedVOs) && TARGET.VOTag == "ANALYSIS"; \
Transform = [ set_MaxMemory = RequestMemory; set_VOTag = "ANALYSIS"; set_remote_queue = "red"; set_xcount = RequestCPUs; ]; \
VOTag = "ANALYSIS"; \
] \
}""")
def testResourceEntryWithSubclusters(self):
if not resourcecatalog: return
config = ConfigParser.SafeConfigParser()
config_file = get_test_config("subcluster/resourceentry_and_sc.ini")
config.read(config_file)
self.assertDoesNotRaise(exceptions.SettingError, subcluster.resource_catalog_from_config, config)
rc = subcluster.resource_catalog_from_config(config).compose_text()
self.assertTrue('Subclusters = { "SC1", "Sub Cluster 2" }; \\' in rc,
'\'subclusters\' attrib improperly transformed')
def testResourceEntryBad(self):
if not resourcecatalog: return
for config_filename in ["subcluster/resourceentry_missing_cpucount.ini",
"subcluster/resourceentry_missing_memory.ini",
"subcluster/resourceentry_missing_queue.ini",
"subcluster/resourceentry_missing_sc.ini"]:
config = ConfigParser.SafeConfigParser()
config_file = get_test_config(config_filename)
config.read(config_file)
try:
self.assertRaises(exceptions.SettingError, subcluster.resource_catalog_from_config, config)
except AssertionError:
sys.stderr.write("Failed to raise error on " + config_filename)
raise
def testFullWithExtraTransforms(self):
if not resourcecatalog: return
config = ConfigParser.SafeConfigParser()
config_io = cStringIO.StringIO(r"""
[Subcluster Test]
name = glow.chtc.wisc.edu
node_count = 60
ram_mb = 4000
cpu_model = Opteron 275
cpu_vendor = AMD
cpu_speed_mhz = 2200
cpu_platform = x86_64
cpus_per_node = 2
cores_per_node = 4
inbound_network = FALSE
outbound_network = TRUE
HEPSPEC = 10
queue = blue
extra_transforms = set_WantRHEL6 = 1
max_wall_time = 1440
allowed_vos = osg, atlas
""")
config.readfp(config_io)
self.assertEqual(subcluster.resource_catalog_from_config(config).compose_text(),
r"""OSG_ResourceCatalog = { \
[ \
AllowedVOs = { "osg", "atlas" }; \
CPUs = 4; \
MaxWallTime = 1440; \
Memory = 4000; \
Name = "glow.chtc.wisc.edu"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory && member(TARGET.VO, AllowedVOs); \
Transform = [ set_MaxMemory = RequestMemory; set_WantRHEL6 = 1; set_remote_queue = "blue"; set_xcount = RequestCPUs; ]; \
] \
}""")
def testFullWithExtras(self):
# Disable this test because the feature is disabled for now
return
if not resourcecatalog: return
config = ConfigParser.SafeConfigParser()
config_io = cStringIO.StringIO(r"""
[Subcluster Test]
name = glow.chtc.wisc.edu
node_count = 60
ram_mb = 4000
cpu_model = Opteron 275
cpu_vendor = AMD
cpu_speed_mhz = 2200
cpu_platform = x86_64
cpus_per_node = 2
cores_per_node = 4
inbound_network = FALSE
outbound_network = TRUE
HEPSPEC = 10
queue = blue
extra_requirements = WantGPUs =?= 1
extra_transforms = set_WantRHEL6 = 1
max_wall_time = 1440
allowed_vos = osg, atlas
""")
config.readfp(config_io)
self.assertEqual(subcluster.resource_catalog_from_config(config).compose_text(),
r"""OSG_ResourceCatalog = { \
[ \
AllowedVOs = { "osg", "atlas" }; \
CPUs = 4; \
MaxWallTime = 1440; \
Memory = 4000; \
Name = "glow.chtc.wisc.edu"; \
Requirements = TARGET.RequestCPUs <= CPUs && TARGET.RequestMemory <= Memory && member(TARGET.VO, AllowedVOs) && WantGPUs =?= 1; \
Transform = [ set_MaxMemory = RequestMemory; set_WantRHEL6 = 1; set_remote_queue = "blue"; set_xcount = RequestCPUs; ]; \
] \
}""")
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"ConfigParser.SafeConfigParser",
"osg_configure.modules.resourcecatalog.RCEntry",
"osg_configure.modules.resourcecatalog.ResourceCatalog",
"osg_configure.modules.subcluster.resource_catalog_from_config",
"os.path.realpath",
"sys.path.insert",
"cStringIO.StringIO",
"sys.stderr.write",
"osg_configure.modules.utilities.get_test_config"
] |
[((140, 163), 'os.path.realpath', 'os.path.realpath', (['"""../"""'], {}), "('../')\n", (156, 163), False, 'import unittest, os, sys\n'), ((164, 192), 'sys.path.insert', 'sys.path.insert', (['(0)', 'pathname'], {}), '(0, pathname)\n', (179, 192), False, 'import unittest, os, sys\n'), ((193, 216), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""."""'], {}), "(0, '.')\n", (208, 216), False, 'import unittest, os, sys\n'), ((10427, 10442), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10440, 10442), False, 'import unittest, os, sys\n'), ((1062, 1079), 'osg_configure.modules.resourcecatalog.ResourceCatalog', 'ResourceCatalog', ([], {}), '()\n', (1077, 1079), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((2939, 2973), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '""""""', 'cpus': '(1)', 'memory': '(1)'}), "(name='', cpus=1, memory=1)\n", (2946, 2973), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((3122, 3159), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc"""', 'cpus': '(-1)', 'memory': '(1)'}), "(name='sc', cpus=-1, memory=1)\n", (3129, 3159), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((3421, 3474), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc"""', 'cpus': '(1)', 'memory': '(1)', 'max_wall_time': '(0)'}), "(name='sc', cpus=1, memory=1, max_wall_time=0)\n", (3428, 3474), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((3636, 3724), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc"""', 'cpus': '(1)', 'memory': '(2000)', 'extra_requirements': '"""TARGET.WantGPUs =?= 1"""'}), "(name='sc', cpus=1, memory=2000, extra_requirements=\n 'TARGET.WantGPUs =?= 1')\n", (3643, 3724), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((4191, 4268), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc"""', 'cpus': '(1)', 'memory': '(2000)', 'extra_transforms': '"""set_WantRHEL6 = 1"""'}), "(name='sc', cpus=1, memory=2000, extra_transforms='set_WantRHEL6 = 1')\n", (4198, 4268), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((4726, 4757), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (4755, 4757), False, 'import ConfigParser\n'), ((4778, 5090), 'cStringIO.StringIO', 'cStringIO.StringIO', (['"""\n[Subcluster Valid]\nname = red.unl.edu\nnode_count = 60\nram_mb = 4000\ncpu_model = Opteron 275\ncpu_vendor = AMD\ncpu_speed_mhz = 2200\ncpu_platform = x86_64\ncpus_per_node = 2\ncores_per_node = 4\ninbound_network = FALSE\noutbound_network = TRUE\nHEPSPEC = 10\nallowed_vos = osg, atlas\n"""'], {}), '(\n    """\n[Subcluster Valid]\nname = red.unl.edu\nnode_count = 60\nram_mb = 4000\ncpu_model = Opteron 275\ncpu_vendor = AMD\ncpu_speed_mhz = 2200\ncpu_platform = x86_64\ncpus_per_node = 2\ncores_per_node = 4\ninbound_network = FALSE\noutbound_network = TRUE\nHEPSPEC = 10\nallowed_vos = osg, atlas\n"""\n    )\n', (4796, 5090), False, 'import cStringIO\n'), ((5836, 5867), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (5865, 5867), False, 'import ConfigParser\n'), ((5888, 6052), 'cStringIO.StringIO', 'cStringIO.StringIO', (['"""\n[Resource Entry Valid]\nname = red.unl.edu\nmaxmemory = 4000\ncpucount = 4\nqueue = red\nvo_tag = ANALYSIS\nallowed_vos = osg, atlas\n"""'], {}), '(\n    """\n[Resource Entry Valid]\nname = red.unl.edu\nmaxmemory = 4000\ncpucount = 4\nqueue = red\nvo_tag = ANALYSIS\nallowed_vos = osg, atlas\n"""\n    )\n', (5906, 6052), False, 'import cStringIO\n'), ((6776, 6807), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (6805, 6807), False, 'import ConfigParser\n'), ((6830, 6884), 'osg_configure.modules.utilities.get_test_config', 'get_test_config', (['"""subcluster/resourceentry_and_sc.ini"""'], {}), "('subcluster/resourceentry_and_sc.ini')\n", (6845, 6884), False, 'from osg_configure.modules.utilities import get_test_config\n'), ((8152, 8183), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (8181, 8183), False, 'import ConfigParser\n'), ((8204, 8593), 'cStringIO.StringIO', 'cStringIO.StringIO', (['"""\n[Subcluster Test]\nname = glow.chtc.wisc.edu\nnode_count = 60\nram_mb = 4000\ncpu_model = Opteron 275\ncpu_vendor = AMD\ncpu_speed_mhz = 2200\ncpu_platform = x86_64\ncpus_per_node = 2\ncores_per_node = 4\ninbound_network = FALSE\noutbound_network = TRUE\nHEPSPEC = 10\nqueue = blue\nextra_transforms = set_WantRHEL6 = 1\nmax_wall_time = 1440\nallowed_vos = osg, atlas\n"""'], {}), '(\n    """\n[Subcluster Test]\nname = glow.chtc.wisc.edu\nnode_count = 60\nram_mb = 4000\ncpu_model = Opteron 275\ncpu_vendor = AMD\ncpu_speed_mhz = 2200\ncpu_platform = x86_64\ncpus_per_node = 2\ncores_per_node = 4\ninbound_network = FALSE\noutbound_network = TRUE\nHEPSPEC = 10\nqueue = blue\nextra_transforms = set_WantRHEL6 = 1\nmax_wall_time = 1440\nallowed_vos = osg, atlas\n"""\n    )\n', (8222, 8593), False, 'import cStringIO\n'), ((9333, 9364), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (9362, 9364), False, 'import ConfigParser\n'), ((9385, 9810), 'cStringIO.StringIO', 'cStringIO.StringIO', (['"""\n[Subcluster Test]\nname = glow.chtc.wisc.edu\nnode_count = 60\nram_mb = 4000\ncpu_model = Opteron 275\ncpu_vendor = AMD\ncpu_speed_mhz = 2200\ncpu_platform = x86_64\ncpus_per_node = 2\ncores_per_node = 4\ninbound_network = FALSE\noutbound_network = TRUE\nHEPSPEC = 10\nqueue = blue\nextra_requirements = WantGPUs =?= 1\nextra_transforms = set_WantRHEL6 = 1\nmax_wall_time = 1440\nallowed_vos = osg, atlas\n"""'], {}), '(\n    """\n[Subcluster Test]\nname = glow.chtc.wisc.edu\nnode_count = 60\nram_mb = 4000\ncpu_model = Opteron 275\ncpu_vendor = AMD\ncpu_speed_mhz = 2200\ncpu_platform = x86_64\ncpus_per_node = 2\ncores_per_node = 4\ninbound_network = FALSE\noutbound_network = TRUE\nHEPSPEC = 10\nqueue = blue\nextra_requirements = WantGPUs =?= 1\nextra_transforms = set_WantRHEL6 = 1\nmax_wall_time = 1440\nallowed_vos = osg, atlas\n"""\n    )\n', (9403, 9810), False, 'import cStringIO\n'), ((1324, 1364), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc1"""', 'cpus': '(1)', 'memory': '(2000)'}), "(name='sc1', cpus=1, memory=2000)\n", (1331, 1364), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((1923, 1993), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc3"""', 'cpus': '(4)', 'memory': '(8000)', 'allowed_vos': '"""osg ,,,atlas"""'}), "(name='sc3', cpus=4, memory=8000, allowed_vos='osg ,,,atlas')\n", (1930, 1993), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((7662, 7693), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (7691, 7693), False, 'import ConfigParser\n'), ((7720, 7752), 'osg_configure.modules.utilities.get_test_config', 'get_test_config', (['config_filename'], {}), '(config_filename)\n', (7735, 7752), False, 'from osg_configure.modules.utilities import get_test_config\n'), ((7037, 7084), 'osg_configure.modules.subcluster.resource_catalog_from_config', 'subcluster.resource_catalog_from_config', (['config'], {}), '(config)\n', (7076, 7084), False, 'from osg_configure.modules import subcluster\n'), ((1859, 1899), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc2"""', 'cpus': '(2)', 'memory': '(4000)'}), "(name='sc2', cpus=2, memory=4000)\n", (1866, 1899), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n'), ((5140, 5187), 'osg_configure.modules.subcluster.resource_catalog_from_config', 'subcluster.resource_catalog_from_config', (['config'], {}), '(config)\n', (5179, 5187), False, 'from osg_configure.modules import subcluster\n'), ((6102, 6149), 'osg_configure.modules.subcluster.resource_catalog_from_config', 'subcluster.resource_catalog_from_config', (['config'], {}), '(config)\n', (6141, 6149), False, 'from osg_configure.modules import subcluster\n'), ((7966, 8029), 'sys.stderr.write', 'sys.stderr.write', (["('Failed to raise error on ' + config_filename)"], {}), "('Failed to raise error on ' + config_filename)\n", (7982, 8029), False, 'import unittest, os, sys\n'), ((8643, 8690), 'osg_configure.modules.subcluster.resource_catalog_from_config', 'subcluster.resource_catalog_from_config', (['config'], {}), '(config)\n', (8682, 8690), False, 'from osg_configure.modules import subcluster\n'), ((9860, 9907), 'osg_configure.modules.subcluster.resource_catalog_from_config', 'subcluster.resource_catalog_from_config', (['config'], {}), '(config)\n', (9899, 9907), False, 'from osg_configure.modules import subcluster\n'), ((1795, 1835), 'osg_configure.modules.resourcecatalog.RCEntry', 'RCEntry', ([], {'name': '"""sc1"""', 'cpus': '(1)', 'memory': '(2000)'}), "(name='sc1', cpus=1, memory=2000)\n", (1802, 1835), False, 'from osg_configure.modules.resourcecatalog import ResourceCatalog, RCEntry\n')]
|
from unittest import TestCase
from anadroid.device.Device import get_first_connected_device
class TestDevice(TestCase):
device = get_first_connected_device()
def test_unlock_screen(self):
self.__class__.device.unlock_screen()
self.assertTrue(self.__class__.device.is_screen_unlocked())
def test_is_screen_unlocked(self):
self.__class__.device.lock_screen()
self.__class__.device.unlock_screen()
self.assertTrue(self.__class__.device.is_screen_unlocked())
|
[
"anadroid.device.Device.get_first_connected_device"
] |
[((136, 164), 'anadroid.device.Device.get_first_connected_device', 'get_first_connected_device', ([], {}), '()\n', (162, 164), False, 'from anadroid.device.Device import get_first_connected_device\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("fuauth", "0001_initial"), ("messagebox", "0001_initial")]
operations = [
migrations.AlterField(
model_name="message",
name="recipient",
field=models.ForeignKey(
to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE
),
preserve_default=True,
)
]
|
[
"django.db.models.ForeignKey"
] |
[((394, 466), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL', 'on_delete': 'models.CASCADE'}), '(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (411, 466), False, 'from django.db import migrations, models\n')]
|
from diffiqult import Tasks
from diffiqult import System_mol
from diffiqult.Basis import basis_set_3G_STO
mol = [
( 8,(0.0, 0.0, 0.091685801102911746)),
( 1,(1.4229678834888837, 0.0, -0.98120954931681137)),
( 1,(-1.4229678834888837, 0.0, -0.98120954931681137))]
basis = basis_set_3G_STO
ne = 10
system = System_mol(mol, ## Geometry
basis, ## Basis set (if shifted it should have the coordinates too)
ne, ## Number of electrons
shifted=False, ## If the basis is going to be on the atoms coordinates
angs=False, ## Units -> Bohr
               mol_name='agua')  ## Name of the molecule
manager = Tasks(system,
             name='h2_sto_3g', ## Prefix for all output files
verbose=True) ## If there is going to be an output
manager.runtask('Energy',
max_scf=50,
printcoef=True,
name='Output.molden',
output=False)
manager.runtask('Opt',
max_scf=50,
printcoef=False,
maxiter=3,
argnum=[0],
output=False)
|
[
"diffiqult.Tasks",
"diffiqult.System_mol"
] |
[((308, 378), 'diffiqult.System_mol', 'System_mol', (['mol', 'basis', 'ne'], {'shifted': '(False)', 'angs': '(False)', 'mol_name': '"""agua"""'}), "(mol, basis, ne, shifted=False, angs=False, mol_name='agua')\n", (318, 378), False, 'from diffiqult import System_mol\n'), ((773, 818), 'diffiqult.Tasks', 'Tasks', (['system'], {'name': '"""h2_sto_3g"""', 'verbose': '(True)'}), "(system, name='h2_sto_3g', verbose=True)\n", (778, 818), False, 'from diffiqult import Tasks\n')]
|
#!/usr/bin/env python
from pyrf.gui.spectrum_analyzer import main
main()
|
[
"pyrf.gui.spectrum_analyzer.main"
] |
[((68, 74), 'pyrf.gui.spectrum_analyzer.main', 'main', ([], {}), '()\n', (72, 74), False, 'from pyrf.gui.spectrum_analyzer import main\n')]
|
#
# 3way_junction.py
# A Python program to simulate a 3 way traffic light
# controlled road junction using Pimoroni's PiGlow.
#
# The PiGlow is a small add on board for the Raspberry Pi that
# provides 18 individually controllable LEDs.
#
# For full details including setup, documentation and examples see:
# https://github.com/pimoroni/piglow#setting-up-your-raspberry-pi
#
# Author: <NAME>
# https://github.com/georgejopling/piglow.git
#
# The code is commented for educational use.
#
# MIT License
#
# Copyright (c) 2021 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#!/usr/bin/env python
# Import time - For pauses during traffic light sequence.
import time
# Import random - For simulating traffic volume during operation.
import random
# Import piglow - PiGlow Python library to control LED board.
import piglow
# Initialise LEDs. PiGlow arms 1, 2 & 3 = junctions.
# Clear all LEDs and illuminate the red LED on arms 1, 2 & 3,
# and display the changes.
piglow.clear()
piglow.single(1, 6, 255)
piglow.single(2, 6, 255)
piglow.single(3, 6, 255)
piglow.show()
# Start of loop to continuously repeat the code until stopped
# at any time by pressing Ctrl+C on the keyboard.
try:
while True:
# Junction 1 (using arm 1 LEDs).
# Sequence:
# Pause on red LED for 2.5 seconds,
# Illuminate amber LED,
# Pause on red and amber LED for 2.5 seconds,
# Extinguish red and amber LED,
# Illuminate green LED.
# Note - PiGlow orange LED is described as amber throughout.
time.sleep(2.5)
piglow.single(1, 7, 255)
piglow.show()
time.sleep(2.5)
piglow.single(1, 6, 0)
piglow.single(1, 7, 0)
piglow.single(1, 9, 255)
piglow.show()
# Generate a random delay time to simulate traffic volume
# at the junction (10 to 20 seconds).
random_delay = random.randint(10, 20)
time.sleep(random_delay)
# Sequence:
# Extinguish green LED,
# Illuminate amber LED,
# Pause on amber LED for 3 seconds,
# Extinguish amber LED,
# Illuminate red LED.
piglow.single(1, 9, 0)
piglow.single(1, 7, 255)
piglow.show()
time.sleep(3.0)
piglow.single(1, 7, 0)
piglow.single(1, 6, 255)
piglow.show()
# Junction 2 (using arm 2 LEDs).
# **Same sequence as shown for arm 1**.
time.sleep(2.5)
piglow.single(2, 7, 255)
piglow.show()
time.sleep(2.5)
piglow.single(2, 6, 0)
piglow.single(2, 7, 0)
piglow.single(2, 9, 255)
piglow.show()
# Generate a random delay time to simulate traffic volume
# at the junction (10 to 20 seconds).
random_delay = random.randint(10, 20)
time.sleep(random_delay)
# **Same sequence as shown for arm 1**
piglow.single(2, 9, 0)
piglow.single(2, 7, 255)
piglow.show()
time.sleep(3.0)
piglow.single(2, 7, 0)
piglow.single(2, 6, 255)
piglow.show()
# Junction 3 (using arm 3 LEDs).
# **Same sequence as shown for arm 1**
time.sleep(2.5)
piglow.single(3, 7, 255)
piglow.show()
time.sleep(2.5)
piglow.single(3, 6, 0)
piglow.single(3, 7, 0)
piglow.single(3, 9, 255)
piglow.show()
# Generate a random delay time to simulate traffic volume
# at the junction (10 to 20 seconds).
random_delay = random.randint(10, 20)
time.sleep(random_delay)
# **Same sequence as shown for arm 1**.
piglow.single(3, 9, 0)
piglow.single(3, 7, 255)
piglow.show()
time.sleep(3.0)
piglow.single(3, 7, 0)
piglow.single(3, 6, 255)
piglow.show()
# Catch Ctrl+C press on keyboard
except KeyboardInterrupt:
# Extinguish all LEDs on PiGlow and exit.
piglow.clear()
piglow.show()
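# The three junction sequences above are identical apart from the arm number.
# A compact refactoring sketch (an illustration only, not part of the original
# program; it reuses the same piglow/time/random calls shown above):
def run_junction(arm):
    # Red -> red+amber (2.5 seconds each) -> green.
    time.sleep(2.5)
    piglow.single(arm, 7, 255)
    piglow.show()
    time.sleep(2.5)
    piglow.single(arm, 6, 0)
    piglow.single(arm, 7, 0)
    piglow.single(arm, 9, 255)
    piglow.show()
    # Hold green for a random 10 to 20 second "traffic volume" delay.
    time.sleep(random.randint(10, 20))
    # Green -> amber (3 seconds) -> red.
    piglow.single(arm, 9, 0)
    piglow.single(arm, 7, 255)
    piglow.show()
    time.sleep(3.0)
    piglow.single(arm, 7, 0)
    piglow.single(arm, 6, 255)
    piglow.show()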
|
[
"piglow.clear",
"random.randint",
"time.sleep",
"piglow.single",
"piglow.show"
] |
[((1982, 1996), 'piglow.clear', 'piglow.clear', ([], {}), '()\n', (1994, 1996), False, 'import piglow\n'), ((1997, 2021), 'piglow.single', 'piglow.single', (['(1)', '(6)', '(255)'], {}), '(1, 6, 255)\n', (2010, 2021), False, 'import piglow\n'), ((2022, 2046), 'piglow.single', 'piglow.single', (['(2)', '(6)', '(255)'], {}), '(2, 6, 255)\n', (2035, 2046), False, 'import piglow\n'), ((2047, 2071), 'piglow.single', 'piglow.single', (['(3)', '(6)', '(255)'], {}), '(3, 6, 255)\n', (2060, 2071), False, 'import piglow\n'), ((2072, 2085), 'piglow.show', 'piglow.show', ([], {}), '()\n', (2083, 2085), False, 'import piglow\n'), ((2501, 2516), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (2511, 2516), False, 'import time\n'), ((2525, 2549), 'piglow.single', 'piglow.single', (['(1)', '(7)', '(255)'], {}), '(1, 7, 255)\n', (2538, 2549), False, 'import piglow\n'), ((2558, 2571), 'piglow.show', 'piglow.show', ([], {}), '()\n', (2569, 2571), False, 'import piglow\n'), ((2580, 2595), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (2590, 2595), False, 'import time\n'), ((2604, 2626), 'piglow.single', 'piglow.single', (['(1)', '(6)', '(0)'], {}), '(1, 6, 0)\n', (2617, 2626), False, 'import piglow\n'), ((2635, 2657), 'piglow.single', 'piglow.single', (['(1)', '(7)', '(0)'], {}), '(1, 7, 0)\n', (2648, 2657), False, 'import piglow\n'), ((2666, 2690), 'piglow.single', 'piglow.single', (['(1)', '(9)', '(255)'], {}), '(1, 9, 255)\n', (2679, 2690), False, 'import piglow\n'), ((2699, 2712), 'piglow.show', 'piglow.show', ([], {}), '()\n', (2710, 2712), False, 'import piglow\n'), ((2832, 2854), 'random.randint', 'random.randint', (['(10)', '(20)'], {}), '(10, 20)\n', (2846, 2854), False, 'import random\n'), ((2863, 2887), 'time.sleep', 'time.sleep', (['random_delay'], {}), '(random_delay)\n', (2873, 2887), False, 'import time\n'), ((3040, 3062), 'piglow.single', 'piglow.single', (['(1)', '(9)', '(0)'], {}), '(1, 9, 0)\n', (3053, 3062), False, 'import piglow\n'), ((3071, 3095), 'piglow.single', 'piglow.single', (['(1)', '(7)', '(255)'], {}), '(1, 7, 255)\n', (3084, 3095), False, 'import piglow\n'), ((3104, 3117), 'piglow.show', 'piglow.show', ([], {}), '()\n', (3115, 3117), False, 'import piglow\n'), ((3126, 3141), 'time.sleep', 'time.sleep', (['(3.0)'], {}), '(3.0)\n', (3136, 3141), False, 'import time\n'), ((3150, 3172), 'piglow.single', 'piglow.single', (['(1)', '(7)', '(0)'], {}), '(1, 7, 0)\n', (3163, 3172), False, 'import piglow\n'), ((3181, 3205), 'piglow.single', 'piglow.single', (['(1)', '(6)', '(255)'], {}), '(1, 6, 255)\n', (3194, 3205), False, 'import piglow\n'), ((3214, 3227), 'piglow.show', 'piglow.show', ([], {}), '()\n', (3225, 3227), False, 'import piglow\n'), ((3318, 3333), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (3328, 3333), False, 'import time\n'), ((3342, 3366), 'piglow.single', 'piglow.single', (['(2)', '(7)', '(255)'], {}), '(2, 7, 255)\n', (3355, 3366), False, 'import piglow\n'), ((3375, 3388), 'piglow.show', 'piglow.show', ([], {}), '()\n', (3386, 3388), False, 'import piglow\n'), ((3397, 3412), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (3407, 3412), False, 'import time\n'), ((3421, 3443), 'piglow.single', 'piglow.single', (['(2)', '(6)', '(0)'], {}), '(2, 6, 0)\n', (3434, 3443), False, 'import piglow\n'), ((3452, 3474), 'piglow.single', 'piglow.single', (['(2)', '(7)', '(0)'], {}), '(2, 7, 0)\n', (3465, 3474), False, 'import piglow\n'), ((3483, 3507), 'piglow.single', 'piglow.single', (['(2)', '(9)', '(255)'], {}), '(2, 9, 255)\n', (3496, 
3507), False, 'import piglow\n'), ((3516, 3529), 'piglow.show', 'piglow.show', ([], {}), '()\n', (3527, 3529), False, 'import piglow\n'), ((3649, 3671), 'random.randint', 'random.randint', (['(10)', '(20)'], {}), '(10, 20)\n', (3663, 3671), False, 'import random\n'), ((3680, 3704), 'time.sleep', 'time.sleep', (['random_delay'], {}), '(random_delay)\n', (3690, 3704), False, 'import time\n'), ((3754, 3776), 'piglow.single', 'piglow.single', (['(2)', '(9)', '(0)'], {}), '(2, 9, 0)\n', (3767, 3776), False, 'import piglow\n'), ((3785, 3809), 'piglow.single', 'piglow.single', (['(2)', '(7)', '(255)'], {}), '(2, 7, 255)\n', (3798, 3809), False, 'import piglow\n'), ((3818, 3831), 'piglow.show', 'piglow.show', ([], {}), '()\n', (3829, 3831), False, 'import piglow\n'), ((3840, 3855), 'time.sleep', 'time.sleep', (['(3.0)'], {}), '(3.0)\n', (3850, 3855), False, 'import time\n'), ((3864, 3886), 'piglow.single', 'piglow.single', (['(2)', '(7)', '(0)'], {}), '(2, 7, 0)\n', (3877, 3886), False, 'import piglow\n'), ((3895, 3919), 'piglow.single', 'piglow.single', (['(2)', '(6)', '(255)'], {}), '(2, 6, 255)\n', (3908, 3919), False, 'import piglow\n'), ((3928, 3941), 'piglow.show', 'piglow.show', ([], {}), '()\n', (3939, 3941), False, 'import piglow\n'), ((4024, 4039), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (4034, 4039), False, 'import time\n'), ((4048, 4072), 'piglow.single', 'piglow.single', (['(3)', '(7)', '(255)'], {}), '(3, 7, 255)\n', (4061, 4072), False, 'import piglow\n'), ((4081, 4094), 'piglow.show', 'piglow.show', ([], {}), '()\n', (4092, 4094), False, 'import piglow\n'), ((4103, 4118), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (4113, 4118), False, 'import time\n'), ((4127, 4149), 'piglow.single', 'piglow.single', (['(3)', '(6)', '(0)'], {}), '(3, 6, 0)\n', (4140, 4149), False, 'import piglow\n'), ((4158, 4180), 'piglow.single', 'piglow.single', (['(3)', '(7)', '(0)'], {}), '(3, 7, 0)\n', (4171, 4180), False, 'import piglow\n'), ((4189, 4213), 'piglow.single', 'piglow.single', (['(3)', '(9)', '(255)'], {}), '(3, 9, 255)\n', (4202, 4213), False, 'import piglow\n'), ((4222, 4235), 'piglow.show', 'piglow.show', ([], {}), '()\n', (4233, 4235), False, 'import piglow\n'), ((4355, 4377), 'random.randint', 'random.randint', (['(10)', '(20)'], {}), '(10, 20)\n', (4369, 4377), False, 'import random\n'), ((4386, 4410), 'time.sleep', 'time.sleep', (['random_delay'], {}), '(random_delay)\n', (4396, 4410), False, 'import time\n'), ((4465, 4487), 'piglow.single', 'piglow.single', (['(3)', '(9)', '(0)'], {}), '(3, 9, 0)\n', (4478, 4487), False, 'import piglow\n'), ((4496, 4520), 'piglow.single', 'piglow.single', (['(3)', '(7)', '(255)'], {}), '(3, 7, 255)\n', (4509, 4520), False, 'import piglow\n'), ((4529, 4542), 'piglow.show', 'piglow.show', ([], {}), '()\n', (4540, 4542), False, 'import piglow\n'), ((4551, 4566), 'time.sleep', 'time.sleep', (['(3.0)'], {}), '(3.0)\n', (4561, 4566), False, 'import time\n'), ((4575, 4597), 'piglow.single', 'piglow.single', (['(3)', '(7)', '(0)'], {}), '(3, 7, 0)\n', (4588, 4597), False, 'import piglow\n'), ((4606, 4630), 'piglow.single', 'piglow.single', (['(3)', '(6)', '(255)'], {}), '(3, 6, 255)\n', (4619, 4630), False, 'import piglow\n'), ((4639, 4652), 'piglow.show', 'piglow.show', ([], {}), '()\n', (4650, 4652), False, 'import piglow\n'), ((4758, 4772), 'piglow.clear', 'piglow.clear', ([], {}), '()\n', (4770, 4772), False, 'import piglow\n'), ((4777, 4790), 'piglow.show', 'piglow.show', ([], {}), '()\n', (4788, 4790), False, 'import piglow\n')]
|
"""
Test Stories module.
This module contains tests for Stories objects.
"""
import pytest
from esak import exceptions
def test_known_story(talker):
sm = talker.story(35505)
assert sm.title == "Spider-Man!"
assert sm.type == "story"
assert sm.thumbnail is None
assert sm.creators[0].name == "<NAME>"
assert sm.creators[0].resource_uri == "http://gateway.marvel.com/v1/public/creators/32"
assert sm.creators[0].role == "inker"
assert sm.creators[1].name == "<NAME>"
assert sm.creators[1].role == "colorist"
assert sm.creators[1].resource_uri == "http://gateway.marvel.com/v1/public/creators/962"
assert len(sm.events) == 0
assert sm.series[0].name == "Amazing Fantasy (1962)"
assert sm.series[0].resource_uri == "http://gateway.marvel.com/v1/public/series/2987"
assert sm.series[1].name == "AMAZING FANTASY OMNIBUS HC (2007)"
assert sm.series[1].resource_uri == "http://gateway.marvel.com/v1/public/series/2707"
assert sm.original_issue.id == 16926
assert sm.original_issue.name == "Amazing Fantasy (1962) #15"
assert sm.original_issue.resource_uri == "http://gateway.marvel.com/v1/public/comics/16926"
assert sm.characters[0].id == 1009610
assert sm.characters[0].name == "Spider-Man (<NAME>)"
assert (
sm.characters[0].resource_uri
== "http://gateway.marvel.com/v1/public/characters/1009610"
)
assert len(sm.comics) == 2
assert sm.comics[0].id == 16926
assert sm.comics[0].name == "Amazing Fantasy (1962) #15"
assert sm.comics[0].resource_uri == "http://gateway.marvel.com/v1/public/comics/16926"
assert len(sm.characters) == 1
assert sm.characters[0].id == 1009610
assert sm.characters[0].name == "Spider-Man (<NAME>)"
assert sm.characters[0].role is None
assert (
sm.characters[0].resource_uri
== "http://gateway.marvel.com/v1/public/characters/1009610"
)
def test_bad_story(talker):
with pytest.raises(exceptions.ApiError):
talker.story(-1)
def test_stories_list(talker):
stories_lst = talker.stories_list(
{
"orderBy": "modified",
}
)
stories_iter = iter(stories_lst)
assert (next(stories_iter).id) == 32039
assert (next(stories_iter).id) == 41777
assert (next(stories_iter).id) == 8186
assert len(stories_lst) == 20
assert stories_lst[2].id == 8186
def test_story_characters(talker):
sm = talker.story_characters(35505)
assert len(sm) == 1
peter = sm[0]
assert peter.id == 1009610
assert peter.name == "Spider-Man (<NAME>)"
assert len(peter.comics) == 20
assert len(peter.events) == 20
assert len(peter.series) == 20
assert len(peter.stories) == 20
def test_story_comics(talker):
sm = talker.story_comics(35505)
assert len(sm) == 2
af = sm[1]
assert af.id == 16926
assert af.format == "Comic"
assert af.issue_number == 15
assert af.title == "Amazing Fantasy (1962) #15"
def test_story_creators(talker):
sm = talker.story_creators(35505)
assert len(sm) == 4
ditko = sm[0]
assert ditko.id == 32
assert ditko.full_name == "<NAME>"
assert len(ditko.comics) == 20
assert len(ditko.events) == 1
assert len(ditko.series) == 20
assert len(ditko.stories) == 20
def test_story_events(talker):
sm = talker.story_events(113981)
assert len(sm) == 1
sw = sm[0]
assert sw.id == 323
assert sw.title == "Secret Wars (2015)"
assert sw.next.id == 332
assert sw.previous.id == 321
def test_story_series(talker):
sm = talker.story_series(35505)
assert len(sm) == 2
af = sm[0]
assert af.id == 2987
assert af.start_year == 1962
assert af.end_year == 1962
assert af.title == "Amazing Fantasy (1962)"
|
[
"pytest.raises"
] |
[((1969, 2003), 'pytest.raises', 'pytest.raises', (['exceptions.ApiError'], {}), '(exceptions.ApiError)\n', (1982, 2003), False, 'import pytest\n')]
|
## Note: the package always contains both libraries (shared and static) independent of the `shared` setting
## that is done to avoid patching the CMakeLists file
from conans import ConanFile, CMake, tools
from conans.errors import ConanInvalidConfiguration
import os
import glob
import shutil
class ApacheMilagro(ConanFile):
name = "milagro"
description = "Milagro is core security infrastructure and crypto libraries for decentralized networks and distributed systems."
topics = ("conan", "milagro", "cryptography")
homepage = "https://github.com/apache/incubator-milagro-crypto-c"
url = "https://github.com/nemtech/symbol-server-dependencies.git"
license = ("Apache-2.0")
exports_sources = ["CMakeLists.txt"]
generators = "cmake" #, "cmake_find_package"
settings = "os", "compiler", "build_type", "arch"
options = {
"shared": [True, False],
"fPIC": [True, False]
}
default_options = {
"shared": True,
"fPIC": True
}
_source_subfolder = "source_subfolder"
_build_subfolder = "build_subfolder"
def config_options(self):
if self.settings.os == "Windows":
if self.settings.compiler == "Visual Studio" and tools.Version(self.settings.compiler.version.value) < 15:
raise ConanInvalidConfiguration("{} {}, 'Symbol' packages do not support Visual Studio < 15".format(self.name, self.version))
del self.options.fPIC
minimal_cpp_standard = "11"
if self.settings.compiler.cppstd:
tools.check_min_cppstd(self, minimal_cpp_standard)
def configure(self):
if self.settings.arch not in ["x86_64"]:
raise ConanInvalidConfiguration("'Symbol' packages support only x64 arch")
def _configure_cmake(self):
cmake = CMake(self)
# hack, force Release, due to check inside milagro cmakelists file
if self.settings.os == "Windows" and self.settings.compiler == "Visual Studio":
cmake.definitions["CMAKE_BUILD_TYPE"] = 'Release'
cmake.definitions["BUILD_TESTING"] = False
cmake.definitions["BUILD_PYTHON"] = False
cmake.definitions["BUILD_EXAMPLES"] = False
cmake.definitions["BUILD_BENCHMARKS"] = False
cmake.definitions["BUILD_DOCS"] = False
cmake.configure(build_folder=self._build_subfolder)
return cmake
def source(self):
tools.get(**self.conan_data["sources"][self.version])
extracted_dir = "incubator-milagro-crypto-c-{version}".format(version = self.version)
os.rename(extracted_dir, self._source_subfolder)
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
self.copy("LICENSE", dst="licenses", src=self._source_subfolder)
cmake = self._configure_cmake()
cmake.install()
def package_info(self):
self.cpp_info.names["cmake_find_package"] = "AMCL"
self.cpp_info.names["cmake_find_package_multi"] = "AMCL"
self.cpp_info.libs = tools.collect_libs(self)
|
[
"conans.tools.get",
"os.rename",
"conans.tools.Version",
"conans.CMake",
"conans.tools.check_min_cppstd",
"conans.errors.ConanInvalidConfiguration",
"conans.tools.collect_libs"
] |
[((1800, 1811), 'conans.CMake', 'CMake', (['self'], {}), '(self)\n', (1805, 1811), False, 'from conans import ConanFile, CMake, tools\n'), ((2407, 2460), 'conans.tools.get', 'tools.get', ([], {}), "(**self.conan_data['sources'][self.version])\n", (2416, 2460), False, 'from conans import ConanFile, CMake, tools\n'), ((2563, 2611), 'os.rename', 'os.rename', (['extracted_dir', 'self._source_subfolder'], {}), '(extracted_dir, self._source_subfolder)\n', (2572, 2611), False, 'import os\n'), ((3039, 3063), 'conans.tools.collect_libs', 'tools.collect_libs', (['self'], {}), '(self)\n', (3057, 3063), False, 'from conans import ConanFile, CMake, tools\n'), ((1534, 1584), 'conans.tools.check_min_cppstd', 'tools.check_min_cppstd', (['self', 'minimal_cpp_standard'], {}), '(self, minimal_cpp_standard)\n', (1556, 1584), False, 'from conans import ConanFile, CMake, tools\n'), ((1678, 1746), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['"""\'Symbol\' packages support only x64 arch"""'], {}), '("\'Symbol\' packages support only x64 arch")\n', (1703, 1746), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((1208, 1259), 'conans.tools.Version', 'tools.Version', (['self.settings.compiler.version.value'], {}), '(self.settings.compiler.version.value)\n', (1221, 1259), False, 'from conans import ConanFile, CMake, tools\n')]
|
# Generated by Django 2.2.5 on 2019-11-10 18:29
from django.db import migrations, models
def fix_cassette_coords(apps, schema_editor):
"""
Update Cassette location coordinates to be more centered since labels
are now shown entirely within each sample location
"""
ContainerType = apps.get_model('lims', 'ContainerType')
db_alias = schema_editor.connection.alias
if ContainerType.objects.using(db_alias).filter(name='Cassette').exists():
cassette = ContainerType.objects.using(db_alias).get(name='Cassette')
xcoords = cassette.coords.values_list('x', flat=True)
ycoords = cassette.coords.values_list('y', flat=True)
xoffset = 0.5*(1-max(xcoords) - min(xcoords))
yoffset = 0.5*(1-max(ycoords) - min(ycoords))
cassette.coords.update(x=(models.F('x') + xoffset), y=(models.F('y') + yoffset))
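        # Worked example (assumed values): if x spans [0.1, 0.5], then
        # xoffset = 0.5*(1 - 0.5 - 0.1) = 0.2, shifting the range to
        # [0.3, 0.7], i.e. centred on 0.5.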
class Migration(migrations.Migration):
dependencies = [
('lims', '0026_auto_20191109_1921'),
]
operations = [
migrations.RunPython(fix_cassette_coords),
]
|
[
"django.db.migrations.RunPython",
"django.db.models.F"
] |
[((1009, 1050), 'django.db.migrations.RunPython', 'migrations.RunPython', (['fix_cassette_coords'], {}), '(fix_cassette_coords)\n', (1029, 1050), False, 'from django.db import migrations, models\n'), ((812, 825), 'django.db.models.F', 'models.F', (['"""x"""'], {}), "('x')\n", (820, 825), False, 'from django.db import migrations, models\n'), ((841, 854), 'django.db.models.F', 'models.F', (['"""y"""'], {}), "('y')\n", (849, 854), False, 'from django.db import migrations, models\n')]
|
import json
from gocd.api.endpoint import Endpoint
__all__ = ['PipelineConfig']
class PipelineConfig(Endpoint):
base_path = 'go/api/admin/pipelines'
id = 'name'
#: The result of a job/stage has been finalised when these values are set
final_results = ['Passed', 'Failed']
def __init__(self, server, name, api_version=10):
"""A wrapper for the `Go pipeline config API`__
.. __: https://api.go.cd/current/#pipeline-config
Args:
server (Server): A configured instance of
:class:gocd.server.Server
name (str): The name of the pipeline we're working on
"""
self.server = server
self.name = name
self.api_version = api_version
def get(self):
"""Gets pipeline config for specified pipeline name.
See `The pipeline config object`__ for example responses.
.. __: https://api.go.cd/current/#the-pipeline-config-object
Returns:
Response: :class:`gocd.api.response.Response` object
"""
return self._get(self.name, headers={"Accept": self._accept_header_value})
def edit(self, config, etag):
"""Update pipeline config for specified pipeline name.
.. __: https://api.go.cd/current/#edit-pipeline-config
Returns:
Response: :class:`gocd.api.response.Response` object
"""
data = self._json_encode(config)
headers = self._default_headers()
if etag is not None:
headers["If-Match"] = etag
return self._request(self.name,
ok_status=None,
data=data,
headers=headers,
method="PUT")
def create(self, config, group=None):
"""Creates a new pipeline with the given config in the given group.
If the `group` parameter is None, it's expected there will be a field `group` in config
.. __: https://api.go.cd/current/#edit-pipeline-config
Returns:
Response: :class:`gocd.api.response.Response` object
"""
assert config["name"] == self.name, "Given config is not for this pipeline"
if group is None:
assert "group" in config, "Given config has no group"
group = config["group"]
data = self._json_encode({
"group": group,
"pipeline": config
})
headers = self._default_headers()
return self._request("",
ok_status=None,
data=data,
headers=headers)
def _default_headers(self):
return {"Accept": self._accept_header_value,
"Content-Type": "application/json"}
@property
def _accept_header_value(self):
return "application/vnd.go.cd.v{0}+json".format(self.api_version)
@staticmethod
def _json_encode(config):
return json.dumps(config)
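# Hypothetical usage sketch. `from gocd import Server` is how the gocd package
# is typically used, but the response accessors below are assumptions, so the
# whole sketch is left commented out:
#
#   from gocd import Server
#   server = Server('http://localhost:8153', user='admin', password='secret')
#   pipeline = PipelineConfig(server, 'my-pipeline')
#   response = pipeline.get()
#   config, etag = response.payload, response.etag  # assumed accessors
#   pipeline.edit(config, etag)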
|
[
"json.dumps"
] |
[((2977, 2995), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (2987, 2995), False, 'import json\n')]
|
import random
import math
def media(X):
return sum(X) / len(X)
def varianza(X):
mu = media(X)
acumulador = 0
for x in X:
acumulador += (x - mu)**2
return acumulador / len(X)
def desviacion_estandar(X):
return math.sqrt(varianza(X))
if __name__ == '__main__':
X = [random.randint(1, 21) for i in range(20)]
    X.sort() # Sorting the array
mu = media(X)
Var = varianza(X)
sigma = desviacion_estandar(X)
    print(f'Array X: {X}')
    print(f'Mean: {mu}')
    print(f'Variance: {Var}')
    print(f'Standard deviation: {round(sigma, 2)}')
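    # Cross-check with the standard library: statistics.pvariance/pstdev
    # compute the same population variance / standard deviation as above.
    import statistics
    assert abs(Var - statistics.pvariance(X)) < 1e-9
    assert abs(sigma - statistics.pstdev(X)) < 1e-9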
|
[
"random.randint"
] |
[((303, 324), 'random.randint', 'random.randint', (['(1)', '(21)'], {}), '(1, 21)\n', (317, 324), False, 'import random\n')]
|
import json
import requests
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['translate']
	helptext = "Translates the provided text. The first argument should be a two-letter language code ('it' for Italian, etc.) if you want to translate from English, " \
"or the source language and the target language separated by a '|' (So 'fi|en' to translate from Finnish to English)"
def execute(self, message):
"""
:type message: IrcMessage
"""
#API reference: http://mymemory.translated.net/doc/spec.php
if message.messagePartsLength == 0:
replytext = "This module " + self.helptext[0].lower() + self.helptext[1:]
elif message.messagePartsLength == 1:
replytext = "That's not enough parameters, I'm gonna need both a language identifier and some text"
elif '|' not in message.messageParts[0] and len(message.messageParts[0]) != 2:
replytext = "If you only provide a single language code, it should be a two-letter language identifier, I'm not sure how to interpret '{}'".format(message.messageParts[0])
elif '|' in message.messageParts[0] and len(message.messageParts[0]) != 5:
replytext = "If you provide two language codes with a separator, both codes can only be two letters long, I'm not sure which languages '{}' refers to".format(message.messageParts[0])
else:
#Let's just assume everything is right, we've done enough checks. Send in the data!
lang = message.messageParts[0]
if '|' not in lang:
lang = 'en|' + lang
params = {'q': ' '.join(message.messageParts[1:]), 'langpair': lang, 'of': 'json'}
try:
result = json.loads(requests.get('http://api.mymemory.translated.net/get', params=params, timeout=15.0).text)
except requests.exceptions.Timeout:
message.reply("Apparently that's such a difficult {} the translation API had some trouble with it and/or has given up. "
"Either way the API took too long to respond, sorry".format('sentence' if ' ' in params['q'] else 'word'))
return
if result['responseStatus'] != 200:
#Something went wrong, the error is in 'responseDetails' (though sometimes that field is not there)
# It's in all-caps though, so reduce the shouting a bit
error = result.get('responseDetails')
if not error:
error = "Unknown Error"
error = error.lower()
#An invalid language code gives an error message that's too long and a bit confusing. Correct that
if 'is an invalid target language' in error:
error = error[:error.index(' . example')] + '. Look for the right ISO 639-1 code here: https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes'
replytext = "Something went wrong with your query: " + error
else:
#The main translation returned doesn't take quality into account. Look through the matches ourselves
translationMatchIndex = -1.0
translation = ""
for match in result['matches']:
#Stored quality can either be an integer or a string
currentQuality = match['quality']
if not isinstance(currentQuality, int):
try:
currentQuality = int(currentQuality)
except (ValueError, TypeError):
continue
#Ignore bad translations
if currentQuality <= 50:
continue
if match['match'] > translationMatchIndex:
translationMatchIndex = match['match']
translation = match['translation'].encode('utf-8')
if len(translation) == 0:
replytext = "Translation is empty, sorry. Are you sure you entered something? If so, sorry!"
else:
replytext = "Translation: " + translation
message.bot.sendMessage(message.source, replytext)
|
[
"requests.get"
] |
[((1661, 1748), 'requests.get', 'requests.get', (['"""http://api.mymemory.translated.net/get"""'], {'params': 'params', 'timeout': '(15.0)'}), "('http://api.mymemory.translated.net/get', params=params,\n timeout=15.0)\n", (1673, 1748), False, 'import requests\n')]
|
import setuptools
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open("README.md", "r") as fh:
long_description = fh.read()
requires = ["loguru", "elasticsearch == 7.11.0"]
setup(name="elasticSearch_collections",
description="Collections of ElasticSearch pyscripts for human",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
version="1.1",
author="<NAME>",
author_email="<EMAIL>",
maintainer="<NAME>",
maintainer_email="<EMAIL>",
url="https://github.com/AlexNg9527/ElasticSearchCollections",
packages=setuptools.find_packages(),
install_requires=requires,
classifiers=[
'Programming Language :: Python :: 3',
])
|
[
"setuptools.find_packages"
] |
[((677, 703), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (701, 703), False, 'import setuptools\n')]
|
#!/usr/bin/env python2
# PYTHON_ARGCOMPLETE_OK
"""
pytest: unit and functional testing with Python.
"""
__all__ = [
'main',
'UsageError',
'cmdline',
'hookspec',
'hookimpl',
'__version__',
]
if __name__ == '__main__': # if run as a script or by 'python -m pytest'
# we trigger the below "else" condition by the following import
import pytest
import sys
if sys.platform == 'win32':
        #Try to avoid opening a dialog box if one of the tests causes a system error
import ctypes
winapi = ctypes.windll.kernel32
SetErrorMode = winapi.SetErrorMode
SetErrorMode.argtypes=[ctypes.c_int]
SEM_FAILCRITICALERRORS = 1
SEM_NOGPFAULTERRORBOX = 2
SEM_NOOPENFILEERRORBOX = 0x8000
flags = SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX
#Since there is no GetErrorMode, do a double Set
old_mode = SetErrorMode(flags)
SetErrorMode(old_mode | flags)
raise SystemExit(pytest.main())
# else we are imported
from _pytest.config import (
main, UsageError, _preloadplugins, cmdline,
hookspec, hookimpl
)
from _pytest import __version__
_preloadplugins() # to populate pytest.* namespace so help(pytest) works
|
[
"pytest.main",
"_pytest.config._preloadplugins"
] |
[((1185, 1202), '_pytest.config._preloadplugins', '_preloadplugins', ([], {}), '()\n', (1200, 1202), False, 'from _pytest.config import main, UsageError, _preloadplugins, cmdline, hookspec, hookimpl\n'), ((1010, 1023), 'pytest.main', 'pytest.main', ([], {}), '()\n', (1021, 1023), False, 'import pytest\n')]
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import os
project_name = "reco-tut-asr"; branch = "main"; account = "sparsh-ai"
project_path = os.path.join('/content', project_name)
if not os.path.exists(project_path):
get_ipython().system(u'cp /content/drive/MyDrive/mykeys.py /content')
import mykeys
get_ipython().system(u'rm /content/mykeys.py')
path = "/content/" + project_name;
get_ipython().system(u'mkdir "{path}"')
get_ipython().magic(u'cd "{path}"')
import sys; sys.path.append(path)
get_ipython().system(u'git config --global user.email "<EMAIL>"')
get_ipython().system(u'git config --global user.name "reco-tut"')
get_ipython().system(u'git init')
get_ipython().system(u'git remote add origin https://"{mykeys.git_token}":x-oauth-basic@github.com/"{account}"/"{project_name}".git')
get_ipython().system(u'git pull origin "{branch}"')
get_ipython().system(u'git checkout main')
else:
get_ipython().magic(u'cd "{project_path}"')
# In[8]:
import random
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeRegressor
import matplotlib.pyplot as plt
# In[25]:
items = pd.read_csv('./data/silver/items.csv')
items.head()
# In[26]:
ratings = pd.read_csv('./data/silver/ratings.csv')
ratings.head()
# In[27]:
cbf = pd.read_csv('./data/gold/cbf.csv')
item_item = pd.read_csv('./data/gold/item-item.csv')
user_user = pd.read_csv('./data/gold/user-user.csv')
pers_bias = pd.read_csv('./data/gold/pers-bias.csv')
mf = pd.read_csv('./data/gold/mf.csv')
# In[28]:
# preprocess
cbf = cbf.apply(lambda col: col.apply(lambda elem: str(elem).replace(',', '.'))).astype(float)
user_user = user_user.apply(lambda col: col.apply(lambda elem: str(elem).replace(',', '.'))).astype(float)
item_item = item_item.apply(lambda col: col.apply(lambda elem: str(elem).replace(',', '.'))).astype(float)
mf = mf.apply(lambda col: col.apply(lambda elem: str(elem).replace(',', '.'))).astype(float)
pers_bias = pers_bias.apply(lambda col: col.apply(lambda elem: str(elem).replace(',', '.'))).astype(float)
# In[29]:
recs = [cbf, item_item, user_user, pers_bias, mf]
recs_names = ['cbf', 'item_item', 'user_user', 'pers_bias', 'mf']
# ## Metrics
# In[30]:
def get_ratings(user_id):
user_ratings = ratings[user_id]
actual_ratings = user_ratings[~np.isnan(user_ratings)]
return actual_ratings
def get_top_n(user_id, n):
top_n = {}
for rec, rec_name in zip(recs, recs_names):
top_n_items = rec[user_id].argsort().sort_values()[:n].index.values
top_n[rec_name] = top_n_items
return top_n
def get_popular_items(n):
pop_percentages = ratings.copy()
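    # Each row's popularity = fraction of users who rated the item;
    # np.sum(~np.isnan(row)) counts non-NaN entries, and the -1 presumably
    # discounts the always-present 'item' id column (an inference).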
pop_percentages['popularity'] = ratings.apply(lambda row: np.sum(~np.isnan(row))-1, axis=1)/len(ratings.columns[1::])
pop_percentages = pop_percentages.sort_values(by = 'popularity', ascending=False)
return pop_percentages.item.values[:n]
def get_rmse(user_id):
user_ratings = get_ratings(user_id)
rmse = {}
for rec, rec_name in zip(recs, recs_names):
predicted_ratings = rec.loc[user_ratings.index, user_id]
temp = np.sqrt(np.average((predicted_ratings - user_ratings)**2))
rmse[rec_name] = temp
return rmse
def get_precision_at_n(user_id, n):
top_n = get_top_n(user_id, n)
user_ratings = get_ratings(user_id).index.values
precisions = {}
for rec, rec_name in zip(recs, recs_names):
temp = np.sum(np.isin(top_n[rec_name], user_ratings))/n
precisions[rec_name] = temp
return precisions
# We will use the "FullCat" column in the items catalog to determine the product diversity in the recommendations.
# The recommender with a high number of distinct product categories in its recommendations is said to be product-diverse
def get_product_diversity(user_id, n):
top_n = get_top_n(user_id, n)
product_diversity = {}
for rec_name in top_n:
categories = items.loc[top_n[rec_name]][['FullCat']].values
categories = set([item for sublist in categories for item in sublist])
product_diversity[rec_name] = len(categories)
return product_diversity
# We will use the "Price" column in the items catalog to determine cost diversity in the recommendations.
# The recommender with a high standard deviation in the cost across all its recommendations is said to be cost-diverse
def get_cost_diversity(user_id, n):
top_n = get_top_n(user_id,n)
cost_diversity = {}
for rec_name in top_n:
std_dev = np.std(items.loc[top_n[rec_name]][['Price']].values)
cost_diversity[rec_name] = std_dev
return cost_diversity
# We will use inverse popularity as a measure of serendipity.
# The recommender with the fewest recommendations on the "most popular" list will be called the most serendipitous
def get_serendipity(user_id, n):
top_n = get_top_n(user_id,n)
popular_items = get_popular_items(20)
serendipity = {}
for rec, rec_name in zip(recs, recs_names):
popularity = np.sum(np.isin(top_n[rec_name],popular_items))
if int(popularity) == 0:
serendipity[rec_name] = 1
else:
serendipity[rec_name] = 1/popularity
return serendipity
# In[31]:
avg_metrics = {}
for name in recs_names:
avg_metrics[name] = {"rmse": [], "precision_at_n": [], "product_diversity": [], "cost_diversity": [], "serendipity": []}
for user_id in ratings.columns:
if user_id == 'item':
continue
user_id = str(user_id)
rmse = get_rmse(user_id)
precision_at_n = get_precision_at_n(user_id, 10)
product_diversity = get_product_diversity(user_id, 10)
cost_diversity = get_cost_diversity(user_id, 10)
serendipity = get_serendipity(user_id, 10)
for key in avg_metrics:
rec_name = avg_metrics[key]
rec_name['rmse'].append(rmse[key])
rec_name['precision_at_n'].append(precision_at_n[key])
rec_name['product_diversity'].append(product_diversity[key])
rec_name['cost_diversity'].append(cost_diversity[key])
rec_name['serendipity'].append(serendipity[key])
# The Price for certain items is not available. Also rmse for certain users is turning out to be NaN.
# Ignoring nans in the average metric calculation for now. So basically narrowing down the evaluation to users who have
# rated at least one item and items for which the price is known.
for key in avg_metrics:
rec_name = avg_metrics[key]
for metric in rec_name:
temp = rec_name[metric]
temp = [x for x in temp if not np.isnan(x)]
rec_name[metric] = sum(temp) / len(temp)
# In[32]:
avg_metrics
# ## Hybridization
# In[33]:
# Creating a dataframe with ratings from all algorithms and user_ratings as ground truth
users = []
items = []
user_ratings = []
cbf_ratings = []
user_user_ratings = []
item_item_ratings = []
mf_ratings = []
pers_bias_ratings = []
for user_id in ratings.columns:
if user_id == 'item':
continue
user_id = str(user_id)
true_ratings = get_ratings(user_id)
user_ratings.extend(true_ratings.values)
users.extend([user_id]*len(true_ratings))
items.extend(ratings.loc[true_ratings.index].item.values)
cbf_ratings.extend(cbf.loc[true_ratings.index, user_id].values)
item_item_ratings.extend(item_item.loc[true_ratings.index, user_id].values)
user_user_ratings.extend(user_user.loc[true_ratings.index, user_id].values)
pers_bias_ratings.extend(pers_bias.loc[true_ratings.index, user_id].values)
mf_ratings.extend(mf.loc[true_ratings.index, user_id].values)
df = pd.DataFrame({'user': users, 'item': items,'true_rating': user_ratings, 'cbf':cbf_ratings, 'item_item':item_item_ratings, 'user_user': user_user_ratings, 'pers_bias':pers_bias_ratings, 'mf':mf_ratings})
# In[34]:
df = df.dropna()
# In[35]:
df.head()
# ### Linear Combination
# In[39]:
clf = LinearRegression()
# In[40]:
# Split data in 80-20 train and test sets
train = df[0:(int(0.8*len(df)))]
test = df[(int(0.8*len(df)))::]
# In[41]:
train_data = train.drop(['user', 'item','true_rating'], axis=1)
train_labels = train.true_rating.values
model = clf.fit(train_data, train_labels)
# In[42]:
test_data = test.drop(['user', 'item','true_rating'], axis=1)
test_labels = test.true_rating.values
predictions = model.predict(test_data)
# In[44]:
# Avg RMSE predictions
avg_rmse = np.sqrt(np.average((predictions - test_labels)**2))
avg_rmse
# #### Top 5 for three users
# In[46]:
# Pick three users
users = random.sample(list(ratings.columns[1::]), 3)
print(users)
# In[47]:
train_data = df.drop(['user', 'item','true_rating'], axis=1)
train_labels = df.true_rating.values
model = clf.fit(train_data, train_labels)
# In[ ]:
top_5 = {}
for user in users:
df_preds = df[df.user == user]
preds = model.predict(df_preds.drop(['user', 'item','true_rating'], axis=1))
df_preds['predictions'] = preds
top_5_items = list(df_preds.sort_values(by=['predictions'], ascending=False)[:5].item.values)
top_5[user] = top_5_items
# In[49]:
top_5
# ### Non-linear Combination
# For a non-linear combination of the algorithms, we'll use the DecisionTreeRegressor method in scikitlearn
# In[51]:
clf = DecisionTreeRegressor()
# In[52]:
# Split data in 80-20 train and test sets
train = df[0:(int(0.8*len(df)))]
test = df[(int(0.8*len(df)))::]
# In[53]:
train_data = train.drop(['user', 'item','true_rating'], axis=1)
train_labels = train.true_rating.values
model = clf.fit(train_data, train_labels)
# In[54]:
test_data = test.drop(['user', 'item','true_rating'], axis=1)
test_labels = test.true_rating.values
predictions = model.predict(test_data)
# In[55]:
# Avg RMSE predictions
avg_rmse = np.sqrt(np.average((predictions - test_labels)**2))
avg_rmse
# #### Top-5 for 3 users
# In[56]:
# Using the same users as above to compare across the same users
users = ['3430', '112', '1817']
# In[57]:
train_data = df.drop(['user', 'item','true_rating'], axis=1)
train_labels = df.true_rating.values
model = clf.fit(train_data, train_labels)
# In[ ]:
top_5 = {}
for user in users:
df_preds = df[df.user == user]
preds = model.predict(df_preds.drop(['user', 'item','true_rating'], axis=1))
df_preds['predictions'] = preds
top_5_items = list(df_preds.sort_values(by=['predictions'], ascending=False)[:5].item.values)
top_5[user] = top_5_items
# In[59]:
top_5
# ## Different recommenders based on user type
# This hybridization technique aims to create separate recommender strategies for two scenarios: one where users end up on the Nile-River.com landing page via banner ads for school products, and another where users arrive at the landing page via endorsements for office products. For the first scenario we'll pick a 3:2 ratio of school (inexpensive) to office (expensive) products, and the reverse, i.e. a 2:3 ratio, for the second scenario. Here we will show the evaluation only for the first scenario.
# In[68]:
# Determine threshold to label an item cheap or expensive - let's set this as the third quartile of the price list
# This is assuming office products are mostly in the expensive bracket
items = pd.read_csv('./data/silver/items.csv') # the items DataFrame was overwritten by a list above, so load it back
prices = items.Price.values
price_threshold = np.percentile([x for x in prices if not np.isnan(x)], 75)
# ### Performance
# In[69]:
def get_precision_at_n(user_id, top_n):
user_ratings = get_ratings(user_id).index.values
precision_at_n = np.sum(np.isin(top_n, user_ratings))/ len(top_n)
return precision_at_n
# In[70]:
def get_cost_diversity(top_n):
std_dev = np.std(items.loc[top_n][['Price']].values)
return std_dev
# In[71]:
def get_product_diversity(top_n):
categories = items.loc[top_n][['FullCat']].values
categories = set([item for sublist in categories for item in sublist])
return len(categories)
# In[72]:
def get_serendipity(top_n):
popular_items = get_popular_items(20)
popularity = np.sum(np.isin(top_n,popular_items))
if int(popularity) == 0:
serendipity = 1
else:
serendipity = 1/popularity
return serendipity
# In[73]:
# To pick which items to finally recommend, let's assume that all the items in the top-5 for each recommender are
# equally relevant. We could potentially include some ranking-based selection to pick items that are more relevant AND fit the
# cost criteria. For now, we'll pick at random since we're assuming all items are equally relevant.
def get_mixed_recs(user_id, n, n_cheap, n_exp):
top_n_overall_items = []
top_n_overall_prices = []
mixed_recs = []
for rec, rec_name in zip(recs, recs_names):
top_n_items = rec[user_id].argsort().sort_values()[:n].index.values
top_n_prices = items.loc[top_n_items][['Price']].values
top_n_overall_items.extend(top_n_items)
top_n_overall_prices.extend(top_n_prices)
top_dict = dict(zip(top_n_overall_items, top_n_overall_prices))
top_cheap = dict(filter(lambda elem: elem[1] <= price_threshold, top_dict.items())).keys()
top_exp = dict(filter(lambda elem: elem[1] > price_threshold, top_dict.items())).keys()
mixed_recs = random.sample(list(top_cheap), n_cheap) + random.sample(list(top_exp), n_exp)
return mixed_recs
# In[74]:
avg_metrics = {"precision_at_n": [], "product_diversity": [], "cost_diversity": [], "serendipity": []}
for user_id in ratings.columns:
if user_id == 'item':
continue
user_id = str(user_id)
top_5 = get_mixed_recs(user_id, 5, 3, 2)
avg_metrics["precision_at_n"].append(get_precision_at_n(user_id, top_5))
avg_metrics["cost_diversity"].append(get_cost_diversity(top_5))
avg_metrics["product_diversity"].append(get_product_diversity(top_5))
avg_metrics["serendipity"].append(get_serendipity(top_5))
for metric in avg_metrics:
temp = avg_metrics[metric]
temp = [x for x in temp if not np.isnan(x)]
avg_metrics[metric] = sum(temp) / len(temp)
# In[75]:
avg_metrics
# ### Top-5 for three users
# In[76]:
# Assuming all three users ended up on the landing page through scenario 1 i.e. banner ads for school products
users = ['3430', '112', '1817']
# In[77]:
top_5 = {}
for user_id in users:
# For office products
# top_5[user_id] = get_mixed_recs(user_id, 5, 2, 3)
# For school products
top_5[user_id] = list(ratings.loc[get_mixed_recs(user_id, 5, 3, 2)].item.values)
# In[78]:
top_5
# ## Switching hybridization
# We will not be implementing this hybridization as such, but we will explore whether the strategy of using content-based filtering for new users (users with few/no ratings) or for items with few ratings is even reasonable for this dataset. For this, let's begin by visualizing the number of ratings for the users in the dataset.
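# Though not implemented here, a minimal sketch of such a switching rule
# (the 10-rating threshold is an assumption, motivated by the plots below):
def switched_prediction(user_id, item_index, min_ratings=10):
    # Fall back to content-based predictions for users with few ratings,
    # otherwise use the matrix-factorization predictions.
    if len(get_ratings(user_id)) < min_ratings:
        return cbf.loc[item_index, user_id]
    return mf.loc[item_index, user_id]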
# In[80]:
item_ratings = ratings.apply(lambda row: np.sum(~np.isnan(row))-1, axis=1)
# In[81]:
plt.hist(item_ratings)
plt.xlabel("Number of ratings")
plt.ylabel("number of items")
# In[82]:
# Number of items with < 10 ratings
count_less_than_10 = np.count_nonzero(item_ratings<10)/len(item_ratings)*100
# In[83]:
count_less_than_10
# In[84]:
user_ratings = []
for user_id in ratings.columns:
if user_id == 'item':
continue
user_id = str(user_id)
user_ratings.append(len(get_ratings(user_id)))
# In[85]:
plt.hist(user_ratings)
plt.xlabel("Number of ratings")
plt.ylabel("number of users")
# In[86]:
# Number of users with < 10 ratings
count_less_than_10 = np.count_nonzero(np.array(user_ratings)<10)/len(user_ratings)*100
# In[87]:
count_less_than_10
|
[
"pandas.DataFrame",
"sys.path.append",
"numpy.isin",
"numpy.average",
"sklearn.tree.DecisionTreeRegressor",
"matplotlib.pyplot.hist",
"numpy.count_nonzero",
"pandas.read_csv",
"numpy.std",
"os.path.exists",
"numpy.isnan",
"sklearn.linear_model.LinearRegression",
"numpy.array",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"os.path.join"
] |
[((145, 183), 'os.path.join', 'os.path.join', (['"""/content"""', 'project_name'], {}), "('/content', project_name)\n", (157, 183), False, 'import os\n'), ((1218, 1256), 'pandas.read_csv', 'pd.read_csv', (['"""./data/silver/items.csv"""'], {}), "('./data/silver/items.csv')\n", (1229, 1256), True, 'import pandas as pd\n'), ((1301, 1341), 'pandas.read_csv', 'pd.read_csv', (['"""./data/silver/ratings.csv"""'], {}), "('./data/silver/ratings.csv')\n", (1312, 1341), True, 'import pandas as pd\n'), ((1384, 1418), 'pandas.read_csv', 'pd.read_csv', (['"""./data/gold/cbf.csv"""'], {}), "('./data/gold/cbf.csv')\n", (1395, 1418), True, 'import pandas as pd\n'), ((1431, 1471), 'pandas.read_csv', 'pd.read_csv', (['"""./data/gold/item-item.csv"""'], {}), "('./data/gold/item-item.csv')\n", (1442, 1471), True, 'import pandas as pd\n'), ((1484, 1524), 'pandas.read_csv', 'pd.read_csv', (['"""./data/gold/user-user.csv"""'], {}), "('./data/gold/user-user.csv')\n", (1495, 1524), True, 'import pandas as pd\n'), ((1537, 1577), 'pandas.read_csv', 'pd.read_csv', (['"""./data/gold/pers-bias.csv"""'], {}), "('./data/gold/pers-bias.csv')\n", (1548, 1577), True, 'import pandas as pd\n'), ((1583, 1616), 'pandas.read_csv', 'pd.read_csv', (['"""./data/gold/mf.csv"""'], {}), "('./data/gold/mf.csv')\n", (1594, 1616), True, 'import pandas as pd\n'), ((7652, 7867), 'pandas.DataFrame', 'pd.DataFrame', (["{'user': users, 'item': items, 'true_rating': user_ratings, 'cbf':\n cbf_ratings, 'item_item': item_item_ratings, 'user_user':\n user_user_ratings, 'pers_bias': pers_bias_ratings, 'mf': mf_ratings}"], {}), "({'user': users, 'item': items, 'true_rating': user_ratings,\n 'cbf': cbf_ratings, 'item_item': item_item_ratings, 'user_user':\n user_user_ratings, 'pers_bias': pers_bias_ratings, 'mf': mf_ratings})\n", (7664, 7867), True, 'import pandas as pd\n'), ((7956, 7974), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (7972, 7974), False, 'from sklearn.linear_model import LinearRegression\n'), ((9304, 9327), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {}), '()\n', (9325, 9327), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((11306, 11344), 'pandas.read_csv', 'pd.read_csv', (['"""./data/silver/items.csv"""'], {}), "('./data/silver/items.csv')\n", (11317, 11344), True, 'import pandas as pd\n'), ((15098, 15120), 'matplotlib.pyplot.hist', 'plt.hist', (['item_ratings'], {}), '(item_ratings)\n', (15106, 15120), True, 'import matplotlib.pyplot as plt\n'), ((15121, 15152), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of ratings"""'], {}), "('Number of ratings')\n", (15131, 15152), True, 'import matplotlib.pyplot as plt\n'), ((15153, 15182), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""number of items"""'], {}), "('number of items')\n", (15163, 15182), True, 'import matplotlib.pyplot as plt\n'), ((15542, 15564), 'matplotlib.pyplot.hist', 'plt.hist', (['user_ratings'], {}), '(user_ratings)\n', (15550, 15564), True, 'import matplotlib.pyplot as plt\n'), ((15565, 15596), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of ratings"""'], {}), "('Number of ratings')\n", (15575, 15596), True, 'import matplotlib.pyplot as plt\n'), ((15597, 15626), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""number of users"""'], {}), "('number of users')\n", (15607, 15626), True, 'import matplotlib.pyplot as plt\n'), ((192, 220), 'os.path.exists', 'os.path.exists', (['project_path'], {}), '(project_path)\n', (206, 220), False, 'import os\n'), ((505, 526), 'sys.path.append', 
'sys.path.append', (['path'], {}), '(path)\n', (520, 526), False, 'import sys\n'), ((8465, 8509), 'numpy.average', 'np.average', (['((predictions - test_labels) ** 2)'], {}), '((predictions - test_labels) ** 2)\n', (8475, 8509), True, 'import numpy as np\n'), ((9818, 9862), 'numpy.average', 'np.average', (['((predictions - test_labels) ** 2)'], {}), '((predictions - test_labels) ** 2)\n', (9828, 9862), True, 'import numpy as np\n'), ((11790, 11832), 'numpy.std', 'np.std', (["items.loc[top_n][['Price']].values"], {}), "(items.loc[top_n][['Price']].values)\n", (11796, 11832), True, 'import numpy as np\n'), ((4587, 4639), 'numpy.std', 'np.std', (["items.loc[top_n[rec_name]][['Price']].values"], {}), "(items.loc[top_n[rec_name]][['Price']].values)\n", (4593, 4639), True, 'import numpy as np\n'), ((12164, 12193), 'numpy.isin', 'np.isin', (['top_n', 'popular_items'], {}), '(top_n, popular_items)\n', (12171, 12193), True, 'import numpy as np\n'), ((15254, 15289), 'numpy.count_nonzero', 'np.count_nonzero', (['(item_ratings < 10)'], {}), '(item_ratings < 10)\n', (15270, 15289), True, 'import numpy as np\n'), ((2408, 2430), 'numpy.isnan', 'np.isnan', (['user_ratings'], {}), '(user_ratings)\n', (2416, 2430), True, 'import numpy as np\n'), ((3215, 3266), 'numpy.average', 'np.average', (['((predicted_ratings - user_ratings) ** 2)'], {}), '((predicted_ratings - user_ratings) ** 2)\n', (3225, 3266), True, 'import numpy as np\n'), ((5095, 5134), 'numpy.isin', 'np.isin', (['top_n[rec_name]', 'popular_items'], {}), '(top_n[rec_name], popular_items)\n', (5102, 5134), True, 'import numpy as np\n'), ((11663, 11691), 'numpy.isin', 'np.isin', (['top_n', 'user_ratings'], {}), '(top_n, user_ratings)\n', (11670, 11691), True, 'import numpy as np\n'), ((3527, 3565), 'numpy.isin', 'np.isin', (['top_n[rec_name]', 'user_ratings'], {}), '(top_n[rec_name], user_ratings)\n', (3534, 3565), True, 'import numpy as np\n'), ((11491, 11502), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (11499, 11502), True, 'import numpy as np\n'), ((14093, 14104), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (14101, 14104), True, 'import numpy as np\n'), ((15715, 15737), 'numpy.array', 'np.array', (['user_ratings'], {}), '(user_ratings)\n', (15723, 15737), True, 'import numpy as np\n'), ((6619, 6630), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (6627, 6630), True, 'import numpy as np\n'), ((15058, 15071), 'numpy.isnan', 'np.isnan', (['row'], {}), '(row)\n', (15066, 15071), True, 'import numpy as np\n'), ((2816, 2829), 'numpy.isnan', 'np.isnan', (['row'], {}), '(row)\n', (2824, 2829), True, 'import numpy as np\n')]
|
import sys
import unittest
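# On Python < 3.4 there is no asyncio in the standard library, so expose a
# single skipping placeholder test instead of importing the real test module.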
if sys.version_info<(3,4):
class TestDummy(unittest.TestCase):
def test_asyncio(self):
raise unittest.SkipTest("asyncio needs Py >=3.4")
else:
from .asynciotest import *
|
[
"unittest.SkipTest"
] |
[((145, 188), 'unittest.SkipTest', 'unittest.SkipTest', (['"""asyncio needs Py >=3.4"""'], {}), "('asyncio needs Py >=3.4')\n", (162, 188), False, 'import unittest\n')]
|
#!/usr/bin/env python
import os
import sys
# For coverage.
if __package__ is None:
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from unittest import main, TestCase
import requests
import requests_mock
from iris_sdk.client import Client
from iris_sdk.models.account import Account
XML_RESPONSE_DLDA_GET = (
b"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>"
b"<DldaOrderResponse><DldaOrder>"
b"<CustomerOrderId>5a88d16d-f8a9-45c5-a5db-137d700c6a22</CustomerOrderId>"
b"<OrderCreateDate>2014-07-10T12:38:11.833Z</OrderCreateDate>"
b"<AccountId>14</AccountId><CreatedByUser>jbm</CreatedByUser>"
b"<OrderId>ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4</OrderId>"
b"<LastModifiedDate>2014-07-10T12:38:11.833Z</LastModifiedDate>"
b"<ProcessingStatus>RECEIVED</ProcessingStatus><DldaTnGroups>"
b"<DldaTnGroup><TelephoneNumbers>"
b"<TelephoneNumber>2053778335</TelephoneNumber>"
b"<TelephoneNumber>2053865784</TelephoneNumber></TelephoneNumbers>"
b"<AccountType>BUSINESS</AccountType><ListingType>LISTED</ListingType>"
b"<ListingName><FirstName>Joe</FirstName><LastName>Smith</LastName>"
b"</ListingName><ListAddress>true</ListAddress><Address>"
b"<HouseNumber>12</HouseNumber><StreetName>ELM</StreetName>"
b"<City>New York</City><StateCode>NY</StateCode><Zip>10007</Zip>"
b"<Country>United States</Country><AddressType>Dlda</AddressType>"
b"</Address></DldaTnGroup></DldaTnGroups></DldaOrder>"
b"</DldaOrderResponse>"
)
XML_RESPONSE_DLDA_HISTORY = (
b"<?xml version=\"1.0\"?> <OrderHistoryWrapper><OrderHistory>"
b"<OrderDate>2014-09-04T16:28:11.320Z</OrderDate>"
b"<Note>The DL/DA request has been received</Note>"
b"<Author>jbm</Author><Status>RECEIVED</Status></OrderHistory>"
b"<OrderHistory><OrderDate>2014-09-04T16:28:18.742Z</OrderDate>"
b"<Note>The DL/DA request is being processed by our 3rd party supplier"
b"</Note><Author>jbm</Author><Status>PROCESSING</Status> </OrderHistory>"
b"<OrderHistory><OrderDate>2014-09-05T19:00:17.968Z</OrderDate>"
b"<Note>The DL/DA request is complete for all TNs</Note>"
b"<Author>jbm</Author><Status>COMPLETE</Status></OrderHistory>"
b"</OrderHistoryWrapper>"
)
XML_RESPONSE_DLDA_LIST = (
b"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>"
b"<ResponseSelectWrapper><ListOrderIdUserIdDate>"
b"<TotalCount>3</TotalCount><OrderIdUserIdDate>"
b"<accountId>14</accountId><CountOfTNs>2</CountOfTNs>"
b"<userId>team_ua</userId>"
b"<lastModifiedDate>2014-07-07T10:06:43.427Z</lastModifiedDate>"
b"<OrderType>dlda</OrderType>"
b"<OrderDate>2014-07-07T10:06:43.427Z</OrderDate>"
b"<orderId>37a6447c-1a0b-4be9-ba89-3f5cb0aea142</orderId>"
b"<OrderStatus>FAILED</OrderStatus></OrderIdUserIdDate>"
b"<OrderIdUserIdDate><accountId>14</accountId>"
b"<CountOfTNs>2</CountOfTNs><userId>team_ua</userId>"
b"<lastModifiedDate>2014-07-07T10:05:56.595Z</lastModifiedDate>"
b"<OrderType>dlda</OrderType>"
b"<OrderDate>2014-07-07T10:05:56.595Z</OrderDate>"
b"<orderId>743b0e64-3350-42e4-baa6-406dac7f4a85</orderId>"
b"<OrderStatus>RECEIVED</OrderStatus></OrderIdUserIdDate>"
b"<OrderIdUserIdDate><accountId>14</accountId>"
b"<CountOfTNs>2</CountOfTNs><userId>team_ua</userId>"
b"<lastModifiedDate>2014-07-07T09:32:17.234Z</lastModifiedDate>"
b"<OrderType>dlda</OrderType>"
b"<OrderDate>2014-07-07T09:32:17.234Z</OrderDate>"
b"<orderId>f71eb4d2-bfef-4384-957f-45cd6321185e</orderId>"
b"<OrderStatus>RECEIVED</OrderStatus></OrderIdUserIdDate>"
b"</ListOrderIdUserIdDate></ResponseSelectWrapper>"
)
XML_RESPONSE_DLDA_POST = (
b"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>"
b"<DldaOrderResponse><DldaOrder>"
b"<CustomerOrderId>5a88d16d-f8a9-45c5-a5db-137d700c6a22</CustomerOrderId>"
b"<OrderCreateDate>2014-07-10T12:38:11.833Z</OrderCreateDate>"
b"<AccountId>14</AccountId><CreatedByUser>jbm</CreatedByUser>"
b"<OrderId>ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4</OrderId>"
b"<LastModifiedDate>2014-07-10T12:38:11.833Z</LastModifiedDate>"
b"<ProcessingStatus>RECEIVED</ProcessingStatus><DldaTnGroups>"
b"<DldaTnGroup><TelephoneNumbers>"
b"<TelephoneNumber>2053778335</TelephoneNumber>"
b"<TelephoneNumber>2053865784</TelephoneNumber></TelephoneNumbers>"
b"<AccountType>BUSINESS</AccountType><ListingType>LISTED</ListingType>"
b"<ListingName><FirstName>Joe</FirstName><LastName>Smith</LastName>"
b"</ListingName><ListAddress>true</ListAddress><Address>"
b"<HouseNumber>12</HouseNumber><StreetName>ELM</StreetName>"
b"<City>New York</City><StateCode>NY</StateCode><Zip>10007</Zip>"
b"<Country>United States</Country><AddressType>Dlda</AddressType>"
b"</Address></DldaTnGroup></DldaTnGroups></DldaOrder>"
b"</DldaOrderResponse>"
)
class ClassDldaTest(TestCase):
"""Test DLDA orders"""
@classmethod
def setUpClass(cls):
cls._client = Client("http://foo", "bar", "bar", "qux")
cls._account = Account(client=cls._client)
@classmethod
def tearDownClass(cls):
del cls._client
del cls._account
def test_dlda_get(self):
with requests_mock.Mocker() as m:
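            # requests_mock intercepts the SDK's HTTP calls and serves the
            # canned XML fixture defined above, so no real server is needed.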
dlda = self._account.dldas.create()
dlda.id = "ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4"
url = self._client.config.url + dlda.get_xpath()
m.get(url, content=XML_RESPONSE_DLDA_GET)
dlda = self._account.dldas.get(dlda.id)
self.assertEqual(dlda.id, "ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4")
self.assertEqual(dlda.customer_order_id,
"5a88d16d-f8a9-45c5-a5db-137d700c6a22")
self.assertEqual(dlda.order_create_date,
"2014-07-10T12:38:11.833Z")
self.assertEqual(dlda.account_id, "14")
self.assertEqual(dlda.created_by_user, "jbm")
self.assertEqual(dlda.order_id,
"ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4")
self.assertEqual(dlda.last_modified_date,
"2014-07-10T12:38:11.833Z")
self.assertEqual(dlda.processing_status, "RECEIVED")
grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
self.assertEqual(
grp.telephone_numbers.telephone_number.items,
["2053778335","2053865784"]
)
self.assertEqual(grp.account_type, "BUSINESS")
self.assertEqual(grp.listing_type, "LISTED")
self.assertEqual(grp.list_address, "true")
lname = grp.listing_name
self.assertEqual(lname.first_name, "Joe")
self.assertEqual(lname.last_name, "Smith")
addr = grp.address
self.assertEqual(addr.city, "New York")
self.assertEqual(addr.house_number, "12")
self.assertEqual(addr.street_name, "ELM")
self.assertEqual(addr.state_code, "NY")
self.assertEqual(addr.zip, "10007")
self.assertEqual(addr.country, "United States")
self.assertEqual(addr.address_type, "Dlda")
def test_dlda_list(self):
with requests_mock.Mocker() as m:
url = self._client.config.url + self._account.dldas.get_xpath()
m.get(url, content=XML_RESPONSE_DLDA_LIST)
dldas = self._account.dldas.list()
dlda = dldas.items[0]
self.assertEqual(len(dldas.items), 3)
self.assertEqual(dlda.id, "37a6447c-1a0b-4be9-ba89-3f5cb0aea142")
self.assertEqual(dlda.account_id, "14")
self.assertEqual(dlda.count_of_tns, "2")
self.assertEqual(dlda.user_id, "team_ua")
self.assertEqual(dlda.last_modified_date,
"2014-07-07T10:06:43.427Z")
self.assertEqual(dlda.order_type, "dlda")
self.assertEqual(dlda.order_date, "2014-07-07T10:06:43.427Z")
self.assertEqual(dlda.order_id, "37a6447c-1a0b-4be9-ba89-3f5cb0aea142")
self.assertEqual(dlda.order_status, "FAILED")
def test_dlda_post(self):
with requests_mock.Mocker() as m:
url = self._client.config.url + self._account.dldas.get_xpath()
m.post(url, content=XML_RESPONSE_DLDA_POST)
order_data = {
"customer_order_id": "123",
"dlda_tn_groups": {
"dlda_tn_group": [{
"telephone_numbers": {
"telephone_number": ["4352154856"]
},
"account_type": "RESIDENTIAL",
"listing_type": "LISTED",
"list_address": "true",
"listing_name": {
"first_name": "<NAME>",
"first_name2": "<NAME>",
"last_name": "<NAME>",
"designation": "designation",
"title_of_lineage": "title of lineage",
"title_of_address": "title of address",
"title_of_address2": "title of address2",
"title_of_lineage_name2":"title of lineage name2",
"title_of_address_name2":"title of address name2",
"title_of_address2_name2":
"title of address2 name2",
"place_listing_as": "place listing as"
},
"address": {
"house_prefix": "house prefix",
"house_number": "915",
"house_suffix": "house suffix",
"pre_directional": "pre directional",
"street_name": "street name",
"street_suffix": "street suffix",
"post_directional": "post directional",
"address_line2": "address line2",
"city": "city",
"state_code": "state code",
"zip": "zip",
"plus_four": "plus four",
"country": "country",
"address_type": "address type"
}
}]
}
}
dlda = self._account.dldas.create(order_data, False)
self.assertEqual(dlda.customer_order_id, "123")
grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
self.assertEqual(grp.telephone_numbers.telephone_number.items,
["4352154856"])
self.assertEqual(grp.account_type, "RESIDENTIAL")
self.assertEqual(grp.listing_type, "LISTED")
self.assertEqual(grp.list_address, "true")
name = grp.listing_name
self.assertEqual(name.first_name, "<NAME>")
self.assertEqual(name.first_name2, "<NAME>")
self.assertEqual(name.last_name, "<NAME>")
self.assertEqual(name.designation, "designation")
self.assertEqual(name.title_of_lineage, "title of lineage")
self.assertEqual(name.title_of_address, "title of address")
self.assertEqual(name.title_of_address2, "title of address2")
self.assertEqual(name.title_of_lineage_name2,
"title of lineage name2")
self.assertEqual(name.title_of_address_name2,
"title of address name2")
self.assertEqual(name.title_of_address2_name2,
"title of address2 name2")
self.assertEqual(name.place_listing_as, "place listing as")
addr = grp.address
self.assertEqual(addr.house_prefix, "house prefix")
self.assertEqual(addr.house_number, "915")
self.assertEqual(addr.house_suffix, "house suffix")
self.assertEqual(addr.pre_directional, "pre directional")
self.assertEqual(addr.street_name, "street name")
self.assertEqual(addr.street_suffix, "street suffix")
self.assertEqual(addr.post_directional, "post directional")
self.assertEqual(addr.address_line2, "address line2")
self.assertEqual(addr.city, "city")
self.assertEqual(addr.state_code, "state code")
self.assertEqual(addr.zip, "zip")
self.assertEqual(addr.plus_four, "plus four")
self.assertEqual(addr.country, "country")
self.assertEqual(addr.address_type, "address type")
dlda = self._account.dldas.create(order_data)
self.assertEqual(dlda.customer_order_id,
"5a88d16d-f8a9-45c5-a5db-137d700c6a22")
self.assertEqual(dlda.order_create_date,
"2014-07-10T12:38:11.833Z")
self.assertEqual(dlda.account_id, "14")
self.assertEqual(dlda.created_by_user, "jbm")
self.assertEqual(dlda.order_id,
"ea9e90c2-77a4-4f82-ac47-e1c5bb1311f4")
self.assertEqual(dlda.last_modified_date,
"2014-07-10T12:38:11.833Z")
self.assertEqual(dlda.processing_status, "RECEIVED")
grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
self.assertEqual(grp.telephone_numbers.telephone_number.items,
["2053778335","2053865784"])
self.assertEqual(grp.account_type, "BUSINESS")
self.assertEqual(grp.listing_type, "LISTED")
self.assertEqual(grp.list_address, "true")
name = grp.listing_name
self.assertEqual(name.first_name, "Joe")
self.assertEqual(name.last_name, "Smith")
addr = grp.address
self.assertEqual(addr.city, "New York")
self.assertEqual(addr.house_number, "12")
self.assertEqual(addr.street_name, "ELM")
self.assertEqual(addr.state_code, "NY")
self.assertEqual(addr.zip, "10007")
self.assertEqual(addr.country, "United States")
self.assertEqual(addr.address_type, "Dlda")
def test_dlda_put(self):
order_data = {
"order_id": "7802373f-4f52-4387-bdd1-c5b74833d6e2",
"customer_order_id": "123",
"dlda_tn_groups": {
"dlda_tn_group": [{
"telephone_numbers": {
"telephone_number": ["4352154856"]
},
"account_type": "RESIDENTIAL",
"listing_type": "LISTED",
"list_address": "true",
"listing_name": {
"first_name": "<NAME>",
"first_name2": "<NAME>",
"last_name": "<NAME>",
"designation": "designation",
"title_of_lineage": "title of lineage",
"title_of_address": "title of address",
"title_of_address2": "title of address2",
"title_of_lineage_name2":"title of lineage name2",
"title_of_address_name2":"title of address name2",
"title_of_address2_name2": "title of address2 name2",
"place_listing_as": "place listing as"
},
"address": {
"house_prefix": "house prefix",
"house_number": "915",
"house_suffix": "house suffix",
"pre_directional": "pre directional",
"street_name": "street name",
"street_suffix": "street suffix",
"post_directional": "post directional",
"address_line2": "address line2",
"city": "city",
"state_code": "state code",
"zip": "zip",
"plus_four": "plus four",
"country": "country",
"address_type": "address type"
}
}]
}
}
dlda = self._account.dldas.create(order_data, False)
self.assertEqual(dlda.customer_order_id, "123")
self.assertEqual(dlda.order_id,
"7802373f-4f52-4387-bdd1-c5b74833d6e2")
grp = dlda.dlda_tn_groups.dlda_tn_group.items[0]
self.assertEqual(grp.telephone_numbers.telephone_number.items,
["4352154856"])
self.assertEqual(grp.account_type, "RESIDENTIAL")
self.assertEqual(grp.listing_type, "LISTED")
self.assertEqual(grp.list_address, "true")
name = grp.listing_name
self.assertEqual(name.first_name, "<NAME>")
self.assertEqual(name.first_name2, "<NAME>")
self.assertEqual(name.last_name, "<NAME>")
self.assertEqual(name.designation, "designation")
self.assertEqual(name.title_of_lineage, "title of lineage")
self.assertEqual(name.title_of_address, "title of address")
self.assertEqual(name.title_of_address2, "title of address2")
self.assertEqual(name.title_of_lineage_name2,
"title of lineage name2")
self.assertEqual(name.title_of_address_name2,
"title of address name2")
self.assertEqual(name.title_of_address2_name2,
"title of address2 name2")
self.assertEqual(name.place_listing_as, "place listing as")
addr = grp.address
self.assertEqual(addr.house_prefix, "house prefix")
self.assertEqual(addr.house_number, "915")
self.assertEqual(addr.house_suffix, "house suffix")
self.assertEqual(addr.pre_directional, "pre directional")
self.assertEqual(addr.street_name, "street name")
self.assertEqual(addr.street_suffix, "street suffix")
self.assertEqual(addr.post_directional, "post directional")
self.assertEqual(addr.address_line2, "address line2")
self.assertEqual(addr.city, "city")
self.assertEqual(addr.state_code, "state code")
self.assertEqual(addr.zip, "zip")
self.assertEqual(addr.plus_four, "plus four")
self.assertEqual(addr.country, "country")
self.assertEqual(addr.address_type, "address type")
self.assertEqual(dlda.get_xpath(),
self._account.get_xpath() + self._account.dldas._xpath +
dlda._xpath.format(dlda.id))
with requests_mock.Mocker() as m:
url = self._client.config.url + dlda.get_xpath()
m.put(url, content = XML_RESPONSE_DLDA_GET)
dlda.save()
if __name__ == "__main__":
main()
|
[
"unittest.main",
"os.path.abspath",
"requests_mock.Mocker",
"iris_sdk.client.Client",
"iris_sdk.models.account.Account"
] |
[((18850, 18856), 'unittest.main', 'main', ([], {}), '()\n', (18854, 18856), False, 'from unittest import main, TestCase\n'), ((5030, 5071), 'iris_sdk.client.Client', 'Client', (['"""http://foo"""', '"""bar"""', '"""bar"""', '"""qux"""'], {}), "('http://foo', 'bar', 'bar', 'qux')\n", (5036, 5071), False, 'from iris_sdk.client import Client\n'), ((5095, 5122), 'iris_sdk.models.account.Account', 'Account', ([], {'client': 'cls._client'}), '(client=cls._client)\n', (5102, 5122), False, 'from iris_sdk.models.account import Account\n'), ((5262, 5284), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (5282, 5284), False, 'import requests_mock\n'), ((7215, 7237), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (7235, 7237), False, 'import requests_mock\n'), ((8160, 8182), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (8180, 8182), False, 'import requests_mock\n'), ((18646, 18668), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (18666, 18668), False, 'import requests_mock\n'), ((121, 146), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (136, 146), False, 'import os\n')]
|
"""Main module that contains SimulationOptimization class definition
.. module:: sim_opt.py
:synopsis: DWSIM simulation optimization class
.. moduleauthor:: <NAME> <<EMAIL>>
:Module: sim_opt.py
:Author: <NAME> <<EMAIL>>
"""
import numpy as np
import time
class SimulationOptimization():
"""Class that defines DWSIM simulation optimization objects.
:ivar path2sim: Absolute path to a DWSIM simulation (.dwxmz)
:ivar path2dwsim: Absolute path to the DWSIM installation
:ivar savepath: Absolute path to save the DWSIM simulation (.dwxmz)
:ivar verbose: Boolean that controls display messages during simulation calculation
:ivar x_val: Last simulated degrees of freedom values
:ivar f_val: Last simulated objective functions values
:ivar g_val: Last simulated constraints values
    :ivar dof: Lambda function that assigns the degrees of freedom of the DWSIM process simulation to be handled by the optimization solver
:ivar f: Lambda function that returns a numpy.array with objective functions values after converging the simulation
:ivar g: Lambda function that returns a numpy.array with constraints values after converging the simulation
:ivar n_dof: Number of degrees of freedom (size of optimization problem)
:ivar n_f: Number of objective functions (still unsupported for n_f>1, *i.e.* multi-objective problem)
:ivar n_g: Number of constraints
"""
def __init__(self, path2sim, dof=np.array([], dtype=object),
path2dwsim = "C:\\Users\\lfsfr\\AppData\\Local\\DWSIM7\\",
savepath = "", verbose = True): # pragma: no cover
self.path2sim = path2sim
self.path2dwsim = path2dwsim
if savepath=="":
self.savepath = path2sim
else:
self.savepath = savepath
self.x_val = np.array([])
self.f_val = np.array([])
self.g_val = np.array([])
self.f = np.array([], dtype=object)
self.n_f = self.f.size
self.g = np.array([], dtype=object)
self.n_g = self.g.size
self.dof = dof
self.n_dof = self.dof.size
self.verbose = verbose
def add_refs(self):
"""This method add reference in the proggraming environment to the DWSIM dlls, so they can be imported.
"""
import pythoncom
pythoncom.CoInitialize()
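        # COM must be initialized on this thread before pythonnet (clr)
        # can load the DWSIM .NET assemblies below.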
import clr
# from os import system as System
# from System.IO import Directory, Path, File
# from System import String, Environment
clr.AddReference(self.path2dwsim + "CapeOpen.dll")
clr.AddReference(self.path2dwsim + "DWSIM.Automation.dll")
clr.AddReference(self.path2dwsim + "DWSIM.Interfaces.dll")
clr.AddReference(self.path2dwsim + "DWSIM.GlobalSettings.dll")
clr.AddReference(self.path2dwsim + "DWSIM.SharedClasses.dll")
clr.AddReference(self.path2dwsim + "DWSIM.Thermodynamics.dll")
clr.AddReference(self.path2dwsim + "DWSIM.UnitOperations.dll")
clr.AddReference(self.path2dwsim + "System.Buffers.dll")
try:
clr.AddReference(self.path2dwsim + "System.Buffers2.dll")
except Exception as e:
pass
# print(Exception)
# print("More refs")
clr.AddReference(self.path2dwsim + "DWSIM.Inspector.dll")
clr.AddReference(self.path2dwsim + "DWSIM.MathOps.dll")
clr.AddReference(self.path2dwsim + "TcpComm.dll")
clr.AddReference(self.path2dwsim + "Microsoft.ServiceBus.dll")
clr.AddReference(self.path2dwsim + "System.Buffers.dll")
clr.AddReference(self.path2dwsim + "SkiaSharp.dll")
clr.AddReference(self.path2dwsim + "OxyPlot")
# clr.AddReference(self.path2dwsim + "OxyPlot.WindowsForms")
# clr.AddReference(self.path2dwsim + "DWSIM.ExtensionMethods.Eto")
print("added refs")
def connect(self, interf):
"""This method uses the automation manager object to load the DWSIM flowsheet and store them into self.
Args:
interf (DWSIM.Automation.Automation2): Automation manager object with methods to load, save, and create DWSIM flowsheet simulations.
"""
import sys
        if not hasattr(self, 'flowsheet'):
# load simulation
flowsheet = interf.LoadFlowsheet(self.path2sim)
# add DWSIM objects to Simulation object
self.interface = interf
self.flowsheet = flowsheet
if flowsheet is not None:
print("Simulation was loaded successfully")
def add_dof(self, dof_new, description=[None,None,None,None]):
"""Append a new degree of freedom to the SimulationOptimization object
Args:
            dof_new (lambda function): Lambda function that assigns the appended degrees of freedom of the DWSIM process simulation
"""
if self.dof.size==0:
self.dof = np.append(self.dof, np.append( dof_new, description ) )
else:
self.dof = np.block( [ [self.dof], [np.append( dof_new, description)] ] )
        self.n_dof += 1  # int(self.dof.size)
# self.dof.reshape((self.n_dof,2))
def add_fobj(self, func, description=[None,None,None,None]):
"""Append a new objective function to the SimulationOptimization object
Args:
func (lambda function): Lambda function that returns a numpy.array with objective function value after converging the simulation
"""
if self.f.size==0:
self.f = np.append(self.f, np.append( func, description ) )
else:
self.f = np.block( [ [self.f], [np.append( func, description)] ] )
self.n_f += 1
# self.f = np.append(self.f, func)
# self.n_f = self.f.size
def add_constraint(self, g_func, description=[None,None,None,None]):
"""Append a new constraint to the SimulationOptimization object
Args:
g_func (lambda function): Lambda function that returns a numpy.array with constraint value after converging the simulation
"""
if self.g.size==0:
self.g = np.append(self.g, np.append( g_func, description ) )
else:
self.g = np.block( [ [self.g], [np.append( g_func, description)] ] )
self.n_g += 1
# self.g = np.append(self.g, g_func)
# self.n_g = self.g.size
def converge_simulation(self, x):
"""Converge the simulation with degrees of freedom values of ``x``
Args:
x (numpy.array): Array of degrees of freedom values to be simulated
"""
if self.verbose:
print(f"opt_functions calculation at x = {x}")
if x.size != self.n_dof:
print(f"Size of x {x.size} is diferent from n_dof = {self.n_dof}. DO you know what your doing? Only {x.size} values of dof will be assigned.")
for i in range(self.n_dof):
self.dof[i][0](x[i])
# first calculation
error = self.interface.CalculateFlowsheet2(self.flowsheet)
time.sleep(0.1)
# second calculation
error = self.interface.CalculateFlowsheet2(self.flowsheet)
time.sleep(0.1)
res_old = np.array([self.f[0]()])
for i in range(self.n_g):
res_old = np.append(res_old, np.asarray(self.g[i][0]()))
# third+ calculation
for conv_ite in range(3):
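            # keep recalculating until successive objective/constraint values
            # stop changing (norm of the change below 1e-6)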
error = self.interface.CalculateFlowsheet2(self.flowsheet)
time.sleep(0.1)
res_new = np.array([self.f[0]()])
for i in range(self.n_g):
res_new = np.append(res_new, self.g[i][0]())
try:
variation = np.linalg.norm(res_new-res_old)
            except Exception:
variation = 1
if variation > 1e-6:
res_old = res_new
else:
if self.verbose:
print(f" Simulation converged in {conv_ite+3} iterations")
if len(error)>0:
print(f"{error} at x = {x}")
return
# fifth calculation, in case of error
if len(error)>0:
error = self.interface.CalculateFlowsheet2(self.flowsheet)
time.sleep(0.05)
if self.verbose:
print(" Simulation converged in 5 iterations or failed to converge...")
if len(error)>0:
print(f"{error} at x = {x}")
def calculate_optProblem(self, x):
"""Assign degrees of freedom values to the simulation if norm > 1e-10. Converge the simulation and return an array with objectives and constraints values.
Args:
x (numpy.array): Array of degrees of freedom values to be simulated
Returns:
numpy.array: Array of objectives and constraints values calculated at ``x``
"""
try:
delta_x = np.linalg.norm(self.x_val - np.asarray(x))
        except Exception:
delta_x = 1
if delta_x > 1e-10:
self.converge_simulation(x)
self.x_val = np.array(x)
self.f_val = np.zeros(self.n_f)
self.g_val = np.zeros(self.n_g)
if self.n_f>1:
for i, ff in enumerate(self.f):
self.f_val[i] = ff[0]()
elif self.n_f==0:
self.f_val = None
else:
self.f_val = np.array([self.f[0]()])
if self.n_g>1:
for i, gg in enumerate(self.g):
self.g_val[i] = gg[0]()
elif self.n_g==0:
            self.g_val = None
else:
self.g_val = np.array([self.g[0]()])
if self.verbose:
print(f"f = {self.f_val}, g = {self.g_val} at x = {x}")
return np.append(self.f_val, self.g_val)
def fpen_barrier(self,x,pen=1000):
"""Calculates a penalized objective function using barrier method and considering ``f`` and ``g``.
Args:
x (numpy.array): Array of degrees of freedom values to be simulated.
pen (float, optional): Penalization parameter. Defaults to 1000.
Returns:
float: Penalized objective function.
"""
self.calculate_optProblem(x)
fpen = 0
for i in range(self.n_f):
fpen += np.asarray(self.f_val)[i]
for i in range(self.n_g):
fpen += pen*max(0, self.g_val[i])
return fpen
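    # Illustrative numbers: with f_val = [2.0] and g_val = [0.3, -0.1],
    # fpen_barrier returns 2.0 + 1000*max(0, 0.3) + 1000*max(0, -0.1) = 302.0.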
def fpen_quad(self, x, pen=1000):
"""Calculates a penalized objective function using quadratic penalization method and considering ``f`` and ``g``.
Args:
x (numpy.array): Array of degrees of freedom values to be simulated.
pen (float, optional): Penalization parameter. Defaults to 1000.
Returns:
float: Penalized objective function.
"""
self.calculate_optProblem(x)
fpen = 0
for i in range(self.n_f):
fpen += self.f_val[i]
for i in range(self.n_g):
fpen += pen*max(0, self.g_val[i])**2
return fpen
def fpen_exp(self, x, pen=1000):
"""Calculates a penalized objective function using exponential penalization method and considering ``f`` and ``g``.
Args:
x (numpy.array): Array of degrees of freedom values to be simulated.
pen (float, optional): Penalization parameter. Defaults to 1000.
Returns:
float: Penalized objective function.
"""
self.calculate_optProblem(x)
fpen = 0
for i in range(self.n_f):
fpen += self.f_val[i]
for i in range(self.n_g):
            fpen += pen*np.exp(max(0, self.g_val[i]))
return fpen
|
[
"numpy.asarray",
"numpy.zeros",
"time.sleep",
"pythoncom.CoInitialize",
"numpy.append",
"clr.AddReference",
"numpy.array",
"numpy.linalg.norm"
] |
[((1505, 1531), 'numpy.array', 'np.array', (['[]'], {'dtype': 'object'}), '([], dtype=object)\n', (1513, 1531), True, 'import numpy as np\n'), ((1882, 1894), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1890, 1894), True, 'import numpy as np\n'), ((1916, 1928), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1924, 1928), True, 'import numpy as np\n'), ((1950, 1962), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1958, 1962), True, 'import numpy as np\n'), ((1980, 2006), 'numpy.array', 'np.array', (['[]'], {'dtype': 'object'}), '([], dtype=object)\n', (1988, 2006), True, 'import numpy as np\n'), ((2055, 2081), 'numpy.array', 'np.array', (['[]'], {'dtype': 'object'}), '([], dtype=object)\n', (2063, 2081), True, 'import numpy as np\n'), ((2388, 2412), 'pythoncom.CoInitialize', 'pythoncom.CoInitialize', ([], {}), '()\n', (2410, 2412), False, 'import pythoncom\n'), ((2596, 2646), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'CapeOpen.dll')"], {}), "(self.path2dwsim + 'CapeOpen.dll')\n", (2612, 2646), False, 'import clr\n'), ((2655, 2713), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.Automation.dll')"], {}), "(self.path2dwsim + 'DWSIM.Automation.dll')\n", (2671, 2713), False, 'import clr\n'), ((2722, 2780), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.Interfaces.dll')"], {}), "(self.path2dwsim + 'DWSIM.Interfaces.dll')\n", (2738, 2780), False, 'import clr\n'), ((2789, 2851), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.GlobalSettings.dll')"], {}), "(self.path2dwsim + 'DWSIM.GlobalSettings.dll')\n", (2805, 2851), False, 'import clr\n'), ((2860, 2921), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.SharedClasses.dll')"], {}), "(self.path2dwsim + 'DWSIM.SharedClasses.dll')\n", (2876, 2921), False, 'import clr\n'), ((2930, 2992), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.Thermodynamics.dll')"], {}), "(self.path2dwsim + 'DWSIM.Thermodynamics.dll')\n", (2946, 2992), False, 'import clr\n'), ((3001, 3063), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.UnitOperations.dll')"], {}), "(self.path2dwsim + 'DWSIM.UnitOperations.dll')\n", (3017, 3063), False, 'import clr\n'), ((3072, 3128), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'System.Buffers.dll')"], {}), "(self.path2dwsim + 'System.Buffers.dll')\n", (3088, 3128), False, 'import clr\n'), ((3328, 3385), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.Inspector.dll')"], {}), "(self.path2dwsim + 'DWSIM.Inspector.dll')\n", (3344, 3385), False, 'import clr\n'), ((3394, 3449), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'DWSIM.MathOps.dll')"], {}), "(self.path2dwsim + 'DWSIM.MathOps.dll')\n", (3410, 3449), False, 'import clr\n'), ((3458, 3507), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'TcpComm.dll')"], {}), "(self.path2dwsim + 'TcpComm.dll')\n", (3474, 3507), False, 'import clr\n'), ((3516, 3578), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'Microsoft.ServiceBus.dll')"], {}), "(self.path2dwsim + 'Microsoft.ServiceBus.dll')\n", (3532, 3578), False, 'import clr\n'), ((3587, 3643), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'System.Buffers.dll')"], {}), "(self.path2dwsim + 'System.Buffers.dll')\n", (3603, 3643), False, 'import clr\n'), ((3652, 3703), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'SkiaSharp.dll')"], {}), "(self.path2dwsim + 'SkiaSharp.dll')\n", 
(3668, 3703), False, 'import clr\n'), ((3712, 3757), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'OxyPlot')"], {}), "(self.path2dwsim + 'OxyPlot')\n", (3728, 3757), False, 'import clr\n'), ((7121, 7136), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7131, 7136), False, 'import time\n'), ((7241, 7256), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7251, 7256), False, 'import time\n'), ((9868, 9901), 'numpy.append', 'np.append', (['self.f_val', 'self.g_val'], {}), '(self.f_val, self.g_val)\n', (9877, 9901), True, 'import numpy as np\n'), ((3154, 3211), 'clr.AddReference', 'clr.AddReference', (["(self.path2dwsim + 'System.Buffers2.dll')"], {}), "(self.path2dwsim + 'System.Buffers2.dll')\n", (3170, 3211), False, 'import clr\n'), ((7549, 7564), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7559, 7564), False, 'import time\n'), ((8308, 8324), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (8318, 8324), False, 'import time\n'), ((9151, 9162), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (9159, 9162), True, 'import numpy as np\n'), ((9188, 9206), 'numpy.zeros', 'np.zeros', (['self.n_f'], {}), '(self.n_f)\n', (9196, 9206), True, 'import numpy as np\n'), ((9232, 9250), 'numpy.zeros', 'np.zeros', (['self.n_g'], {}), '(self.n_g)\n', (9240, 9250), True, 'import numpy as np\n'), ((5022, 5053), 'numpy.append', 'np.append', (['dof_new', 'description'], {}), '(dof_new, description)\n', (5031, 5053), True, 'import numpy as np\n'), ((5626, 5654), 'numpy.append', 'np.append', (['func', 'description'], {}), '(func, description)\n', (5635, 5654), True, 'import numpy as np\n'), ((6225, 6255), 'numpy.append', 'np.append', (['g_func', 'description'], {}), '(g_func, description)\n', (6234, 6255), True, 'import numpy as np\n'), ((7755, 7788), 'numpy.linalg.norm', 'np.linalg.norm', (['(res_new - res_old)'], {}), '(res_new - res_old)\n', (7769, 7788), True, 'import numpy as np\n'), ((10409, 10431), 'numpy.asarray', 'np.asarray', (['self.f_val'], {}), '(self.f_val)\n', (10419, 10431), True, 'import numpy as np\n'), ((9003, 9016), 'numpy.asarray', 'np.asarray', (['x'], {}), '(x)\n', (9013, 9016), True, 'import numpy as np\n'), ((5121, 5152), 'numpy.append', 'np.append', (['dof_new', 'description'], {}), '(dof_new, description)\n', (5130, 5152), True, 'import numpy as np\n'), ((5718, 5746), 'numpy.append', 'np.append', (['func', 'description'], {}), '(func, description)\n', (5727, 5746), True, 'import numpy as np\n'), ((6319, 6349), 'numpy.append', 'np.append', (['g_func', 'description'], {}), '(g_func, description)\n', (6328, 6349), True, 'import numpy as np\n')]
|
import logging
from unittest import TestCase
from lymph.utils.logging import get_loglevel
class LoggingUtilsTests(TestCase):
def test_get_loglevel(self):
self.assertEqual(get_loglevel('DEBUG'), logging.DEBUG)
self.assertEqual(get_loglevel('debug'), logging.DEBUG)
self.assertEqual(get_loglevel('Debug'), logging.DEBUG)
self.assertEqual(get_loglevel('INFO'), logging.INFO)
self.assertEqual(get_loglevel('info'), logging.INFO)
self.assertEqual(get_loglevel('ERROR'), logging.ERROR)
self.assertEqual(get_loglevel('error'), logging.ERROR)
self.assertEqual(get_loglevel('CRITICAL'), logging.CRITICAL)
self.assertEqual(get_loglevel('critical'), logging.CRITICAL)
self.assertRaises(ValueError, get_loglevel, 'FOO')
self.assertRaises(ValueError, get_loglevel, '*')
|
[
"lymph.utils.logging.get_loglevel"
] |
[((187, 208), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""DEBUG"""'], {}), "('DEBUG')\n", (199, 208), False, 'from lymph.utils.logging import get_loglevel\n'), ((250, 271), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""debug"""'], {}), "('debug')\n", (262, 271), False, 'from lymph.utils.logging import get_loglevel\n'), ((313, 334), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""Debug"""'], {}), "('Debug')\n", (325, 334), False, 'from lymph.utils.logging import get_loglevel\n'), ((376, 396), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""INFO"""'], {}), "('INFO')\n", (388, 396), False, 'from lymph.utils.logging import get_loglevel\n'), ((437, 457), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""info"""'], {}), "('info')\n", (449, 457), False, 'from lymph.utils.logging import get_loglevel\n'), ((498, 519), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""ERROR"""'], {}), "('ERROR')\n", (510, 519), False, 'from lymph.utils.logging import get_loglevel\n'), ((561, 582), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""error"""'], {}), "('error')\n", (573, 582), False, 'from lymph.utils.logging import get_loglevel\n'), ((624, 648), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""CRITICAL"""'], {}), "('CRITICAL')\n", (636, 648), False, 'from lymph.utils.logging import get_loglevel\n'), ((693, 717), 'lymph.utils.logging.get_loglevel', 'get_loglevel', (['"""critical"""'], {}), "('critical')\n", (705, 717), False, 'from lymph.utils.logging import get_loglevel\n')]
|
import pybullet as p
import pybullet_data
from time import sleep, time
# import control
# import slycot
p.connect(p.GUI)
p.setAdditionalSearchPath(pybullet_data.getDataPath())
p.loadURDF("plane.urdf")
botpos=[0,0,0.08]
botori = p.getQuaternionFromEuler([0, 0, 0])
bot = p.loadURDF("urdf/Paucibot.urdf", *botpos, *botori)
# bot = p.loadURDF("urdf/Paucibot.urdf",*botpos)
p.setGravity(0,0,-10)
numJoints = p.getNumJoints(bot)
for joint in range(numJoints):
print(p.getJointInfo(bot,joint))
wheels = [ 2, 5 ]
targetVel = 15
maxForce = 6
kp, kd, ki = 255,26,34
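# PID gains for the pitch controller: proportional, derivative, integral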
init = time()
target_pos = 0.0
prev_error = 0
inti_term = 0
encoder_pos = [0,0]
while True:
orie = p.getBasePositionAndOrientation(bot)[1]
euler = p.getEulerFromQuaternion(orie)
pitch = euler[1]
dt = time()-init
error = (pitch-target_pos)
#k = control.lqr(A,B,Q,R)
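    # accumulate the integral term only near the setpoint to limit windup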
if abs(error)<0.01:
inti_term += error*dt
else:
inti_term = 0
feedback = kp*error + kd*(error - prev_error)/dt + ki*inti_term
prev_error = error
print("error: ", error)
feedback/=500
#feedback = - k * error
print("feedback: ",feedback)
encoder_pos[0] -= feedback
encoder_pos[1] -= feedback
p.setJointMotorControl2(bot, wheels[0], p.VELOCITY_CONTROL, targetVelocity=encoder_pos[0], force=maxForce)# targetVelocity=-max(min(15,feedback),-15),)
p.setJointMotorControl2(bot, wheels[1], p.VELOCITY_CONTROL, targetVelocity=encoder_pos[1], force=maxForce)# targetVelocity=-max(min(15,feedback),-15), )
#print(list(p.getJointState(bot, wheel) for wheel in wheels))
p.stepSimulation()
sleep(0.05)
init = time()
|
[
"pybullet.getQuaternionFromEuler",
"pybullet.stepSimulation",
"pybullet.setGravity",
"pybullet.getBasePositionAndOrientation",
"pybullet.getJointInfo",
"pybullet.getNumJoints",
"time.time",
"pybullet.setJointMotorControl2",
"time.sleep",
"pybullet_data.getDataPath",
"pybullet.connect",
"pybullet.loadURDF",
"pybullet.getEulerFromQuaternion"
] |
[((105, 121), 'pybullet.connect', 'p.connect', (['p.GUI'], {}), '(p.GUI)\n', (114, 121), True, 'import pybullet as p\n'), ((177, 201), 'pybullet.loadURDF', 'p.loadURDF', (['"""plane.urdf"""'], {}), "('plane.urdf')\n", (187, 201), True, 'import pybullet as p\n'), ((229, 264), 'pybullet.getQuaternionFromEuler', 'p.getQuaternionFromEuler', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (253, 264), True, 'import pybullet as p\n'), ((272, 322), 'pybullet.loadURDF', 'p.loadURDF', (['"""urdf/Paucibot.urdf"""', '*botpos', '*botori'], {}), "('urdf/Paucibot.urdf', *botpos, *botori)\n", (282, 322), True, 'import pybullet as p\n'), ((372, 395), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-10)'], {}), '(0, 0, -10)\n', (384, 395), True, 'import pybullet as p\n'), ((406, 425), 'pybullet.getNumJoints', 'p.getNumJoints', (['bot'], {}), '(bot)\n', (420, 425), True, 'import pybullet as p\n'), ((567, 573), 'time.time', 'time', ([], {}), '()\n', (571, 573), False, 'from time import sleep, time\n'), ((148, 175), 'pybullet_data.getDataPath', 'pybullet_data.getDataPath', ([], {}), '()\n', (173, 175), False, 'import pybullet_data\n'), ((713, 743), 'pybullet.getEulerFromQuaternion', 'p.getEulerFromQuaternion', (['orie'], {}), '(orie)\n', (737, 743), True, 'import pybullet as p\n'), ((1197, 1308), 'pybullet.setJointMotorControl2', 'p.setJointMotorControl2', (['bot', 'wheels[0]', 'p.VELOCITY_CONTROL'], {'targetVelocity': 'encoder_pos[0]', 'force': 'maxForce'}), '(bot, wheels[0], p.VELOCITY_CONTROL, targetVelocity=\n encoder_pos[0], force=maxForce)\n', (1220, 1308), True, 'import pybullet as p\n'), ((1353, 1464), 'pybullet.setJointMotorControl2', 'p.setJointMotorControl2', (['bot', 'wheels[1]', 'p.VELOCITY_CONTROL'], {'targetVelocity': 'encoder_pos[1]', 'force': 'maxForce'}), '(bot, wheels[1], p.VELOCITY_CONTROL, targetVelocity=\n encoder_pos[1], force=maxForce)\n', (1376, 1464), True, 'import pybullet as p\n'), ((1573, 1591), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (1589, 1591), True, 'import pybullet as p\n'), ((1596, 1607), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (1601, 1607), False, 'from time import sleep, time\n'), ((1619, 1625), 'time.time', 'time', ([], {}), '()\n', (1623, 1625), False, 'from time import sleep, time\n'), ((464, 490), 'pybullet.getJointInfo', 'p.getJointInfo', (['bot', 'joint'], {}), '(bot, joint)\n', (478, 490), True, 'import pybullet as p\n'), ((661, 697), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['bot'], {}), '(bot)\n', (692, 697), True, 'import pybullet as p\n'), ((774, 780), 'time.time', 'time', ([], {}), '()\n', (778, 780), False, 'from time import sleep, time\n')]
|
"""Run the chain syncer, storing data in the given db dir.
Run with `python -m scripts.chainsync -db <path>`.
"""
import asyncio
import logging
from typing import (
cast,
Type,
Union,
)
from eth.exceptions import HeaderNotFound
from trinity.db.eth1.chain import (
AsyncChainDB,
AsyncHeaderDB,
)
from trinity.protocol.eth.peer import ETHPeerPool
from trinity.protocol.les.peer import LESPeerPool
from trinity.sync.common.chain import BaseHeaderChainSyncer
from trinity.sync.full.chain import RegularChainSyncer
from trinity.sync.light.chain import LightChainSyncer
def _test() -> None:
import argparse
from pathlib import Path
import signal
from p2p import ecies
from p2p.kademlia import Node
from eth.chains.ropsten import RopstenChain, ROPSTEN_GENESIS_HEADER, ROPSTEN_VM_CONFIGURATION
from eth.chains.mainnet import MainnetChain, MAINNET_GENESIS_HEADER, MAINNET_VM_CONFIGURATION
from eth.db.backends.level import LevelDB
from tests.core.integration_test_helpers import (
AsyncMainnetChain, AsyncRopstenChain,
connect_to_peers_loop)
from trinity.constants import DEFAULT_PREFERRED_NODES
from trinity.protocol.common.context import ChainContext
from trinity._utils.chains import load_nodekey
parser = argparse.ArgumentParser()
parser.add_argument('-db', type=str, required=True)
parser.add_argument('-light', action="store_true")
parser.add_argument('-nodekey', type=str)
parser.add_argument('-enode', type=str, required=False, help="The enode we should connect to")
parser.add_argument('-debug', action="store_true")
args = parser.parse_args()
logging.basicConfig(
level=logging.INFO, format='%(asctime)s %(levelname)s: %(message)s', datefmt='%H:%M:%S')
log_level = logging.INFO
if args.debug:
log_level = logging.DEBUG
loop = asyncio.get_event_loop()
base_db = LevelDB(args.db)
headerdb = AsyncHeaderDB(base_db)
chaindb = AsyncChainDB(base_db)
try:
genesis = chaindb.get_canonical_block_header_by_number(0)
except HeaderNotFound:
genesis = ROPSTEN_GENESIS_HEADER
chaindb.persist_header(genesis)
peer_pool_class: Type[Union[ETHPeerPool, LESPeerPool]] = ETHPeerPool
if args.light:
peer_pool_class = LESPeerPool
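    # infer which network this database belongs to from its genesis header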
if genesis.hash == ROPSTEN_GENESIS_HEADER.hash:
network_id = RopstenChain.network_id
vm_config = ROPSTEN_VM_CONFIGURATION # type: ignore
chain_class = AsyncRopstenChain
elif genesis.hash == MAINNET_GENESIS_HEADER.hash:
network_id = MainnetChain.network_id
vm_config = MAINNET_VM_CONFIGURATION # type: ignore
chain_class = AsyncMainnetChain
else:
        raise RuntimeError("Unknown genesis: %s" % genesis)
if args.nodekey:
privkey = load_nodekey(Path(args.nodekey))
else:
privkey = ecies.generate_privkey()
context = ChainContext(
headerdb=headerdb,
network_id=network_id,
vm_configuration=vm_config,
)
peer_pool = peer_pool_class(privkey=privkey, context=context)
if args.enode:
nodes = tuple([Node.from_uri(args.enode)])
else:
nodes = DEFAULT_PREFERRED_NODES[network_id]
asyncio.ensure_future(peer_pool.run())
peer_pool.run_task(connect_to_peers_loop(peer_pool, nodes))
chain = chain_class(base_db)
syncer: BaseHeaderChainSyncer = None
if args.light:
syncer = LightChainSyncer(chain, headerdb, cast(LESPeerPool, peer_pool))
else:
syncer = RegularChainSyncer(chain, chaindb, cast(ETHPeerPool, peer_pool))
syncer.logger.setLevel(log_level)
syncer.min_peers_to_sync = 1
sigint_received = asyncio.Event()
for sig in [signal.SIGINT, signal.SIGTERM]:
loop.add_signal_handler(sig, sigint_received.set)
async def exit_on_sigint() -> None:
await sigint_received.wait()
await peer_pool.cancel()
await syncer.cancel()
loop.stop()
async def run() -> None:
await syncer.run()
syncer.logger.info("run() finished, exiting")
sigint_received.set()
# loop.set_debug(True)
asyncio.ensure_future(exit_on_sigint())
asyncio.ensure_future(run())
loop.run_forever()
loop.close()
def _run_test(profile: bool) -> None:
import cProfile, pstats # noqa
if profile:
cProfile.run('_test()', 'stats')
pstats.Stats('stats').strip_dirs().sort_stats('cumulative').print_stats(50)
else:
_test()
if __name__ == "__main__":
_run_test(profile=False)
|
[
"tests.core.integration_test_helpers.connect_to_peers_loop",
"asyncio.get_event_loop",
"argparse.ArgumentParser",
"logging.basicConfig",
"trinity.db.eth1.chain.AsyncChainDB",
"trinity.protocol.common.context.ChainContext",
"p2p.ecies.generate_privkey",
"asyncio.Event",
"typing.cast",
"p2p.kademlia.Node.from_uri",
"pstats.Stats",
"pathlib.Path",
"trinity.db.eth1.chain.AsyncHeaderDB",
"eth.db.backends.level.LevelDB",
"cProfile.run"
] |
[((1295, 1320), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1318, 1320), False, 'import argparse\n'), ((1668, 1781), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(levelname)s: %(message)s"""', 'datefmt': '"""%H:%M:%S"""'}), "(level=logging.INFO, format=\n '%(asctime)s %(levelname)s: %(message)s', datefmt='%H:%M:%S')\n", (1687, 1781), False, 'import logging\n'), ((1880, 1904), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1902, 1904), False, 'import asyncio\n'), ((1920, 1936), 'eth.db.backends.level.LevelDB', 'LevelDB', (['args.db'], {}), '(args.db)\n', (1927, 1936), False, 'from eth.db.backends.level import LevelDB\n'), ((1952, 1974), 'trinity.db.eth1.chain.AsyncHeaderDB', 'AsyncHeaderDB', (['base_db'], {}), '(base_db)\n', (1965, 1974), False, 'from trinity.db.eth1.chain import AsyncChainDB, AsyncHeaderDB\n'), ((1989, 2010), 'trinity.db.eth1.chain.AsyncChainDB', 'AsyncChainDB', (['base_db'], {}), '(base_db)\n', (2001, 2010), False, 'from trinity.db.eth1.chain import AsyncChainDB, AsyncHeaderDB\n'), ((2934, 3021), 'trinity.protocol.common.context.ChainContext', 'ChainContext', ([], {'headerdb': 'headerdb', 'network_id': 'network_id', 'vm_configuration': 'vm_config'}), '(headerdb=headerdb, network_id=network_id, vm_configuration=\n vm_config)\n', (2946, 3021), False, 'from trinity.protocol.common.context import ChainContext\n'), ((3716, 3731), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (3729, 3731), False, 'import asyncio\n'), ((2894, 2918), 'p2p.ecies.generate_privkey', 'ecies.generate_privkey', ([], {}), '()\n', (2916, 2918), False, 'from p2p import ecies\n'), ((3315, 3354), 'tests.core.integration_test_helpers.connect_to_peers_loop', 'connect_to_peers_loop', (['peer_pool', 'nodes'], {}), '(peer_pool, nodes)\n', (3336, 3354), False, 'from tests.core.integration_test_helpers import AsyncMainnetChain, AsyncRopstenChain, connect_to_peers_loop\n'), ((4386, 4418), 'cProfile.run', 'cProfile.run', (['"""_test()"""', '"""stats"""'], {}), "('_test()', 'stats')\n", (4398, 4418), False, 'import cProfile, pstats\n'), ((2846, 2864), 'pathlib.Path', 'Path', (['args.nodekey'], {}), '(args.nodekey)\n', (2850, 2864), False, 'from pathlib import Path\n'), ((3500, 3528), 'typing.cast', 'cast', (['LESPeerPool', 'peer_pool'], {}), '(LESPeerPool, peer_pool)\n', (3504, 3528), False, 'from typing import cast, Type, Union\n'), ((3592, 3620), 'typing.cast', 'cast', (['ETHPeerPool', 'peer_pool'], {}), '(ETHPeerPool, peer_pool)\n', (3596, 3620), False, 'from typing import cast, Type, Union\n'), ((3158, 3183), 'p2p.kademlia.Node.from_uri', 'Node.from_uri', (['args.enode'], {}), '(args.enode)\n', (3171, 3183), False, 'from p2p.kademlia import Node\n'), ((4427, 4448), 'pstats.Stats', 'pstats.Stats', (['"""stats"""'], {}), "('stats')\n", (4439, 4448), False, 'import cProfile, pstats\n')]
|
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Still in experimental stage!
from optparse import OptionParser
import os
import sys
import copy
import numpy as np
import pandas as pd
import scipy as sp
COLUMNS = ["age", "workclass", "fnlwgt", "education", "education_num",
"marital_status", "occupation", "relationship", "race", "gender",
"capital_gain", "capital_loss", "hours_per_week", "native_country",
"income_bracket"]
AGE, WORKCLASS, FNLWGT, EDUCATION, EDUCATION_NUM, MARITAL_STATUS, OCCPATION, \
RELATIONSHIP, RACE, GENDER, CAPITAL_GAIN, CAPITAL_LOSS, HOURS_PER_WEEK, NATIVE_COUNTRY, \
AGE_BUCKETS, LABEL, EDUCATION_OCCUPATION, NATIVECOUNTRY_OCCUPATION, AGEBUCKET_EDUCATION_OCCUPATION = range(19)
LABEL_COLUMN = "label"
CATEGORICAL_COLUMNS = ["workclass", "education", "marital_status", "occupation",
"relationship", "race", "gender", "native_country"]
CONTINUOUS_COLUMNS = ["age", "education_num", "capital_gain", "capital_loss",
"hours_per_week"]
def get_data(train_file_name='train.data', test_file_name='test.data'):
df_train = pd.read_csv(train_file_name,
names=COLUMNS,
skipinitialspace=True,
engine="python")
df_test = pd.read_csv(test_file_name,
names=COLUMNS,
skipinitialspace=True,
skiprows=1, # skip first line: "|1x3 Cross Validator"
engine="python")
df_train = df_train.dropna(how='any', axis=0)
df_test = df_test.dropna(how='any', axis=0)
df_train[LABEL_COLUMN] = (
df_train["income_bracket"].apply(lambda x: ">50K" in x)).astype(int)
df_test[LABEL_COLUMN] = (
df_test["income_bracket"].apply(lambda x: ">50K" in x)).astype(int)
return df_train, df_test
def binary_search(val, array, start=0):
"""
binary search implementation
:param val: value to search
:param array: data array to be searched
:param start: 0 if array starts with 0 else 1
:return: location of val in array, or bucket fall in if not in array
"""
low = start
high = len(array) - 1 + start
while low <= high:
        mid = (low + high) // 2  # floor division keeps mid a valid integer index
if array[mid] == val:
return mid
elif array[mid] > val:
high = mid-1
else:
low = mid+1
return low
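# Illustrative: binary_search(33, [18, 25, 30, 35, 40]) returns 3,
# the insertion point of 33 in the sorted array (30 < 33 < 35).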
def bucketized_column(column, boundaries):
"""
    transform every value of a column to its corresponding bucket according to the boundaries
:param column: primitive column
:param boundaries: boundaries to bucketize
:return: bucketized column
"""
_column = copy.deepcopy(column)
for i in range(len(_column)):
_column[i] = binary_search(_column[i], boundaries)
return _column
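# Illustrative: bucketized_column(np.array([17, 33, 70]),
#                                 [18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
# -> [0, 3, 10]: below the first boundary, between 30 and 35, above the last.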
def discretize_for_lookupTable(df, data_type, lookup_dict, columns, start=0):
"""
    discretize for BigDL's lookupTable's requirement: elements of input should be less than or equal to $nIndex + 1
:param df: data tensor. Type must be numpy.ndarray
:param columns: columns to do discretize
:param start: index that starts from
:return: discretized data tensor
"""
if data_type == 'train':
for col in columns:
total = sorted({}.fromkeys(df[:, col]).keys())
total_dict = {k: i+start
for i, k in enumerate(total)}
for _ in range(len(df[:, col])):
if df[_, col] not in total_dict.keys():
df[_, col] = 1
else:
df[_, col] = total_dict[df[_, col]]
lookup_dict[col] = total_dict
elif data_type == 'test':
for col in columns:
total_dict = lookup_dict[col]
for _ in range(len(df[:, col])):
if df[_, col] not in total_dict.keys():
df[_, col] = 1
else:
df[_, col] = total_dict[df[_, col]]
else:
raise ValueError("Not valid data type")
return df, lookup_dict
def cross_column(columns, hash_backet_size=1e4, scale=0.0):
"""
generate cross column feature from `columns` with hash bucket.
:param columns: columns to use to generate cross column, Type must be ndarray
:param hash_backet_size: hash bucket size to bucketize cross columns to fixed hash bucket
:return: cross column, represented as a ndarray
"""
assert columns.shape[0] > 0 and columns.shape[1] > 0
_crossed_column = np.zeros((columns.shape[0], 1))
for i in range(columns.shape[0]):
_crossed_column[i, 0] = (hash("_".join(map(str, columns[i, :]))) % hash_backet_size
+ hash_backet_size) % hash_backet_size
if scale > 0.0:
_crossed_column[i, 0] *= scale
return _crossed_column
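# Illustrative: cross_column(df[:, [EDUCATION, OCCPATION]], hash_backet_size=int(1e4))
# hashes each (education, occupation) pair into a single id in [0, 1e4).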
def feature_columns(df, data_type, lookup_dict):
gender_dict = {"Male": 1, "Female": 2}
age_boundaries = [18, 25, 30, 35, 40, 45, 50, 55, 60, 65]
age_bucket = bucketized_column(df[:, AGE], boundaries=age_boundaries)
df[:, AGE_BUCKETS] = age_bucket
assert WORKCLASS == 1 and EDUCATION == 3 and CAPITAL_LOSS == 11 and NATIVE_COUNTRY == 13
education_occupation = cross_column(df[:, [EDUCATION, OCCPATION]], hash_backet_size=int(1e4))
nativecountry_occupation = cross_column(df[:, [NATIVE_COUNTRY, OCCPATION]], hash_backet_size=int(1e4))
agebucket_education_occpation = cross_column(df[:, [AGE_BUCKETS, EDUCATION, OCCPATION]], hash_backet_size=int(1e6))
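    # hash the raw string categories into fixed-size, non-negative integer id spaces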
for i in range(df.shape[0]):
df[i, WORKCLASS] = (hash(df[i, 1]) % 100 + 100) % 100 # workclass
df[i, EDUCATION] = (hash(df[i, 3]) % 1000 + 1000) % 1000 # education
df[i, RELATIONSHIP] = (hash(df[i, 7]) % 100 + 100) % 100 # relationship
df[i, OCCPATION] = (hash(df[i, 6]) % 1000 + 1000) % 1000 # occupation
df[i, NATIVE_COUNTRY] = (hash(df[i, 13]) % 1000 + 1000) % 1000 # native_country
df[i, GENDER] = gender_dict[df[i, 9]] \
if (df[i, 9] in gender_dict.keys()) else -1 # gender
df[i, AGE] = df[i, 0] # age
df[i, EDUCATION_NUM] = df[i, 4] # education_num
df[i, CAPITAL_GAIN] = df[i, 10] # capital_gain
df[i, CAPITAL_LOSS] = df[i, 11] # capital_loss
df[i, HOURS_PER_WEEK] = df[i, 12] # hours_per_week
df, lookup_dict = discretize_for_lookupTable(df, data_type, lookup_dict,
columns=[WORKCLASS, EDUCATION, RELATIONSHIP, OCCPATION, NATIVE_COUNTRY, GENDER], start=1)
df = np.c_[df, education_occupation, nativecountry_occupation, agebucket_education_occpation]
return df, lookup_dict
def make_wide_deep_columns(df):
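    # wide part: sparse categorical and crossed features; deep part:
    # categorical ids plus the continuous columns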
wide_columns = np.array(df[:, GENDER])
wide_columns = np.c_[wide_columns, df[:, NATIVE_COUNTRY]]
wide_columns = np.c_[wide_columns, df[:, EDUCATION], df[:, OCCPATION]]
wide_columns = np.c_[wide_columns, df[:, WORKCLASS], df[:, RELATIONSHIP]]
wide_columns = np.c_[wide_columns, df[:, AGE_BUCKETS], df[:, EDUCATION_OCCUPATION]]
wide_columns = np.c_[wide_columns, df[:, NATIVECOUNTRY_OCCUPATION], df[:, AGEBUCKET_EDUCATION_OCCUPATION]]
deep_columns = np.array(df[:, WORKCLASS])
deep_columns = np.c_[deep_columns, df[:, EDUCATION], df[:, GENDER]]
deep_columns = np.c_[deep_columns, df[:, RELATIONSHIP], df[:, NATIVE_COUNTRY]]
deep_columns = np.c_[deep_columns, df[:, OCCPATION]]
deep_columns = np.c_[deep_columns, df[:, AGE], df[:, EDUCATION_NUM], df[:, CAPITAL_GAIN]]
deep_columns = np.c_[deep_columns, df[:, CAPITAL_LOSS], df[:, HOURS_PER_WEEK]]
wide_deep_columns = np.c_[wide_columns, deep_columns]
return wide_deep_columns, np.array(df[:, LABEL])
def handle():
df_train, df_test = get_data()
df_train = np.array(df_train)
df_test = np.array(df_test)
df_train, lookup_dict = feature_columns(df_train, 'train', {})
df_test, _ = feature_columns(df_test, 'test', lookup_dict)
train_data, train_label = make_wide_deep_columns(df_train)
test_data, test_label = make_wide_deep_columns(df_test)
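    # persist the engineered feature tensors and labels as plain-text integer files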
np.savetxt("train_tensor.data", train_data, fmt="%d", delimiter=',')
np.savetxt("train_label.data", train_label, fmt="%d")
np.savetxt("test_tensor.data", test_data, fmt="%d", delimiter=',')
np.savetxt("test_label.data", test_label, fmt="%d")
handle()
|
[
"copy.deepcopy",
"pandas.read_csv",
"numpy.savetxt",
"numpy.zeros",
"numpy.array"
] |
[((1669, 1757), 'pandas.read_csv', 'pd.read_csv', (['train_file_name'], {'names': 'COLUMNS', 'skipinitialspace': '(True)', 'engine': '"""python"""'}), "(train_file_name, names=COLUMNS, skipinitialspace=True, engine=\n 'python')\n", (1680, 1757), True, 'import pandas as pd\n'), ((1849, 1948), 'pandas.read_csv', 'pd.read_csv', (['test_file_name'], {'names': 'COLUMNS', 'skipinitialspace': '(True)', 'skiprows': '(1)', 'engine': '"""python"""'}), "(test_file_name, names=COLUMNS, skipinitialspace=True, skiprows=\n 1, engine='python')\n", (1860, 1948), True, 'import pandas as pd\n'), ((3268, 3289), 'copy.deepcopy', 'copy.deepcopy', (['column'], {}), '(column)\n', (3281, 3289), False, 'import copy\n'), ((5112, 5143), 'numpy.zeros', 'np.zeros', (['(columns.shape[0], 1)'], {}), '((columns.shape[0], 1))\n', (5120, 5143), True, 'import numpy as np\n'), ((7328, 7351), 'numpy.array', 'np.array', (['df[:, GENDER]'], {}), '(df[:, GENDER])\n', (7336, 7351), True, 'import numpy as np\n'), ((7786, 7812), 'numpy.array', 'np.array', (['df[:, WORKCLASS]'], {}), '(df[:, WORKCLASS])\n', (7794, 7812), True, 'import numpy as np\n'), ((8381, 8399), 'numpy.array', 'np.array', (['df_train'], {}), '(df_train)\n', (8389, 8399), True, 'import numpy as np\n'), ((8414, 8431), 'numpy.array', 'np.array', (['df_test'], {}), '(df_test)\n', (8422, 8431), True, 'import numpy as np\n'), ((8689, 8757), 'numpy.savetxt', 'np.savetxt', (['"""train_tensor.data"""', 'train_data'], {'fmt': '"""%d"""', 'delimiter': '""","""'}), "('train_tensor.data', train_data, fmt='%d', delimiter=',')\n", (8699, 8757), True, 'import numpy as np\n'), ((8762, 8815), 'numpy.savetxt', 'np.savetxt', (['"""train_label.data"""', 'train_label'], {'fmt': '"""%d"""'}), "('train_label.data', train_label, fmt='%d')\n", (8772, 8815), True, 'import numpy as np\n'), ((8820, 8886), 'numpy.savetxt', 'np.savetxt', (['"""test_tensor.data"""', 'test_data'], {'fmt': '"""%d"""', 'delimiter': '""","""'}), "('test_tensor.data', test_data, fmt='%d', delimiter=',')\n", (8830, 8886), True, 'import numpy as np\n'), ((8891, 8942), 'numpy.savetxt', 'np.savetxt', (['"""test_label.data"""', 'test_label'], {'fmt': '"""%d"""'}), "('test_label.data', test_label, fmt='%d')\n", (8901, 8942), True, 'import numpy as np\n'), ((8292, 8314), 'numpy.array', 'np.array', (['df[:, LABEL]'], {}), '(df[:, LABEL])\n', (8300, 8314), True, 'import numpy as np\n')]
|
import tensorflow as tf
import tensorflow_addons as tfa
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from sklearn import metrics
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
import pandas as pd
import numpy as np
import argparse
import time
import grpc
from protobuf.api_pb2 import ModelParameters, ModelResults, Empty, Optimizer, ActivationFunc
from protobuf.api_pb2_grpc import APIStub
def ParseArgs() -> argparse.Namespace:
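    """Parse command-line options; currently just the gRPC server address."""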
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--server", action="store", dest="server",
                        type=str, default="localhost:10000",
                        help="Server address to connect to")
    return parser.parse_args()
def CreateModel(params: ModelParameters, shape) -> Sequential:
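    """Build and compile a Keras binary classifier from server-supplied parameters."""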
    model = Sequential()
    activation_func = ActivationFunc.Name(params.activation_func).lower()
    optimizers = {
        Optimizer.Adam: tf.keras.optimizers.Adam,
        Optimizer.SGD: tf.keras.optimizers.SGD,
        Optimizer.RMSprop: tf.keras.optimizers.RMSprop
    }
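    # Stack the configured hidden layers; only the first needs an explicit input shape.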
    for i, layer in enumerate(params.layers):
        if i == 0:
            model.add(Dense(units=layer.num_neurons,
                            activation=activation_func,
                            input_shape=(shape[1],)))
        else:
            model.add(Dense(units=layer.num_neurons,
                            activation=activation_func))
        if params.dropout:
            model.add(Dropout(0.25))
    model.add(Dense(1, activation="sigmoid"))
    optimizer = optimizers[params.optimizer]
    model.compile(optimizer=optimizer(params.learning_rate),
                  loss=tf.keras.losses.binary_crossentropy,
                  metrics=["accuracy"])
    return model
if __name__ == "__main__":
    args = ParseArgs()
    # Skip the region-title and repeated header rows of the two-region CSV.
    df = pd.read_csv("https://archive.ics.uci.edu/ml/machine-learning-databases/00547/Algerian_forest_fires_dataset_UPDATE.csv",
                     skiprows=[0, 124, 125, 126])
    df.columns = df.columns.map(lambda x: x.strip())
    df.drop(columns=["day", "year"], inplace=True)
    df.dropna(inplace=True)
    # Binary target: 1 if the row is labelled "fire", 0 otherwise.
    df["Classes"] = df["Classes"].map(lambda x: 1 if x.strip() == "fire" else 0)
    X = np.array(df.drop(["Classes"], axis=1))
    y = np.array(df["Classes"])
    scaler = preprocessing.MinMaxScaler()
    X = scaler.fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
    print(f"Connecting to {args.server}")
    with grpc.insecure_channel(args.server) as channel:
        stub = APIStub(channel)
        while True:
            try:
                params = stub.GetModelParams(Empty())
                print(params)
                model = CreateModel(params, X.shape)
                # Train model
                model.fit(X_train, y_train,
                          batch_size=32,
                          epochs=10,
                          verbose=1)
                y_pred = model.predict(X_test)
                y_pred = (y_pred > 0.5).astype("int32")
                f1_score = metrics.f1_score(y_test, y_pred)
                print(f"F1: {f1_score}")
                results = ModelResults()
                results.model_id = params.model_id
                # The ModelResults proto carries the F1 score in its recall field.
                results.recall = f1_score
                print("Returning results")
                _ = stub.ReturnModel(results)
            except grpc.RpcError as rpc_error:
                if rpc_error.code() == grpc.StatusCode.CANCELLED:
                    print("No models to evaluate now. Sleeping...")
                    time.sleep(0.5)
                elif rpc_error.code() == grpc.StatusCode.UNAVAILABLE:
                    print("Server is down")
                    exit(0)
                else:
                    print(rpc_error)
                    exit(1)
|
[
"argparse.ArgumentParser",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.Dense",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"protobuf.api_pb2.ModelResults",
"sklearn.preprocessing.MinMaxScaler",
"grpc.insecure_channel",
"protobuf.api_pb2.Empty",
"time.sleep",
"sklearn.metrics.f1_score",
"numpy.array",
"tensorflow.keras.models.Sequential",
"protobuf.api_pb2_grpc.APIStub",
"protobuf.api_pb2.ActivationFunc.Name"
] |
[((588, 613), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (611, 613), False, 'import argparse\n'), ((917, 929), 'tensorflow.keras.models.Sequential', 'Sequential', ([], {}), '()\n', (927, 929), False, 'from tensorflow.keras.models import Sequential\n'), ((1942, 2100), 'pandas.read_csv', 'pd.read_csv', (['"""https://archive.ics.uci.edu/ml/machine-learning-databases/00547/Algerian_forest_fires_dataset_UPDATE.csv"""'], {'skiprows': '[0, 124, 125, 126]'}), "(\n 'https://archive.ics.uci.edu/ml/machine-learning-databases/00547/Algerian_forest_fires_dataset_UPDATE.csv'\n , skiprows=[0, 124, 125, 126])\n", (1953, 2100), True, 'import pandas as pd\n'), ((2375, 2398), 'numpy.array', 'np.array', (["df['Classes']"], {}), "(df['Classes'])\n", (2383, 2398), True, 'import numpy as np\n'), ((2413, 2441), 'sklearn.preprocessing.MinMaxScaler', 'preprocessing.MinMaxScaler', ([], {}), '()\n', (2439, 2441), False, 'from sklearn import preprocessing\n'), ((2514, 2567), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(0)'}), '(X, y, test_size=0.2, random_state=0)\n', (2530, 2567), False, 'from sklearn.model_selection import train_test_split\n'), ((1624, 1654), 'tensorflow.keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (1629, 1654), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((2620, 2654), 'grpc.insecure_channel', 'grpc.insecure_channel', (['args.server'], {}), '(args.server)\n', (2641, 2654), False, 'import grpc\n'), ((2682, 2698), 'protobuf.api_pb2_grpc.APIStub', 'APIStub', (['channel'], {}), '(channel)\n', (2689, 2698), False, 'from protobuf.api_pb2_grpc import APIStub\n'), ((953, 996), 'protobuf.api_pb2.ActivationFunc.Name', 'ActivationFunc.Name', (['params.activation_func'], {}), '(params.activation_func)\n', (972, 996), False, 'from protobuf.api_pb2 import ModelParameters, ModelResults, Empty, Optimizer, ActivationFunc\n'), ((1275, 1363), 'tensorflow.keras.layers.Dense', 'Dense', ([], {'units': 'layer.num_neurons', 'activation': 'activation_func', 'input_shape': '(shape[1],)'}), '(units=layer.num_neurons, activation=activation_func, input_shape=(\n shape[1],))\n', (1280, 1363), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((1452, 1510), 'tensorflow.keras.layers.Dense', 'Dense', ([], {'units': 'layer.num_neurons', 'activation': 'activation_func'}), '(units=layer.num_neurons, activation=activation_func)\n', (1457, 1510), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((1590, 1603), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (1597, 1603), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((3214, 3246), 'sklearn.metrics.f1_score', 'metrics.f1_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (3230, 3246), False, 'from sklearn import metrics\n'), ((3327, 3341), 'protobuf.api_pb2.ModelResults', 'ModelResults', ([], {}), '()\n', (3339, 3341), False, 'from protobuf.api_pb2 import ModelParameters, ModelResults, Empty, Optimizer, ActivationFunc\n'), ((2782, 2789), 'protobuf.api_pb2.Empty', 'Empty', ([], {}), '()\n', (2787, 2789), False, 'from protobuf.api_pb2 import ModelParameters, ModelResults, Empty, Optimizer, ActivationFunc\n'), ((3726, 3741), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3736, 3741), False, 'import time\n')]
|
# Generated by Django 3.0.4 on 2020-04-04 04:06
from django.db import migrations, models
class Migration(migrations.Migration):
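    # Adds Account.universityID and an optional Module.moduleCode.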
    dependencies = [
        ('timetable', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='account',
            name='universityID',
            field=models.CharField(default='xxxx', max_length=10),
        ),
        migrations.AddField(
            model_name='module',
            name='moduleCode',
            field=models.CharField(blank=True, max_length=9),
        ),
    ]
|
[
"django.db.models.CharField"
] |
[((332, 379), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""xxxx"""', 'max_length': '(10)'}), "(default='xxxx', max_length=10)\n", (348, 379), False, 'from django.db import migrations, models\n'), ((503, 545), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(9)'}), '(blank=True, max_length=9)\n', (519, 545), False, 'from django.db import migrations, models\n')]
|