code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# Generated by Django 3.1.12 on 2021-08-18 21:24
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds two optional (nullable) supplementary-
    # firing parameters to the CHPModel table.
    dependencies = [
        ('reo', '0115_auto_20210810_1550'),
    ]
    operations = [
        migrations.AddField(
            model_name='chpmodel',
            name='supplementary_firing_efficiency',
            field=models.FloatField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='chpmodel',
            name='supplementary_firing_max_steam_ratio',
            field=models.FloatField(blank=True, null=True),
        ),
    ]
| [
"django.db.models.FloatField"
] | [((358, 398), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (375, 398), False, 'from django.db import migrations, models\n'), ((550, 590), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (567, 590), False, 'from django.db import migrations, models\n')] |
import factory
class ServiceCategoryFactory(factory.django.DjangoModelFactory):
    """Builds core.ServiceCategory rows with unique name/slug pairs."""

    class Meta:
        model = "core.ServiceCategory"

    # Sequences guarantee uniqueness across instances built in one test run.
    name = factory.Sequence(lambda counter: f"Service Category {counter}")
    slug = factory.Sequence(lambda counter: f"service-category-{counter}")
    description = factory.Faker("sentence")
    # Fixed icon path shared by every generated category.
    icon = "categories/shelter.png"
class ServiceFactory(factory.django.DjangoModelFactory):
    """Builds core.Service rows populated with fake but realistic data."""

    class Meta:
        model = "core.Service"

    # Identity / description fields.
    name = factory.Sequence(lambda counter: f"Service {counter}")
    organization_name = factory.Faker("company")
    description = factory.Faker("paragraph")
    website = factory.Faker("url")
    # Location fields.
    street_address = factory.Faker("street_address")
    city = factory.Faker("city")
    state = factory.Faker("state_abbr")
    zip_code = factory.Faker("postcode")
    latitude = factory.Faker("latitude")
    longitude = factory.Faker("longitude")
    # Scheduling and contact fields.
    operating_hours = "9am - 5pm Monday-Friday"
    phone_number = factory.Faker("phone_number")
    email = factory.Faker("email")
    # Each service is attached to a freshly built category.
    category = factory.SubFactory(ServiceCategoryFactory)
| [
"factory.Faker",
"factory.Sequence",
"factory.SubFactory"
] | [((149, 200), 'factory.Sequence', 'factory.Sequence', (["(lambda n: f'Service Category {n}')"], {}), "(lambda n: f'Service Category {n}')\n", (165, 200), False, 'import factory\n'), ((212, 263), 'factory.Sequence', 'factory.Sequence', (["(lambda n: f'service-category-{n}')"], {}), "(lambda n: f'service-category-{n}')\n", (228, 263), False, 'import factory\n'), ((282, 307), 'factory.Faker', 'factory.Faker', (['"""sentence"""'], {}), "('sentence')\n", (295, 307), False, 'import factory\n'), ((462, 504), 'factory.Sequence', 'factory.Sequence', (["(lambda n: f'Service {n}')"], {}), "(lambda n: f'Service {n}')\n", (478, 504), False, 'import factory\n'), ((529, 553), 'factory.Faker', 'factory.Faker', (['"""company"""'], {}), "('company')\n", (542, 553), False, 'import factory\n'), ((572, 598), 'factory.Faker', 'factory.Faker', (['"""paragraph"""'], {}), "('paragraph')\n", (585, 598), False, 'import factory\n'), ((613, 633), 'factory.Faker', 'factory.Faker', (['"""url"""'], {}), "('url')\n", (626, 633), False, 'import factory\n'), ((655, 686), 'factory.Faker', 'factory.Faker', (['"""street_address"""'], {}), "('street_address')\n", (668, 686), False, 'import factory\n'), ((698, 719), 'factory.Faker', 'factory.Faker', (['"""city"""'], {}), "('city')\n", (711, 719), False, 'import factory\n'), ((732, 759), 'factory.Faker', 'factory.Faker', (['"""state_abbr"""'], {}), "('state_abbr')\n", (745, 759), False, 'import factory\n'), ((775, 800), 'factory.Faker', 'factory.Faker', (['"""postcode"""'], {}), "('postcode')\n", (788, 800), False, 'import factory\n'), ((816, 841), 'factory.Faker', 'factory.Faker', (['"""latitude"""'], {}), "('latitude')\n", (829, 841), False, 'import factory\n'), ((858, 884), 'factory.Faker', 'factory.Faker', (['"""longitude"""'], {}), "('longitude')\n", (871, 884), False, 'import factory\n'), ((952, 981), 'factory.Faker', 'factory.Faker', (['"""phone_number"""'], {}), "('phone_number')\n", (965, 981), False, 'import factory\n'), ((994, 1016), 
'factory.Faker', 'factory.Faker', (['"""email"""'], {}), "('email')\n", (1007, 1016), False, 'import factory\n'), ((1032, 1074), 'factory.SubFactory', 'factory.SubFactory', (['ServiceCategoryFactory'], {}), '(ServiceCategoryFactory)\n', (1050, 1074), False, 'import factory\n')] |
# Generated by Django 2.1.7 on 2019-03-22 01:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds ObjectConfigurationStatus.post_url with
    # a placeholder default of 'none'.
    dependencies = [
        ('nxos_config_import', '0009_auto_20190322_1408'),
    ]
    operations = [
        migrations.AddField(
            model_name='objectconfigurationstatus',
            name='post_url',
            field=models.URLField(default='none'),
        ),
    ]
| [
"django.db.models.URLField"
] | [((366, 397), 'django.db.models.URLField', 'models.URLField', ([], {'default': '"""none"""'}), "(default='none')\n", (381, 397), False, 'from django.db import migrations, models\n')] |
import logging
import psycopg2
import psycopg2.extras
import socket
import sys
import time
from cluster_under_test import *
class DbRetriable:
    """
    Wrapper around psycopg2, which offers convenient retry functionality.
    If connection to postgres is lost during query execution or between
    queries, retry with increasing intervals.
    Low level functionality: create_connection_with_cursor, run_with_fail, run_with_retry
    Here you have access to both connection and cursor objects and can e.g
    run multiple inserts with cursor.execute and the commit them together with connection.commit()
    More convenient `execute`: run query, commit, return the records. Is
    sutable for both: selects and insert/update/delete. Supports auto-retry.
    Usage:
    db = DbRetriable(dbname="postgres", user="...", password="...")
    """
    def __init__(self, **other_connection_args):
        """Saves connection_args so they can be later used for connection retry."""
        self.other_connection_args = other_connection_args
        # Attempt counter; incremented on every OperationalError in
        # run_with_retry and reported back to the caller's block.
        self.ntry = 1
    def create_connection_with_cursor(self):
        """@returns tuple with connection and cursor"""
        # Reresolve the host name on every connection
        resolved = ClusterUnderTest.resolve_service_url()
        con = psycopg2.connect(host=resolved, **self.other_connection_args)
        cur = con.cursor()
        return (con, cur)
    def run_with_retry(self):
        '''
        Runs a block until queries succeed.
        Generator provides following to the executed block:
        * psycopg2.connection object
        * psycopg2.cursor object
        * number of retries so far
        Example:
        >>> for (con, cur, ntry) in db.run_with_retry():
        ...     cur.execute("""INSERT INTO testdata(batch, try, name)
        ...                    SELECT %s, %s, md5(random()::text)
        ...                    FROM generate_series(1,%s);""",
        ...                 (self.batch, ntry, self.BATCH_SIZE))
        ...     con.commit()
        '''
        last_exception = ''
        delay = 1
        while True:
            try:
                # Fresh connection per attempt; yield gives the caller's
                # block a chance to run its queries.
                con, cur = self.create_connection_with_cursor()
                yield con, cur, self.ntry
                con.commit()
                break
            except psycopg2.OperationalError as e:
                self.ntry +=1
                # Collapse repeated identical errors into '+' marks so the
                # log stays readable during long outages.
                if str(e) == last_exception:
                    sys.stdout.write('+')
                    sys.stdout.flush()
                else:
                    last_exception = str(e)
                    print(e)
                # Exponential backoff: 1, 2, 4, 8, 16, then capped.
                time.sleep(delay)
                delay = delay if delay > 15 else delay*2
        # Terminate the '+' progress line with a newline, if any was printed.
        if last_exception != '':
            print()
    def run_with_fail(self):
        """
        Similar API to run_with_retry. but try to connect and run the block only once. Fail on failure.
        """
        con, cur = self.create_connection_with_cursor()
        yield con, cur, self.ntry
    def execute(self, query, params=None, retry=False):
        """
        Shortcut to
        * run query with params
        * with retry if desired and necessary
        * commits at the end
        * return the dataset as array, if any
        Is sutable for both: selects and insert/update/delete
        >>> print(db.execute("SELECT count(*) from testdata;")[0])
        """
        if retry:
            for (con, cur, ntry) in self.run_with_retry():
                cur.execute(query, params)
        else:
            for (con, cur, ntry) in self.run_with_fail():
                cur.execute(query, params)
        # Statements without a result set (INSERT/UPDATE/DELETE) raise
        # ProgrammingError on fetchall(); treat that as "no rows".
        try:
            res = cur.fetchall()
        except psycopg2.ProgrammingError as ex:
            res = None # no results to fetch
        con.commit()
        return res
| [
"psycopg2.connect",
"sys.stdout.flush",
"time.sleep",
"sys.stdout.write"
] | [((1299, 1360), 'psycopg2.connect', 'psycopg2.connect', ([], {'host': 'resolved'}), '(host=resolved, **self.other_connection_args)\n', (1315, 1360), False, 'import psycopg2\n'), ((2585, 2602), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (2595, 2602), False, 'import time\n'), ((2413, 2434), 'sys.stdout.write', 'sys.stdout.write', (['"""+"""'], {}), "('+')\n", (2429, 2434), False, 'import sys\n'), ((2455, 2473), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2471, 2473), False, 'import sys\n')] |
#!/usr/bin/python3
import json
from argparse import ArgumentParser
def get_args():
    """Build and parse the command-line options for the merge tool."""
    parser = ArgumentParser(description='Merge CLOSURE xdconf.ini files')
    parser.add_argument('-f', '--files', required=True, type=str, help='Input files')
    parser.add_argument('-o', '--outfile', required=False, type=str, default='xdconf.ini', help='Output file')
    return parser.parse_args()
def main(args=None):
    """Merge multiple CLOSURE xdconf.ini (JSON) files into one output file.

    Enclaves that appear in several input files are merged by concatenating
    their halmaps, provided their inuri/outuri agree; otherwise the merge is
    aborted.

    Args:
        args: optional pre-parsed argparse namespace with ``files`` (space
            separated input paths) and ``outfile``. Defaults to parsing the
            command line via get_args(), preserving the original behavior.
    """
    if args is None:
        args = get_args()
    print('Options selected:')
    for x in vars(args).items():
        print('  %s: %s' % x)
    files = args.files.split(' ')
    if len(files) < 1:
        print('Require at least one file to merge')
        return
    data = {'enclaves': []}
    for f in files:
        with open(f, 'r') as inf:
            cur = json.load(inf)
        for e in cur['enclaves']:
            # Find a previously merged enclave with the same name, if any.
            match = None
            for e1 in data['enclaves']:
                if e['enclave'] == e1['enclave']:
                    match = e1
                    break
            if match is None:
                # First occurrence of this enclave: take it wholesale.
                data['enclaves'].append(e)
            else:
                if e['inuri'] != match['inuri'] or e['outuri'] != match['outuri']:
                    # BUG FIX: the original evaluated the bare name `exit`
                    # (a no-op expression) and silently kept merging
                    # inconsistent data; actually abort here.
                    print('URI do not match, merge not possible')
                    return
                # XXX: need to check for duplicates
                print("Warning: Not checking for duplicate halmaps")
                match['halmaps'].extend(e['halmaps'])
    with open(args.outfile, 'w') as outf:
        json.dump(data, outf, indent=2)


if __name__ == '__main__':
    main()
| [
"json.load",
"json.dump",
"argparse.ArgumentParser"
] | [((97, 157), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Merge CLOSURE xdconf.ini files"""'}), "(description='Merge CLOSURE xdconf.ini files')\n", (111, 157), False, 'from argparse import ArgumentParser\n'), ((1406, 1437), 'json.dump', 'json.dump', (['data', 'outf'], {'indent': '(2)'}), '(data, outf, indent=2)\n', (1415, 1437), False, 'import json\n'), ((679, 693), 'json.load', 'json.load', (['inf'], {}), '(inf)\n', (688, 693), False, 'import json\n')] |
'''
Copyright (c) The Dojo Foundation 2011. All Rights Reserved.
Copyright (c) IBM Corporation 2008, 2011. All Rights Reserved.
'''
# std lib
import ConfigParser
# tornado
import tornado.web
from base import AuthBase
class IniAuth(AuthBase):
    '''Cookie-based authentication backed by a simple INI users file.'''
    # Name of the secure cookie holding the authenticated username.
    cookieName = 'coweb.auth.ini.username'
    def __init__(self, container, iniPath='users.ini'):
        super(IniAuth, self).__init__(container)
        # compute abs path to ini file
        self._iniPath = self._container.get_absolute_path(iniPath)
    def requires_login(self):
        '''Requires user login.'''
        return True
    def requires_cookies(self):
        '''Uses tornado's secure cookies'.'''
        return True
    def get_current_user(self, handler):
        '''Gets the current username from the secure cookie.'''
        return handler.get_secure_cookie(self.cookieName)
    def check_credentials(self, handler, username, password):
        '''Checks the login credentials against a simple INI file.

        Looks the user up in the [md5] section first, then in [plain];
        sets the auth cookie on success, raises HTTP 403 on failure.
        '''
        # @todo: put this on a timer or something; wasteful to do each time
        users = ConfigParser.ConfigParser()
        users.optionxform = str
        users.read(self._iniPath)
        known = False
        try:
            pw = users.get('md5', username)
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            pass
        else:
            known = (pw == password)
        # BUG FIX: the original re-checked the [plain] section
        # unconditionally and set known = False when the user had no
        # [plain] entry, which made [md5]-only users impossible to
        # authenticate. Only fall back to [plain] if md5 did not match.
        if not known:
            try:
                pw = users.get('plain', username)
            except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
                pass
            else:
                known = (pw == password)
        if known:
            handler.set_secure_cookie(self.cookieName, username)
        else:
            raise tornado.web.HTTPError(403)
    def clear_credentials(self, handler):
        '''Clears the authentication cookie.'''
        handler.clear_cookie(self.cookieName)
"ConfigParser.ConfigParser"
] | [((1083, 1110), 'ConfigParser.ConfigParser', 'ConfigParser.ConfigParser', ([], {}), '()\n', (1108, 1110), False, 'import ConfigParser\n')] |
from django.conf import settings
from django_filters.rest_framework import DjangoFilterBackend
from django_filters.rest_framework import UUIDFilter
from rest_framework.permissions import IsAuthenticated
from contentcuration.celery import app
from contentcuration.models import Channel
from contentcuration.models import Task
from contentcuration.viewsets.base import DestroyModelMixin
from contentcuration.viewsets.base import ReadOnlyValuesViewset
from contentcuration.viewsets.base import RequiredFilterSet
class TaskFilter(RequiredFilterSet):
    # Restricts task listings to a single channel, resolved via filter_channel.
    channel = UUIDFilter(method="filter_channel")
    def filter_channel(self, queryset, name, value):
        # Only apply the channel filter when the channel is visible to the
        # requesting user; otherwise return an empty queryset so task
        # existence is not leaked for unviewable channels.
        channel_queryset = Channel.filter_view_queryset(Channel.objects.all(), self.request.user)
        if channel_queryset.filter(id=value).exists():
            return queryset.filter(channel_id=value)
        return queryset.none()
    class Meta:
        model = Task
        fields = ("channel",)
class TaskViewSet(ReadOnlyValuesViewset, DestroyModelMixin):
    # Read-only task API keyed by Celery task_id, with delete support that
    # also revokes the underlying Celery task.
    queryset = Task.objects.all()
    permission_classes = [IsAuthenticated]
    filter_backends = (DjangoFilterBackend,)
    filter_class = TaskFilter
    lookup_field = "task_id"
    values = (
        "task_id",
        "task_type",
        "created",
        "status",
        "is_progress_tracking",
        "user_id",
        "metadata",
    )
    field_map = {"user": "user_id"}
    @classmethod
    def id_attr(cls):
        return "task_id"
    def perform_destroy(self, instance):
        # TODO: Add logic to delete the Celery task using app.control.revoke(). This will require some extensive
        # testing to ensure terminating in-progress tasks will not put the db in an indeterminate state.
        app.control.revoke(instance.task_id, terminate=True)
        instance.delete()
    def get_edit_queryset(self):
        # Users may only modify/delete their own tasks.
        return Task.objects.filter(user=self.request.user)
    def consolidate(self, items, queryset):
        # Overlay live Celery state (status, progress) onto the serialized
        # task rows; skipped when tasks run eagerly (no broker to query).
        if not settings.CELERY_TASK_ALWAYS_EAGER:
            for item in items:
                result = app.AsyncResult(item["task_id"])
                if result and result.status:
                    item["status"] = result.status
                if "progress" not in item["metadata"]:
                    # Just flagging this, but this appears to be the correct way to get task metadata,
                    # even though the API is marked as private.
                    meta = result._get_task_meta()
                    if (
                        meta
                        and "result" in meta
                        and meta["result"]
                        and not isinstance(meta["result"], Exception)
                        and "progress" in meta["result"]
                    ):
                        item["metadata"]["progress"] = meta["result"]["progress"]
                    else:
                        item["metadata"]["progress"] = None
            # NOTE(review): as extracted, this assignment sits after the
            # per-item loop (so it touches only the loop's last item) and
            # only runs when tasks are not eager — verify whether it should
            # be inside `for item in items` instead.
            item["channel"] = (
                item.get("metadata", {}).get("affects", {}).get("channel")
            )
        return items
| [
"contentcuration.celery.app.control.revoke",
"django_filters.rest_framework.UUIDFilter",
"contentcuration.celery.app.AsyncResult",
"contentcuration.models.Task.objects.all",
"contentcuration.models.Task.objects.filter",
"contentcuration.models.Channel.objects.all"
] | [((563, 598), 'django_filters.rest_framework.UUIDFilter', 'UUIDFilter', ([], {'method': '"""filter_channel"""'}), "(method='filter_channel')\n", (573, 598), False, 'from django_filters.rest_framework import UUIDFilter\n'), ((1036, 1054), 'contentcuration.models.Task.objects.all', 'Task.objects.all', ([], {}), '()\n', (1052, 1054), False, 'from contentcuration.models import Task\n'), ((1742, 1794), 'contentcuration.celery.app.control.revoke', 'app.control.revoke', (['instance.task_id'], {'terminate': '(True)'}), '(instance.task_id, terminate=True)\n', (1760, 1794), False, 'from contentcuration.celery import app\n'), ((1870, 1913), 'contentcuration.models.Task.objects.filter', 'Task.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (1889, 1913), False, 'from contentcuration.models import Task\n'), ((709, 730), 'contentcuration.models.Channel.objects.all', 'Channel.objects.all', ([], {}), '()\n', (728, 730), False, 'from contentcuration.models import Channel\n'), ((2065, 2097), 'contentcuration.celery.app.AsyncResult', 'app.AsyncResult', (["item['task_id']"], {}), "(item['task_id'])\n", (2080, 2097), False, 'from contentcuration.celery import app\n')] |
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from django.core import serializers
from .models import Event
from .serializers import EventSerializer
from django.db.models import Q
import datetime
def get_events(request):
    """Return events as JSON, filtered by optional start/end dates, chamber,
    committee and type query parameters ("all" disables a filter)."""
    start_param = request.GET.get('start')
    end_param = request.GET.get('end')
    start = datetime.date.today()
    end = datetime.date.today()
    chamber = request.GET.get('chamber')
    committee = request.GET.get('committee')
    event_type = request.GET.get('type')
    if start_param is not None:
        start = datetime.datetime.strptime(start_param, '%Y-%m-%dT%H:%M:%S%z')
    if end_param is not None:
        end = datetime.datetime.strptime(end_param, '%Y-%m-%dT%H:%M:%S%z')
    # Inclusive end: push the upper bound one day forward.
    new_end = end + datetime.timedelta(days=1)
    filters = Q()
    if start:
        filters &= Q(start__gte=start)
    if end:
        filters &= Q(end__lte=new_end)
    if chamber and chamber != "all":
        filters &= Q(chamber=chamber)
    if committee and committee != "all":
        filters &= Q(committee=committee)
    if event_type and event_type != "all":
        filters &= Q(type=event_type)
    matching = Event.objects.filter(filters).order_by('start', 'startTime')
    serializer = EventSerializer(matching, many=True)
    return JsonResponse(serializer.data, safe = False)
def get_committees(request):
    """Return the distinct committee names from the Event table as JSON."""
    names = (
        Event.objects.order_by('committee')
        .values_list('committee', flat=True)
        .distinct()
    )
    return JsonResponse(list(names), safe = False)
| [
"django.http.JsonResponse",
"datetime.datetime.strptime",
"datetime.date.today",
"django.db.models.Q",
"datetime.timedelta"
] | [((352, 373), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (371, 373), False, 'import datetime\n'), ((382, 403), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (401, 403), False, 'import datetime\n'), ((796, 799), 'django.db.models.Q', 'Q', ([], {}), '()\n', (797, 799), False, 'from django.db.models import Q\n'), ((1229, 1270), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'safe': '(False)'}), '(serializer.data, safe=False)\n', (1241, 1270), False, 'from django.http import HttpResponse, JsonResponse\n'), ((576, 636), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['startDate', '"""%Y-%m-%dT%H:%M:%S%z"""'], {}), "(startDate, '%Y-%m-%dT%H:%M:%S%z')\n", (602, 636), False, 'import datetime\n'), ((680, 738), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['endDate', '"""%Y-%m-%dT%H:%M:%S%z"""'], {}), "(endDate, '%Y-%m-%dT%H:%M:%S%z')\n", (706, 738), False, 'import datetime\n'), ((760, 786), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (778, 786), False, 'import datetime\n'), ((828, 847), 'django.db.models.Q', 'Q', ([], {'start__gte': 'start'}), '(start__gte=start)\n', (829, 847), False, 'from django.db.models import Q\n'), ((873, 892), 'django.db.models.Q', 'Q', ([], {'end__lte': 'new_end'}), '(end__lte=new_end)\n', (874, 892), False, 'from django.db.models import Q\n'), ((943, 961), 'django.db.models.Q', 'Q', ([], {'chamber': 'chamber'}), '(chamber=chamber)\n', (944, 961), False, 'from django.db.models import Q\n'), ((1016, 1038), 'django.db.models.Q', 'Q', ([], {'committee': 'committee'}), '(committee=committee)\n', (1017, 1038), False, 'from django.db.models import Q\n'), ((1083, 1095), 'django.db.models.Q', 'Q', ([], {'type': 'type'}), '(type=type)\n', (1084, 1095), False, 'from django.db.models import Q\n')] |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from azure.keyvault.certificates._shared import HttpChallengeCache
from azure.keyvault.certificates._shared.client_base import DEFAULT_VERSION
from devtools_testutils import AzureTestCase
from parameterized import parameterized
import pytest
def suffixed_test_name(testcase_func, param_num, param):
    """Name a parameterized test '<base name>_<api_version>' for readable reports."""
    api_version = param.kwargs.get("api_version")
    suffix = parameterized.to_safe_name(api_version)
    return "{}_{}".format(testcase_func.__name__, suffix)
class CertificatesTestCase(AzureTestCase):
    # Shared base for Key Vault certificate tests: clears the auth challenge
    # cache between tests and builds sync/async CertificateClients.
    def tearDown(self):
        # The challenge cache is process-global; clear it so one test's
        # cached auth challenge cannot leak into the next.
        HttpChallengeCache.clear()
        assert len(HttpChallengeCache._cache) == 0
        super(CertificatesTestCase, self).tearDown()
    def create_client(self, vault_uri, **kwargs):
        # "is_async" selects the aio client; the credential must be created
        # for the matching client type.
        if kwargs.pop("is_async", False):
            from azure.keyvault.certificates.aio import CertificateClient
            credential = self.get_credential(CertificateClient, is_async=True)
        else:
            from azure.keyvault.certificates import CertificateClient
            credential = self.get_credential(CertificateClient)
        return self.create_client_from_credential(
            CertificateClient, credential=credential, vault_url=vault_uri, **kwargs
        )
    def _skip_if_not_configured(self, api_version, **kwargs):
        # Live runs only exercise the default API version.
        if self.is_live and api_version != DEFAULT_VERSION:
            pytest.skip("This test only uses the default API version for live tests")
| [
"azure.keyvault.certificates._shared.HttpChallengeCache.clear",
"pytest.skip"
] | [((640, 666), 'azure.keyvault.certificates._shared.HttpChallengeCache.clear', 'HttpChallengeCache.clear', ([], {}), '()\n', (664, 666), False, 'from azure.keyvault.certificates._shared import HttpChallengeCache\n'), ((1445, 1518), 'pytest.skip', 'pytest.skip', (['"""This test only uses the default API version for live tests"""'], {}), "('This test only uses the default API version for live tests')\n", (1456, 1518), False, 'import pytest\n')] |
import logging
from pyramid.httpexceptions import HTTPNotImplemented
from pyramid.renderers import render, render_to_response
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
class RestView(object):
    # Base class for REST-style Pyramid views: caches common request helpers
    # and stubs out the standard CRUD actions for subclasses to override.
    renderers = {}
    def __init__(self, request):
        # Convenience shortcuts pulled off the Pyramid request.
        self.request = request
        self.params = request.params
        self.url = request.route_url
        self.c = request.tmpl_context
        self.routes = self.request.matchdict
    def render_(self, *args, **kwargs):
        # Render a template to a string (the request is added automatically).
        kwargs['request'] = self.request
        return render(*args, **kwargs)
    def render(self, *args, **kwargs):
        # Render a template straight to a Response object.
        kwargs['request'] = self.request
        return render_to_response(*args, **kwargs)
    # CRUD actions: every one raises 501 until a subclass overrides it.
    def index(self):
        raise HTTPNotImplemented()
    def new(self):
        raise HTTPNotImplemented()
    def create(self):
        raise HTTPNotImplemented()
    def view(self):
        raise HTTPNotImplemented()
    def edit(self):
        raise HTTPNotImplemented()
    def update(self):
        raise HTTPNotImplemented()
    def delete(self):
        raise HTTPNotImplemented()
"logging.getLogger",
"pyramid.renderers.render",
"pyramid.httpexceptions.HTTPNotImplemented",
"pyramid.renderers.render_to_response"
] | [((134, 161), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (151, 161), False, 'import logging\n'), ((527, 550), 'pyramid.renderers.render', 'render', (['*args'], {}), '(*args, **kwargs)\n', (533, 550), False, 'from pyramid.renderers import render, render_to_response\n'), ((647, 682), 'pyramid.renderers.render_to_response', 'render_to_response', (['*args'], {}), '(*args, **kwargs)\n', (665, 682), False, 'from pyramid.renderers import render, render_to_response\n'), ((719, 739), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (737, 739), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n'), ((774, 794), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (792, 794), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n'), ((832, 852), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (850, 852), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n'), ((888, 908), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (906, 908), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n'), ((944, 964), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (962, 964), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n'), ((1002, 1022), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (1020, 1022), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n'), ((1060, 1080), 'pyramid.httpexceptions.HTTPNotImplemented', 'HTTPNotImplemented', ([], {}), '()\n', (1078, 1080), False, 'from pyramid.httpexceptions import HTTPNotImplemented\n')] |
# Let's make an database
#
# this is like the worst code ever
#
# just to make an test DB for burn the subs
import time
from datetime import datetime
from bts.dataBaseClass import Sub
def main():
    """Populate the Sub table from subscriberListTest.txt, one row per line.

    Each line is treated as a username; every entry gets the same fixed
    status/font/position values and an UTC timestamp.
    """
    print("making:")
    # fix: use a context manager so the file is closed even if an insert fails
    # (the original only closed it on the success path).
    with open("subscriberListTest.txt") as fileName:
        for entry in fileName:
            entry = entry.strip()
            dateTime = datetime.utcnow()
            dbEntry = Sub.create(
                userName = entry,
                entryTime = dateTime,
                status = 2,
                fontSize = 72,
                positionX = 1000,
                positionY = 1000
            )
            print(entry)
            # Throttle inserts slightly (original pacing preserved).
            time.sleep(0.2)
            dbEntry.save()
    print("done, you fuck.")


if __name__ == "__main__":
    main()
"bts.dataBaseClass.Sub.create",
"time.sleep",
"datetime.datetime.utcnow"
] | [((360, 377), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (375, 377), False, 'from datetime import datetime\n'), ((407, 512), 'bts.dataBaseClass.Sub.create', 'Sub.create', ([], {'userName': 'entry', 'entryTime': 'dateTime', 'status': '(2)', 'fontSize': '(72)', 'positionX': '(1000)', 'positionY': '(1000)'}), '(userName=entry, entryTime=dateTime, status=2, fontSize=72,\n positionX=1000, positionY=1000)\n', (417, 512), False, 'from bts.dataBaseClass import Sub\n'), ((684, 699), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (694, 699), False, 'import time\n')] |
from getpass import getpass
from nxapi_plumbing import Device
from lxml import etree
from pprint import pprint as pp
# Disable Self-signed Certificate Warnings
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# The lab device uses a self-signed certificate; silence the TLS warnings.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
'''
7. NX-API using XML and the nxapi_plumbing library
7a. Create an nxapi_plumbing "Device" object for nxos1. The api_format should be
"xml" and the transport should be "https" (port 8443). Use getpass() to capture
the device's password. Send the "show interface Ethernet2/1" command to the device,
parse the output, and print out the following information:
Interface: Ethernet2/1; State: up; MTU: 1500
7b. Run the following two show commands on the nxos1 device using a single method
and passing in a list of commands: "show system uptime" and "show system resources".
Print the XML output from these two commands.
7c. Using the nxapi_plumbing config_list() method, configure two loopbacks on
nxos1 including interface descriptions. Pick random loopback interface numbers
between 100 and 199.
'''
# NX-API connection over HTTPS with XML payloads; password is prompted for.
device = Device(
    api_format="xml",
    host="nxos1.lasthop.io",
    username="pyclass",
    password=getpass(),
    transport="https",
    port=8443,
    verify=False,
)
# 7a: pull selected fields out of the XML returned for one interface.
intf_output = device.show("show interface Ethernet2/1")
print('7a')
print(f'Interface: {intf_output.find(".//interface").text}')
print(f'State: {intf_output.find(".//state").text}')
print(f'MTU: {intf_output.find(".//eth_mtu").text}')
# 7b: run two show commands in one call and dump the raw XML.
print('7b')
show_output = device.show_list(["show system uptime", "show system resources"])
for output in show_output:
    print(etree.tostring(output, encoding="unicode"))
# 7c: push a small loopback configuration as a list of config lines.
commands = [
    "interface loopback151",
    "description loopback151",
    "no shutdown",
    "interface loopback152",
    "description loopback152",
    "no shutdown",
]
print('7c')
output = device.config_list(commands)
# Look at the output XML for each configuration command
for msg in output:
    print(etree.tostring(msg, encoding="unicode"))
| [
"requests.packages.urllib3.disable_warnings",
"getpass.getpass",
"lxml.etree.tostring"
] | [((249, 315), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', (['InsecureRequestWarning'], {}), '(InsecureRequestWarning)\n', (291, 315), False, 'import requests\n'), ((1227, 1236), 'getpass.getpass', 'getpass', ([], {}), '()\n', (1234, 1236), False, 'from getpass import getpass\n'), ((1663, 1705), 'lxml.etree.tostring', 'etree.tostring', (['output'], {'encoding': '"""unicode"""'}), "(output, encoding='unicode')\n", (1677, 1705), False, 'from lxml import etree\n'), ((2017, 2056), 'lxml.etree.tostring', 'etree.tostring', (['msg'], {'encoding': '"""unicode"""'}), "(msg, encoding='unicode')\n", (2031, 2056), False, 'from lxml import etree\n')] |
import os
import os.path
import webapp2
import logging
from webapp2 import WSGIApplication, Route
from google.appengine.api import users
# hack until we can make this public
# In-process page cache: maps 'pages-' + file path to the file's contents.
cache = dict()


class Content(webapp2.RequestHandler):
    """Serves static files from the app directory, falling back to the 404 page."""

    # Extension -> Content-Type header for the file types we serve.
    _CONTENT_TYPES = {
        '.css': 'text/css',
        '.js': 'application/javascript',
        '.html': 'text/html',
        '.svg': 'image/svg+xml',
    }

    def get(self, *args, **kwargs):
        """Resolve the URL path to a file, serve it (cached), 404 otherwise."""
        urlPath = args[0]
        root = os.path.split(__file__)[0]
        errorPath = os.path.join(root, '404', 'index.html')
        try:
            # Candidate resolutions, in priority order: exact .html file,
            # directory index variants, then the literal path.
            candidates = [
                os.path.join(root, urlPath + '.html'),
                os.path.join(root, urlPath + 'index.html'),
                os.path.join(root, urlPath + '/index.html'),
                os.path.join(root, urlPath),
            ]
            validPaths = [p for p in candidates if os.path.isfile(p)]
            if validPaths:
                path = validPaths[0]
            else:
                path = errorPath
                self.response.set_status(404)
            for ext, ctype in self._CONTENT_TYPES.items():
                if path.endswith(ext):
                    self.response.headers['Content-Type'] = ctype
                    break
            self.response.headers['Cache-Control'] = 'public'
            self.response.headers['max-age'] = '300' # 5 minutes
            key = 'pages-' + path
            if key not in cache:
                # fix: the original never closed the file handle.
                with open(path, 'r') as f:
                    cache[key] = f.read()
            self.response.out.write(cache[key])
        except Exception:
            # fix: was a bare `except:`, which also swallowed
            # system-exiting exceptions.
            logging.exception("unable to serve page")
            with open(errorPath, 'r') as f:
                self.response.out.write(f.read())
            self.response.set_status(404)
app = WSGIApplication([
    # Catch-all route: every request path is handled by Content.
    Route(r'/<:.*>', handler=Content)
], debug=True)
| [
"webapp2.Route",
"os.path.join",
"logging.exception",
"os.path.isfile",
"os.path.split"
] | [((341, 380), 'os.path.join', 'os.path.join', (['root', '"""404"""', '"""index.html"""'], {}), "(root, '404', 'index.html')\n", (353, 380), False, 'import os\n'), ((1781, 1813), 'webapp2.Route', 'Route', (['"""/<:.*>"""'], {'handler': 'Content'}), "('/<:.*>', handler=Content)\n", (1786, 1813), False, 'from webapp2 import WSGIApplication, Route\n'), ((298, 321), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (311, 321), False, 'import os\n'), ((414, 451), 'os.path.join', 'os.path.join', (['root', "(urlPath + '.html')"], {}), "(root, urlPath + '.html')\n", (426, 451), False, 'import os\n'), ((461, 503), 'os.path.join', 'os.path.join', (['root', "(urlPath + 'index.html')"], {}), "(root, urlPath + 'index.html')\n", (473, 503), False, 'import os\n'), ((513, 556), 'os.path.join', 'os.path.join', (['root', "(urlPath + '/index.html')"], {}), "(root, urlPath + '/index.html')\n", (525, 556), False, 'import os\n'), ((566, 593), 'os.path.join', 'os.path.join', (['root', 'urlPath'], {}), '(root, urlPath)\n', (578, 593), False, 'import os\n'), ((1563, 1604), 'logging.exception', 'logging.exception', (['"""unable to serve page"""'], {}), "('unable to serve page')\n", (1580, 1604), False, 'import logging\n'), ((649, 669), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (663, 669), False, 'import os\n')] |
# This file is used to define an AML pipeline for training the teacher on new labeled data
import json
import shutil
import os
from azureml.core import Workspace, Run, Experiment, Datastore
from azureml.data.data_reference import DataReference
from azureml.pipeline.core.schedule import ScheduleRecurrence, Schedule
from azureml.pipeline.core import Pipeline, PipelineData
from azureml.pipeline.steps import PythonScriptStep
from azureml.pipeline.core import PublishedPipeline
from azureml.core.runconfig import CondaDependencies, RunConfiguration
from azureml.core.runconfig import DEFAULT_GPU_IMAGE
from azureml.core.compute import AmlCompute
from azureml.core.compute import ComputeTarget
from azureml.core.authentication import ServicePrincipalAuthentication
from azureml.train.estimator import Estimator
config_json = 'config.json'
with open(config_json, 'r') as f:
config = json.load(f)
try:
svc_pr = ServicePrincipalAuthentication(
tenant_id=config['tenant_id'],
service_principal_id=config['service_principal_id'],
service_principal_password=config['<PASSWORD>'])
except KeyError as e:
print("WARNING: No Service Principal found in config.json. This is fine if we are operating in DevOps.")
svc_pr = None
pass
ws = Workspace.from_config(path=config_json, auth=svc_pr)
print(ws.name, ws.resource_group, ws.location, ws.subscription_id, sep = '\n')
def_blob_store = ws.get_default_datastore()
print("Blobstore's name: {}".format(def_blob_store.name))
base_dir = '.'
def_blob_store = ws.get_default_datastore()
# folder for scripts that need to be uploaded to Aml compute target
script_folder = './scripts'
os.makedirs(script_folder, exist_ok=True)
os.makedirs(os.path.join(script_folder, 'utils'), exist_ok=True)
shutil.copy(os.path.join(base_dir, 'config.json'), script_folder)
shutil.copy(os.path.join(base_dir, 'get_logits_from_xception.py'), script_folder)
shutil.copy(os.path.join(base_dir, 'squeezenet.py'), script_folder)
shutil.copy(os.path.join(base_dir, 'kd_squeezenet.py'), script_folder)
shutil.copy(os.path.join(base_dir, 'model_registration.py'), script_folder)
shutil.copy(os.path.join(base_dir, 'squeezenet_weights.hdf5'), script_folder)
shutil.copy(os.path.join('./utils', 'image_preprocessing_ver1.py'), os.path.join(script_folder, 'utils'))
shutil.copy(os.path.join('./utils', 'image_preprocessing_ver2.py'), os.path.join(script_folder, 'utils'))
cpu_compute_name = config['cpu_compute']
try:
cpu_compute_target = AmlCompute(ws, cpu_compute_name)
print("found existing compute target: %s" % cpu_compute_name)
except:# ComputeTargetException:
print("creating new compute target")
provisioning_config = AmlCompute.provisioning_configuration(
vm_size='STANDARD_D2_V2',
max_nodes=4,
idle_seconds_before_scaledown=1800)
cpu_compute_target = ComputeTarget.create(ws, cpu_compute_name, provisioning_config)
cpu_compute_target.wait_for_completion(
show_output=True,
min_node_count=None,
timeout_in_minutes=20)
# use get_status() to get a detailed status for the current cluster.
print(cpu_compute_target.get_status().serialize())
# choose a name for your cluster
gpu_compute_name = config['gpu_compute']
try:
gpu_compute_target = AmlCompute(workspace=ws, name=gpu_compute_name)
print("found existing compute target: %s" % gpu_compute_name)
except:
print('Creating a new compute target...')
provisioning_config = AmlCompute.provisioning_configuration(
vm_size='STANDARD_NC6',
max_nodes=10,
idle_seconds_before_scaledown=1800)
# create the cluster
gpu_compute_target = ComputeTarget.create(ws, gpu_compute_name, provisioning_config)
# can poll for a minimum number of nodes and for a specific timeout.
# if no min node count is provided it uses the scale settings for the cluster
gpu_compute_target.wait_for_completion(
show_output=True,
min_node_count=None,
timeout_in_minutes=20)
# use get_status() to get a detailed status for the current cluster.
try:
print(gpu_compute_target.get_status().serialize())
except BaseException as e:
print("Could not get status of compute target.")
print(e)
# conda dependencies for compute targets
gpu_cd = CondaDependencies.create(
conda_packages=['cudatoolkit=10.0.130'],
pip_packages=['keras', 'tensorflow', 'tensorflow-gpu', 'matplotlib', 'pillow', 'six', 'numpy', 'azureml-sdk', 'tqdm'])
# Runconfigs
gpu_compute_run_config = RunConfiguration(conda_dependencies=gpu_cd)
gpu_compute_run_config.environment.docker.enabled = True
gpu_compute_run_config.environment.docker.gpu_support = True
gpu_compute_run_config.environment.docker.base_image = DEFAULT_GPU_IMAGE
gpu_compute_run_config.environment.spark.precache_packages = False
print("PipelineData object created")
path_on_datastore = os.path.join("knowledge_distillation", "data")
# DataReference to where video data is stored.
labeled_data = DataReference(
datastore=def_blob_store,
data_reference_name="labeled_data",
path_on_datastore=path_on_datastore)
print("DataReference object created")
# Naming the intermediate data as processed_data1 and assigning it to the variable processed_data1.
# raw_data = PipelineData("raw_video_fames", datastore=def_blob_store)
logits_data = PipelineData("logits_from_xception", datastore=def_blob_store)
data_metrics = PipelineData("data_metrics", datastore=def_blob_store)
data_output = PipelineData("output_data", datastore=def_blob_store)
# prepare dataset for training/testing prednet
get_logits_from_xception = PythonScriptStep(
name='get_logits_from_xception',
script_name="get_logits_from_xception.py",
arguments=["--data-folder", labeled_data, "--output_data", logits_data],
inputs=[labeled_data],
outputs=[logits_data],
compute_target=gpu_compute_target,
source_directory=script_folder,
runconfig=gpu_compute_run_config,
allow_reuse=True,
hash_paths=['.']
)
print("logit step created")
# upload data to default datastore
def_blob_store = ws.get_default_datastore()
# script_params = {
# '--data-folder': def_blob_store.path('256_ObjectCategories_preproc').as_mount(),
# '--remote_execution': ""
# estimator_entry_script_arguments=[
# '--data-folder', preprocessed_data,
# '--remote_execution',
# '--dataset', dataset
# ],
# }
est = Estimator(source_directory=script_folder,
compute_target=gpu_compute_target,
pip_packages=['keras', 'tensorflow', 'tensorflow-gpu', 'matplotlib', 'pillow', 'six', 'numpy', 'azureml-sdk', 'tqdm'],
conda_packages=['cudatoolkit=10.0.130'],
entry_script='kd_squeezenet.py',
use_gpu=True,
node_count=1)
from azureml.train.hyperdrive import RandomParameterSampling, BanditPolicy, HyperDriveConfig, PrimaryMetricGoal
from azureml.pipeline.steps import HyperDriveStep
from azureml.train.hyperdrive import choice, loguniform, uniform
ps = RandomParameterSampling(
{
'--learning_rate': uniform(1e-3, 2e-2),
'--momentum': uniform(.1, .95),
'--weight_decay': loguniform(-5, -3),
'--temperature': uniform(1, 9),
# '--lambda_const': uniform(.1, .3),
'--transfer_learning': choice("True", "False")
}
)
policy = BanditPolicy(evaluation_interval=2, slack_factor=0.1, delay_evaluation=10)
hdc = HyperDriveConfig(estimator=est,
hyperparameter_sampling=ps,
policy=policy,
primary_metric_name='val_loss',
primary_metric_goal=PrimaryMetricGoal.MINIMIZE,
max_total_runs=5, #100,
max_concurrent_runs=5)
hd_step = HyperDriveStep(
name="train_w_hyperdrive",
hyperdrive_config=hdc,
estimator_entry_script_arguments=[
'--data-folder', labeled_data,
'--logits-folder', logits_data,
'--remote_execution'
],
# estimator_entry_script_arguments=script_params,
inputs=[labeled_data, logits_data],
metrics_output = data_metrics,
allow_reuse=True
)
hd_step.run_after(get_logits_from_xception)
registration_step = PythonScriptStep(
name='register_model',
script_name='model_registration.py',
arguments=['--input_dir', data_metrics, '--output_dir', data_output],
compute_target=gpu_compute_target,
inputs=[data_metrics],
outputs=[data_output],
source_directory=script_folder,
runconfig=gpu_compute_run_config,
allow_reuse=True,
hash_paths=['.']
)
registration_step.run_after(hd_step)
pipeline = Pipeline(workspace=ws, steps=[get_logits_from_xception, hd_step, registration_step])
print ("Pipeline is built")
pipeline.validate()
print("Simple validation complete")
pipeline_name = 'kd_teach_the_student'
# We need to disable (delete) previously published pipelines, because we can't have two published pipelines with the same name
from utils.azure import disable_pipeline
disable_pipeline(pipeline_name=pipeline_name, prefix='', dry_run=False)
published_pipeline = pipeline.publish(name=pipeline_name)
print("Student pipeline published")
schedule = Schedule.create(
workspace=ws,
name=pipeline_name + "_sch",
pipeline_id=published_pipeline.id,
experiment_name=pipeline_name,
datastore=def_blob_store,
wait_for_provisioning=True,
description="Datastore scheduler for Pipeline" + pipeline_name,
path_on_datastore=path_on_datastore,
polling_interval=60)
| [
"azureml.core.runconfig.CondaDependencies.create",
"azureml.train.hyperdrive.loguniform",
"azureml.core.compute.AmlCompute.provisioning_configuration",
"azureml.core.compute.AmlCompute",
"azureml.pipeline.core.PipelineData",
"utils.azure.disable_pipeline",
"azureml.pipeline.steps.HyperDriveStep",
"azu... | [((1277, 1329), 'azureml.core.Workspace.from_config', 'Workspace.from_config', ([], {'path': 'config_json', 'auth': 'svc_pr'}), '(path=config_json, auth=svc_pr)\n', (1298, 1329), False, 'from azureml.core import Workspace, Run, Experiment, Datastore\n'), ((1677, 1718), 'os.makedirs', 'os.makedirs', (['script_folder'], {'exist_ok': '(True)'}), '(script_folder, exist_ok=True)\n', (1688, 1718), False, 'import os\n'), ((4319, 4511), 'azureml.core.runconfig.CondaDependencies.create', 'CondaDependencies.create', ([], {'conda_packages': "['cudatoolkit=10.0.130']", 'pip_packages': "['keras', 'tensorflow', 'tensorflow-gpu', 'matplotlib', 'pillow', 'six',\n 'numpy', 'azureml-sdk', 'tqdm']"}), "(conda_packages=['cudatoolkit=10.0.130'],\n pip_packages=['keras', 'tensorflow', 'tensorflow-gpu', 'matplotlib',\n 'pillow', 'six', 'numpy', 'azureml-sdk', 'tqdm'])\n", (4343, 4511), False, 'from azureml.core.runconfig import CondaDependencies, RunConfiguration\n'), ((4552, 4595), 'azureml.core.runconfig.RunConfiguration', 'RunConfiguration', ([], {'conda_dependencies': 'gpu_cd'}), '(conda_dependencies=gpu_cd)\n', (4568, 4595), False, 'from azureml.core.runconfig import CondaDependencies, RunConfiguration\n'), ((4913, 4959), 'os.path.join', 'os.path.join', (['"""knowledge_distillation"""', '"""data"""'], {}), "('knowledge_distillation', 'data')\n", (4925, 4959), False, 'import os\n'), ((5023, 5139), 'azureml.data.data_reference.DataReference', 'DataReference', ([], {'datastore': 'def_blob_store', 'data_reference_name': '"""labeled_data"""', 'path_on_datastore': 'path_on_datastore'}), "(datastore=def_blob_store, data_reference_name='labeled_data',\n path_on_datastore=path_on_datastore)\n", (5036, 5139), False, 'from azureml.data.data_reference import DataReference\n'), ((5377, 5439), 'azureml.pipeline.core.PipelineData', 'PipelineData', (['"""logits_from_xception"""'], {'datastore': 'def_blob_store'}), "('logits_from_xception', datastore=def_blob_store)\n", (5389, 5439), 
False, 'from azureml.pipeline.core import Pipeline, PipelineData\n'), ((5455, 5509), 'azureml.pipeline.core.PipelineData', 'PipelineData', (['"""data_metrics"""'], {'datastore': 'def_blob_store'}), "('data_metrics', datastore=def_blob_store)\n", (5467, 5509), False, 'from azureml.pipeline.core import Pipeline, PipelineData\n'), ((5524, 5577), 'azureml.pipeline.core.PipelineData', 'PipelineData', (['"""output_data"""'], {'datastore': 'def_blob_store'}), "('output_data', datastore=def_blob_store)\n", (5536, 5577), False, 'from azureml.pipeline.core import Pipeline, PipelineData\n'), ((5653, 6024), 'azureml.pipeline.steps.PythonScriptStep', 'PythonScriptStep', ([], {'name': '"""get_logits_from_xception"""', 'script_name': '"""get_logits_from_xception.py"""', 'arguments': "['--data-folder', labeled_data, '--output_data', logits_data]", 'inputs': '[labeled_data]', 'outputs': '[logits_data]', 'compute_target': 'gpu_compute_target', 'source_directory': 'script_folder', 'runconfig': 'gpu_compute_run_config', 'allow_reuse': '(True)', 'hash_paths': "['.']"}), "(name='get_logits_from_xception', script_name=\n 'get_logits_from_xception.py', arguments=['--data-folder', labeled_data,\n '--output_data', logits_data], inputs=[labeled_data], outputs=[\n logits_data], compute_target=gpu_compute_target, source_directory=\n script_folder, runconfig=gpu_compute_run_config, allow_reuse=True,\n hash_paths=['.'])\n", (5669, 6024), False, 'from azureml.pipeline.steps import PythonScriptStep\n'), ((6489, 6803), 'azureml.train.estimator.Estimator', 'Estimator', ([], {'source_directory': 'script_folder', 'compute_target': 'gpu_compute_target', 'pip_packages': "['keras', 'tensorflow', 'tensorflow-gpu', 'matplotlib', 'pillow', 'six',\n 'numpy', 'azureml-sdk', 'tqdm']", 'conda_packages': "['cudatoolkit=10.0.130']", 'entry_script': '"""kd_squeezenet.py"""', 'use_gpu': '(True)', 'node_count': '(1)'}), "(source_directory=script_folder, compute_target=gpu_compute_target,\n pip_packages=['keras', 
'tensorflow', 'tensorflow-gpu', 'matplotlib',\n 'pillow', 'six', 'numpy', 'azureml-sdk', 'tqdm'], conda_packages=[\n 'cudatoolkit=10.0.130'], entry_script='kd_squeezenet.py', use_gpu=True,\n node_count=1)\n", (6498, 6803), False, 'from azureml.train.estimator import Estimator\n'), ((7442, 7516), 'azureml.train.hyperdrive.BanditPolicy', 'BanditPolicy', ([], {'evaluation_interval': '(2)', 'slack_factor': '(0.1)', 'delay_evaluation': '(10)'}), '(evaluation_interval=2, slack_factor=0.1, delay_evaluation=10)\n', (7454, 7516), False, 'from azureml.train.hyperdrive import RandomParameterSampling, BanditPolicy, HyperDriveConfig, PrimaryMetricGoal\n'), ((7524, 7728), 'azureml.train.hyperdrive.HyperDriveConfig', 'HyperDriveConfig', ([], {'estimator': 'est', 'hyperparameter_sampling': 'ps', 'policy': 'policy', 'primary_metric_name': '"""val_loss"""', 'primary_metric_goal': 'PrimaryMetricGoal.MINIMIZE', 'max_total_runs': '(5)', 'max_concurrent_runs': '(5)'}), "(estimator=est, hyperparameter_sampling=ps, policy=policy,\n primary_metric_name='val_loss', primary_metric_goal=PrimaryMetricGoal.\n MINIMIZE, max_total_runs=5, max_concurrent_runs=5)\n", (7540, 7728), False, 'from azureml.train.hyperdrive import RandomParameterSampling, BanditPolicy, HyperDriveConfig, PrimaryMetricGoal\n'), ((7898, 8178), 'azureml.pipeline.steps.HyperDriveStep', 'HyperDriveStep', ([], {'name': '"""train_w_hyperdrive"""', 'hyperdrive_config': 'hdc', 'estimator_entry_script_arguments': "['--data-folder', labeled_data, '--logits-folder', logits_data,\n '--remote_execution']", 'inputs': '[labeled_data, logits_data]', 'metrics_output': 'data_metrics', 'allow_reuse': '(True)'}), "(name='train_w_hyperdrive', hyperdrive_config=hdc,\n estimator_entry_script_arguments=['--data-folder', labeled_data,\n '--logits-folder', logits_data, '--remote_execution'], inputs=[\n labeled_data, logits_data], metrics_output=data_metrics, allow_reuse=True)\n", (7912, 8178), False, 'from azureml.pipeline.steps import 
HyperDriveStep\n'), ((8349, 8696), 'azureml.pipeline.steps.PythonScriptStep', 'PythonScriptStep', ([], {'name': '"""register_model"""', 'script_name': '"""model_registration.py"""', 'arguments': "['--input_dir', data_metrics, '--output_dir', data_output]", 'compute_target': 'gpu_compute_target', 'inputs': '[data_metrics]', 'outputs': '[data_output]', 'source_directory': 'script_folder', 'runconfig': 'gpu_compute_run_config', 'allow_reuse': '(True)', 'hash_paths': "['.']"}), "(name='register_model', script_name='model_registration.py',\n arguments=['--input_dir', data_metrics, '--output_dir', data_output],\n compute_target=gpu_compute_target, inputs=[data_metrics], outputs=[\n data_output], source_directory=script_folder, runconfig=\n gpu_compute_run_config, allow_reuse=True, hash_paths=['.'])\n", (8365, 8696), False, 'from azureml.pipeline.steps import PythonScriptStep\n'), ((8770, 8858), 'azureml.pipeline.core.Pipeline', 'Pipeline', ([], {'workspace': 'ws', 'steps': '[get_logits_from_xception, hd_step, registration_step]'}), '(workspace=ws, steps=[get_logits_from_xception, hd_step,\n registration_step])\n', (8778, 8858), False, 'from azureml.pipeline.core import Pipeline, PipelineData\n'), ((9150, 9221), 'utils.azure.disable_pipeline', 'disable_pipeline', ([], {'pipeline_name': 'pipeline_name', 'prefix': '""""""', 'dry_run': '(False)'}), "(pipeline_name=pipeline_name, prefix='', dry_run=False)\n", (9166, 9221), False, 'from utils.azure import disable_pipeline\n'), ((9329, 9649), 'azureml.pipeline.core.schedule.Schedule.create', 'Schedule.create', ([], {'workspace': 'ws', 'name': "(pipeline_name + '_sch')", 'pipeline_id': 'published_pipeline.id', 'experiment_name': 'pipeline_name', 'datastore': 'def_blob_store', 'wait_for_provisioning': '(True)', 'description': "('Datastore scheduler for Pipeline' + pipeline_name)", 'path_on_datastore': 'path_on_datastore', 'polling_interval': '(60)'}), "(workspace=ws, name=pipeline_name + '_sch', pipeline_id=\n 
published_pipeline.id, experiment_name=pipeline_name, datastore=\n def_blob_store, wait_for_provisioning=True, description=\n 'Datastore scheduler for Pipeline' + pipeline_name, path_on_datastore=\n path_on_datastore, polling_interval=60)\n", (9344, 9649), False, 'from azureml.pipeline.core.schedule import ScheduleRecurrence, Schedule\n'), ((892, 904), 'json.load', 'json.load', (['f'], {}), '(f)\n', (901, 904), False, 'import json\n'), ((924, 1095), 'azureml.core.authentication.ServicePrincipalAuthentication', 'ServicePrincipalAuthentication', ([], {'tenant_id': "config['tenant_id']", 'service_principal_id': "config['service_principal_id']", 'service_principal_password': "config['<PASSWORD>']"}), "(tenant_id=config['tenant_id'],\n service_principal_id=config['service_principal_id'],\n service_principal_password=config['<PASSWORD>'])\n", (954, 1095), False, 'from azureml.core.authentication import ServicePrincipalAuthentication\n'), ((1731, 1767), 'os.path.join', 'os.path.join', (['script_folder', '"""utils"""'], {}), "(script_folder, 'utils')\n", (1743, 1767), False, 'import os\n'), ((1797, 1834), 'os.path.join', 'os.path.join', (['base_dir', '"""config.json"""'], {}), "(base_dir, 'config.json')\n", (1809, 1834), False, 'import os\n'), ((1863, 1916), 'os.path.join', 'os.path.join', (['base_dir', '"""get_logits_from_xception.py"""'], {}), "(base_dir, 'get_logits_from_xception.py')\n", (1875, 1916), False, 'import os\n'), ((1945, 1984), 'os.path.join', 'os.path.join', (['base_dir', '"""squeezenet.py"""'], {}), "(base_dir, 'squeezenet.py')\n", (1957, 1984), False, 'import os\n'), ((2013, 2055), 'os.path.join', 'os.path.join', (['base_dir', '"""kd_squeezenet.py"""'], {}), "(base_dir, 'kd_squeezenet.py')\n", (2025, 2055), False, 'import os\n'), ((2084, 2131), 'os.path.join', 'os.path.join', (['base_dir', '"""model_registration.py"""'], {}), "(base_dir, 'model_registration.py')\n", (2096, 2131), False, 'import os\n'), ((2160, 2209), 'os.path.join', 'os.path.join', 
(['base_dir', '"""squeezenet_weights.hdf5"""'], {}), "(base_dir, 'squeezenet_weights.hdf5')\n", (2172, 2209), False, 'import os\n'), ((2238, 2292), 'os.path.join', 'os.path.join', (['"""./utils"""', '"""image_preprocessing_ver1.py"""'], {}), "('./utils', 'image_preprocessing_ver1.py')\n", (2250, 2292), False, 'import os\n'), ((2294, 2330), 'os.path.join', 'os.path.join', (['script_folder', '"""utils"""'], {}), "(script_folder, 'utils')\n", (2306, 2330), False, 'import os\n'), ((2344, 2398), 'os.path.join', 'os.path.join', (['"""./utils"""', '"""image_preprocessing_ver2.py"""'], {}), "('./utils', 'image_preprocessing_ver2.py')\n", (2356, 2398), False, 'import os\n'), ((2400, 2436), 'os.path.join', 'os.path.join', (['script_folder', '"""utils"""'], {}), "(script_folder, 'utils')\n", (2412, 2436), False, 'import os\n'), ((2510, 2542), 'azureml.core.compute.AmlCompute', 'AmlCompute', (['ws', 'cpu_compute_name'], {}), '(ws, cpu_compute_name)\n', (2520, 2542), False, 'from azureml.core.compute import AmlCompute\n'), ((3309, 3356), 'azureml.core.compute.AmlCompute', 'AmlCompute', ([], {'workspace': 'ws', 'name': 'gpu_compute_name'}), '(workspace=ws, name=gpu_compute_name)\n', (3319, 3356), False, 'from azureml.core.compute import AmlCompute\n'), ((2714, 2830), 'azureml.core.compute.AmlCompute.provisioning_configuration', 'AmlCompute.provisioning_configuration', ([], {'vm_size': '"""STANDARD_D2_V2"""', 'max_nodes': '(4)', 'idle_seconds_before_scaledown': '(1800)'}), "(vm_size='STANDARD_D2_V2', max_nodes=4,\n idle_seconds_before_scaledown=1800)\n", (2751, 2830), False, 'from azureml.core.compute import AmlCompute\n'), ((2881, 2944), 'azureml.core.compute.ComputeTarget.create', 'ComputeTarget.create', (['ws', 'cpu_compute_name', 'provisioning_config'], {}), '(ws, cpu_compute_name, provisioning_config)\n', (2901, 2944), False, 'from azureml.core.compute import ComputeTarget\n'), ((3504, 3619), 'azureml.core.compute.AmlCompute.provisioning_configuration', 
'AmlCompute.provisioning_configuration', ([], {'vm_size': '"""STANDARD_NC6"""', 'max_nodes': '(10)', 'idle_seconds_before_scaledown': '(1800)'}), "(vm_size='STANDARD_NC6', max_nodes=10,\n idle_seconds_before_scaledown=1800)\n", (3541, 3619), False, 'from azureml.core.compute import AmlCompute\n'), ((3692, 3755), 'azureml.core.compute.ComputeTarget.create', 'ComputeTarget.create', (['ws', 'gpu_compute_name', 'provisioning_config'], {}), '(ws, gpu_compute_name, provisioning_config)\n', (3712, 3755), False, 'from azureml.core.compute import ComputeTarget\n'), ((7177, 7197), 'azureml.train.hyperdrive.uniform', 'uniform', (['(0.001)', '(0.02)'], {}), '(0.001, 0.02)\n', (7184, 7197), False, 'from azureml.train.hyperdrive import choice, loguniform, uniform\n'), ((7220, 7238), 'azureml.train.hyperdrive.uniform', 'uniform', (['(0.1)', '(0.95)'], {}), '(0.1, 0.95)\n', (7227, 7238), False, 'from azureml.train.hyperdrive import choice, loguniform, uniform\n'), ((7264, 7282), 'azureml.train.hyperdrive.loguniform', 'loguniform', (['(-5)', '(-3)'], {}), '(-5, -3)\n', (7274, 7282), False, 'from azureml.train.hyperdrive import choice, loguniform, uniform\n'), ((7309, 7322), 'azureml.train.hyperdrive.uniform', 'uniform', (['(1)', '(9)'], {}), '(1, 9)\n', (7316, 7322), False, 'from azureml.train.hyperdrive import choice, loguniform, uniform\n'), ((7400, 7423), 'azureml.train.hyperdrive.choice', 'choice', (['"""True"""', '"""False"""'], {}), "('True', 'False')\n", (7406, 7423), False, 'from azureml.train.hyperdrive import choice, loguniform, uniform\n')] |
"""
-------------------------------------------------------------------------
Library of RTL queues
-------------------------------------------------------------------------
Author : <NAME>
Date : Mar 23, 2019
"""
from pymtl3 import *
from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL
from pymtl3.stdlib.rtl import Mux, RegisterFile
#-------------------------------------------------------------------------
# Dpath and Ctrl for NormalQueueRTL
#-------------------------------------------------------------------------
class NormalQueueDpathRTL( Component ):
  """Datapath for the multi-entry normal queue.

  A single-read-port / single-write-port register file stores the
  entries.  The read/write addresses and the write enable are driven by
  the companion NormalQueueCtrlRTL; the entry at the read address is
  presented combinationally on deq_msg.
  """

  def construct( s, EntryType, num_entries=2 ):

    # Interface
    s.enq_msg =  InPort( EntryType )  # entry to write on an enqueue
    s.deq_msg = OutPort( EntryType )  # entry currently at the head

    s.wen   = InPort( Bits1 )                           # write enable from ctrl
    s.waddr = InPort( mk_bits( clog2( num_entries ) ) ) # write (tail) address
    s.raddr = InPort( mk_bits( clog2( num_entries ) ) ) # read  (head) address

    # Component
    # Register file holds the queue entries; port 0 is used for both the
    # read side (head) and the write side (tail).
    s.queue = RegisterFile( EntryType, num_entries )(
      raddr = { 0: s.raddr   },
      rdata = { 0: s.deq_msg },
      wen   = { 0: s.wen     },
      waddr = { 0: s.waddr   },
      wdata = { 0: s.enq_msg },
    )
class NormalQueueCtrlRTL( Component ):
  """Control unit for the multi-entry normal queue.

  Maintains circular head/tail pointers and an occupancy count, and
  generates the enq/deq ready signals plus the datapath read/write
  addresses.  enq_rdy deasserts when the queue is full and deq_rdy when
  it is empty, so the enqueue and dequeue handshakes are fully
  decoupled (no combinational path between the two interfaces).
  """

  def construct( s, num_entries=2 ):

    # Constants
    addr_nbits  = clog2  ( num_entries   )
    count_nbits = clog2  ( num_entries+1 )  # count ranges over 0..num_entries
    PtrType     = mk_bits( addr_nbits    )
    CountType   = mk_bits( count_nbits   )
    s.last_idx    = PtrType  ( num_entries-1 )  # wrap-around point for pointers
    s.num_entries = CountType( num_entries   )

    # Interface
    s.enq_en  = InPort ( Bits1     )
    s.enq_rdy = OutPort( Bits1     )
    s.deq_en  = InPort ( Bits1     )
    s.deq_rdy = OutPort( Bits1     )
    s.count   = OutPort( CountType )

    s.wen     = OutPort( Bits1   )
    s.waddr   = OutPort( PtrType )
    s.raddr   = OutPort( PtrType )

    # Registers
    s.head = Wire( PtrType )
    s.tail = Wire( PtrType )

    # Wires
    s.enq_xfer  = Wire( Bits1   )
    s.deq_xfer  = Wire( Bits1   )
    s.head_next = Wire( PtrType )
    s.tail_next = Wire( PtrType )

    # Connections: write at the tail on an enqueue transfer, read at the head.
    connect( s.wen,   s.enq_xfer )
    connect( s.waddr, s.tail     )
    connect( s.raddr, s.head     )

    @s.update
    def up_rdy_signals():
      # Ready to enqueue while not full; ready to dequeue while not empty.
      # Both deassert during reset.
      s.enq_rdy = ( s.count < s.num_entries ) & ~s.reset
      s.deq_rdy = ( s.count > CountType(0) ) & ~s.reset

    @s.update
    def up_xfer_signals():
      # A transfer occurs only when both en and rdy are asserted.
      s.enq_xfer  = s.enq_en & s.enq_rdy
      s.deq_xfer  = s.deq_en & s.deq_rdy

    @s.update
    def up_next():
      # Circular-buffer pointer increments with wrap-around at last_idx.
      s.head_next = s.head + PtrType(1) if s.head < s.last_idx else PtrType(0)
      s.tail_next = s.tail + PtrType(1) if s.tail < s.last_idx else PtrType(0)

    @s.update_ff
    def up_reg():

      if s.reset:
        s.head  <<= PtrType(0)
        s.tail  <<= PtrType(0)
        s.count <<= CountType(0)

      else:
        # Advance head on a dequeue transfer, tail on an enqueue transfer;
        # count changes only when exactly one of the two transfers occurs.
        s.head  <<= s.head_next if s.deq_xfer else s.head
        s.tail  <<= s.tail_next if s.enq_xfer else s.tail
        s.count <<= s.count + CountType(1) if s.enq_xfer & ~s.deq_xfer else \
                    s.count - CountType(1) if s.deq_xfer & ~s.enq_xfer else \
                    s.count
#-------------------------------------------------------------------------
# NormalQueueRTL
#-------------------------------------------------------------------------
class NormalQueueRTL( Component ):
  """Multi-entry normal queue with decoupled enq/deq handshakes.

  For num_entries == 1 the dedicated one-entry implementation is
  instantiated; otherwise a control unit is paired with a register-file
  datapath.
  """

  def construct( s, EntryType, num_entries=2 ):

    # Interface
    s.enq   = EnqIfcRTL( EntryType )
    s.deq   = DeqIfcRTL( EntryType )
    s.count = OutPort( mk_bits( clog2( num_entries+1 ) ) )

    # Components
    assert num_entries > 0
    if num_entries > 1:
      # General case: control unit + register-file datapath.
      s.ctrl  = NormalQueueCtrlRTL ( num_entries )
      s.dpath = NormalQueueDpathRTL( EntryType, num_entries )

      # Control-to-datapath signals
      connect( s.ctrl.wen,   s.dpath.wen   )
      connect( s.ctrl.waddr, s.dpath.waddr )
      connect( s.ctrl.raddr, s.dpath.raddr )

      # Enqueue side
      connect( s.enq.en,  s.ctrl.enq_en   )
      connect( s.enq.rdy, s.ctrl.enq_rdy  )
      connect( s.enq.msg, s.dpath.enq_msg )

      # Dequeue side and occupancy
      connect( s.deq.en,  s.ctrl.deq_en   )
      connect( s.deq.rdy, s.ctrl.deq_rdy  )
      connect( s.deq.msg, s.dpath.deq_msg )
      connect( s.count,   s.ctrl.count    )

    else:
      # Single-entry special case.
      s.q = NormalQueue1EntryRTL( EntryType )
      connect( s.enq,   s.q.enq   )
      connect( s.deq,   s.q.deq   )
      connect( s.count, s.q.count )

  # Line trace
  def line_trace( s ):
    return f"{s.enq}({s.count}){s.deq}"
#-------------------------------------------------------------------------
# Ctrl for PipeQueue
#-------------------------------------------------------------------------
class PipeQueueCtrlRTL( Component ):
  """Control unit for the multi-entry pipe queue.

  Same pointer/count machinery as NormalQueueCtrlRTL, except that
  enq_rdy also asserts when the queue is full but a dequeue is enabled
  in the same cycle.  This allows back-to-back full-throughput operation
  at the cost of a combinational path from deq_en to enq_rdy.
  """

  def construct( s, num_entries=2 ):

    # Constants
    addr_nbits  = clog2  ( num_entries   )
    count_nbits = clog2  ( num_entries+1 )  # count ranges over 0..num_entries
    PtrType     = mk_bits( addr_nbits    )
    CountType   = mk_bits( count_nbits   )
    s.last_idx    = PtrType  ( num_entries-1 )  # wrap-around point for pointers
    s.num_entries = CountType( num_entries   )

    # Interface
    s.enq_en  = InPort ( Bits1     )
    s.enq_rdy = OutPort( Bits1     )
    s.deq_en  = InPort ( Bits1     )
    s.deq_rdy = OutPort( Bits1     )
    s.count   = OutPort( CountType )

    s.wen     = OutPort( Bits1   )
    s.waddr   = OutPort( PtrType )
    s.raddr   = OutPort( PtrType )

    # Registers
    s.head = Wire( PtrType )
    s.tail = Wire( PtrType )

    # Wires
    s.enq_xfer  = Wire( Bits1   )
    s.deq_xfer  = Wire( Bits1   )
    s.head_next = Wire( PtrType )
    s.tail_next = Wire( PtrType )

    # Connections: write at the tail on an enqueue transfer, read at the head.
    connect( s.wen,   s.enq_xfer )
    connect( s.waddr, s.tail     )
    connect( s.raddr, s.head     )

    @s.update
    def up_rdy_signals():
      # Ready to dequeue while not empty (and not in reset).
      s.deq_rdy = ( s.count > CountType(0) ) & ~s.reset

    @s.update
    def up_enq_rdy():
      if s.reset:
        s.enq_rdy = b1(0)
      else:
        # Pipe behavior: a full queue can still accept an enqueue if the
        # head entry is being dequeued in the same cycle.
        s.enq_rdy = ( s.count < s.num_entries ) | s.deq_en

    @s.update
    def up_xfer_signals():
      # A transfer occurs only when both en and rdy are asserted.
      s.enq_xfer  = s.enq_en & s.enq_rdy
      s.deq_xfer  = s.deq_en & s.deq_rdy

    @s.update
    def up_next():
      # Circular-buffer pointer increments with wrap-around at last_idx.
      s.head_next = s.head + PtrType(1) if s.head < s.last_idx else PtrType(0)
      s.tail_next = s.tail + PtrType(1) if s.tail < s.last_idx else PtrType(0)

    @s.update_ff
    def up_reg():

      if s.reset:
        s.head  <<= PtrType(0)
        s.tail  <<= PtrType(0)
        s.count <<= CountType(0)

      else:
        # Advance head on a dequeue transfer, tail on an enqueue transfer;
        # count changes only when exactly one of the two transfers occurs.
        s.head  <<= s.head_next if s.deq_xfer else s.head
        s.tail  <<= s.tail_next if s.enq_xfer else s.tail
        s.count <<= s.count + CountType(1) if s.enq_xfer & ~s.deq_xfer else \
                    s.count - CountType(1) if s.deq_xfer & ~s.enq_xfer else \
                    s.count
#-------------------------------------------------------------------------
# PipeQueueRTL
#-------------------------------------------------------------------------
class PipeQueueRTL( Component ):
  """Multi-entry pipe queue.

  Structurally identical to NormalQueueRTL but uses PipeQueueCtrlRTL,
  which lets a full queue accept an enqueue when a dequeue happens in
  the same cycle.
  """

  def construct( s, EntryType, num_entries=2 ):

    # Interface
    s.enq   = EnqIfcRTL( EntryType )
    s.deq   = DeqIfcRTL( EntryType )
    s.count = OutPort( mk_bits( clog2( num_entries+1 ) ) )

    # Components
    assert num_entries > 0
    if num_entries > 1:
      # General case: pipe control unit + register-file datapath.
      s.ctrl  = PipeQueueCtrlRTL   ( num_entries )
      s.dpath = NormalQueueDpathRTL( EntryType, num_entries )

      # Control-to-datapath signals
      connect( s.ctrl.wen,   s.dpath.wen   )
      connect( s.ctrl.waddr, s.dpath.waddr )
      connect( s.ctrl.raddr, s.dpath.raddr )

      # Enqueue side
      connect( s.enq.en,  s.ctrl.enq_en   )
      connect( s.enq.rdy, s.ctrl.enq_rdy  )
      connect( s.enq.msg, s.dpath.enq_msg )

      # Dequeue side and occupancy
      connect( s.deq.en,  s.ctrl.deq_en   )
      connect( s.deq.rdy, s.ctrl.deq_rdy  )
      connect( s.deq.msg, s.dpath.deq_msg )
      connect( s.count,   s.ctrl.count    )

    else:
      # Single-entry special case.
      s.q = PipeQueue1EntryRTL( EntryType )
      connect( s.enq,   s.q.enq   )
      connect( s.deq,   s.q.deq   )
      connect( s.count, s.q.count )

  # Line trace
  def line_trace( s ):
    return f"{s.enq}({s.count}){s.deq}"
#-------------------------------------------------------------------------
# Ctrl and Dpath for BypassQueue
#-------------------------------------------------------------------------
class BypassQueueDpathRTL( Component ):
  """Datapath for the multi-entry bypass queue.

  Same register file as the normal-queue datapath, plus an output mux
  that can forward the incoming enq_msg straight to deq_msg (the bypass
  path, selected by the control unit when the queue is empty).
  """

  def construct( s, EntryType, num_entries=2 ):

    # Interface
    s.enq_msg =  InPort( EntryType )  # entry to write on an enqueue
    s.deq_msg = OutPort( EntryType )  # entry at the head, or bypassed enq_msg

    s.wen     = InPort( Bits1 )                           # write enable from ctrl
    s.waddr   = InPort( mk_bits( clog2( num_entries ) ) ) # write (tail) address
    s.raddr   = InPort( mk_bits( clog2( num_entries ) ) ) # read  (head) address
    s.mux_sel = InPort( Bits1 )                           # 1: bypass enq_msg

    # Component
    # Register file holds the queue entries.
    s.queue = RegisterFile( EntryType, num_entries )(
      raddr = { 0: s.raddr   },
      wen   = { 0: s.wen     },
      waddr = { 0: s.waddr   },
      wdata = { 0: s.enq_msg },
    )

    # Output mux: select the stored head entry (0) or the live enqueue
    # message (1) for the bypass path.
    s.mux = Mux( EntryType, 2 )(
      sel = s.mux_sel,
      in_ = { 0: s.queue.rdata[0], 1: s.enq_msg },
      out = s.deq_msg,
    )
class BypassQueueCtrlRTL( Component ):
  """Control unit for the multi-entry bypass queue.

  Same pointer/count machinery as NormalQueueCtrlRTL, except that
  deq_rdy also asserts when the queue is empty but an enqueue is enabled
  in the same cycle (zero-latency bypass).  This creates a combinational
  path from enq_en to deq_rdy.  mux_sel steers the datapath output mux
  to the bypass input whenever the queue is empty.
  """

  def construct( s, num_entries=2 ):

    # Constants
    addr_nbits  = clog2  ( num_entries   )
    count_nbits = clog2  ( num_entries+1 )  # count ranges over 0..num_entries
    PtrType     = mk_bits( addr_nbits    )
    CountType   = mk_bits( count_nbits   )
    s.last_idx    = PtrType  ( num_entries-1 )  # wrap-around point for pointers
    s.num_entries = CountType( num_entries   )

    # Interface
    s.enq_en  = InPort ( Bits1     )
    s.enq_rdy = OutPort( Bits1     )
    s.deq_en  = InPort ( Bits1     )
    s.deq_rdy = OutPort( Bits1     )
    s.count   = OutPort( CountType )

    s.wen     = OutPort( Bits1   )
    s.waddr   = OutPort( PtrType )
    s.raddr   = OutPort( PtrType )
    s.mux_sel = OutPort( Bits1   )

    # Registers
    s.head = Wire( PtrType )
    s.tail = Wire( PtrType )

    # Wires
    s.enq_xfer  = Wire( Bits1   )
    s.deq_xfer  = Wire( Bits1   )
    s.head_next = Wire( PtrType )
    s.tail_next = Wire( PtrType )

    # Connections: write at the tail on an enqueue transfer, read at the head.
    connect( s.wen,   s.enq_xfer )
    connect( s.waddr, s.tail     )
    connect( s.raddr, s.head     )

    @s.update
    def up_enq_rdy():
      # Ready to enqueue while not full (and not in reset).
      s.enq_rdy = ( s.count < s.num_entries ) & ~s.reset

    @s.update
    def up_deq_rdy():
      if s.reset:
        s.deq_rdy = b1(0)
      else:
        # Bypass behavior: an empty queue can still offer a dequeue if an
        # enqueue is enabled in the same cycle.
        s.deq_rdy = ( s.count > CountType(0) ) | s.enq_en

    @s.update
    def up_mux_sel():
      # Select the bypass (enq_msg) path whenever the queue is empty.
      s.mux_sel = s.count == CountType(0)

    @s.update
    def up_xfer_signals():
      # A transfer occurs only when both en and rdy are asserted.
      s.enq_xfer  = s.enq_en & s.enq_rdy
      s.deq_xfer  = s.deq_en & s.deq_rdy

    @s.update
    def up_next():
      # Circular-buffer pointer increments with wrap-around at last_idx.
      s.head_next = s.head + PtrType(1) if s.head < s.last_idx else PtrType(0)
      s.tail_next = s.tail + PtrType(1) if s.tail < s.last_idx else PtrType(0)

    @s.update_ff
    def up_reg():

      if s.reset:
        s.head  <<= PtrType(0)
        s.tail  <<= PtrType(0)
        s.count <<= CountType(0)

      else:
        # Advance head on a dequeue transfer, tail on an enqueue transfer;
        # count changes only when exactly one of the two transfers occurs.
        s.head  <<= s.head_next if s.deq_xfer else s.head
        s.tail  <<= s.tail_next if s.enq_xfer else s.tail
        s.count <<= s.count + CountType(1) if s.enq_xfer & ~s.deq_xfer else \
                    s.count - CountType(1) if s.deq_xfer & ~s.enq_xfer else \
                    s.count
#-------------------------------------------------------------------------
# BypassQueueRTL
#-------------------------------------------------------------------------
class BypassQueueRTL( Component ):
  """Multi-entry bypass queue.

  Like NormalQueueRTL, but an enqueued message can be dequeued in the
  same cycle when the queue is empty (zero-latency bypass via the
  datapath output mux).
  """

  def construct( s, EntryType, num_entries=2 ):

    # Interface
    s.enq   = EnqIfcRTL( EntryType )
    s.deq   = DeqIfcRTL( EntryType )
    s.count = OutPort( mk_bits( clog2( num_entries+1 ) ) )

    # Components
    assert num_entries > 0
    if num_entries > 1:
      # General case: bypass control unit + mux-augmented datapath.
      s.ctrl  = BypassQueueCtrlRTL ( num_entries )
      s.dpath = BypassQueueDpathRTL( EntryType, num_entries )

      # Control-to-datapath signals (including the bypass mux select)
      connect( s.ctrl.wen,     s.dpath.wen     )
      connect( s.ctrl.waddr,   s.dpath.waddr   )
      connect( s.ctrl.raddr,   s.dpath.raddr   )
      connect( s.ctrl.mux_sel, s.dpath.mux_sel )

      # Enqueue side
      connect( s.enq.en,  s.ctrl.enq_en   )
      connect( s.enq.rdy, s.ctrl.enq_rdy  )
      connect( s.enq.msg, s.dpath.enq_msg )

      # Dequeue side and occupancy
      connect( s.deq.en,  s.ctrl.deq_en   )
      connect( s.deq.rdy, s.ctrl.deq_rdy  )
      connect( s.deq.msg, s.dpath.deq_msg )
      connect( s.count,   s.ctrl.count    )

    else:
      # Single-entry special case.
      s.q = BypassQueue1EntryRTL( EntryType )
      connect( s.enq,   s.q.enq   )
      connect( s.deq,   s.q.deq   )
      connect( s.count, s.q.count )

  # Line trace
  def line_trace( s ):
    return f"{s.enq}({s.count}){s.deq}"
#-------------------------------------------------------------------------
# NormalQueue1EntryRTL
#-------------------------------------------------------------------------

class NormalQueue1EntryRTL( Component ):
  """Single-entry normal queue: enqueue and dequeue are fully decoupled,
  so a message becomes visible at deq one cycle after it is enqueued."""

  def construct( s, EntryType ):

    # Interface
    s.enq   = EnqIfcRTL( EntryType )
    s.deq   = DeqIfcRTL( EntryType )
    s.count = OutPort  ( Bits1 )

    # Components
    s.entry = Wire( EntryType )
    s.full  = Wire( Bits1 )

    connect( s.count, s.full )

    # Logic
    @s.update_ff
    def up_full():
      if s.reset:
        s.full <<= b1(0)
      else:
        # A dequeue empties the entry; otherwise an enqueue (or staying
        # full) keeps it occupied.
        s.full <<= ~s.deq.en & (s.enq.en | s.full)

    @s.update_ff
    def up_entry():
      if s.enq.en:
        s.entry <<= s.enq.msg

    @s.update
    def up_enq_rdy():
      if s.reset:
        s.enq.rdy = b1(0)
      else:
        # Can only enqueue while the single entry is empty.
        s.enq.rdy = ~s.full

    @s.update
    def up_deq_rdy():
      s.deq.rdy = s.full & ~s.reset

    connect( s.entry, s.deq.msg )

  def line_trace( s ):
    return "{}({}){}".format( s.enq, s.full, s.deq )
#-------------------------------------------------------------------------
# PipeQueue1EntryRTL
#-------------------------------------------------------------------------

class PipeQueue1EntryRTL( Component ):
  """Single-entry pipe queue: a full entry can accept a new message in the
  same cycle it is being dequeued (enq.rdy depends on deq.en)."""

  def construct( s, EntryType ):

    # Interface
    s.enq   = EnqIfcRTL( EntryType )
    s.deq   = DeqIfcRTL( EntryType )
    s.count = OutPort  ( Bits1 )

    # Components
    s.entry = Wire( EntryType )
    s.full  = Wire( Bits1 )

    connect( s.count, s.full )

    # Logic
    @s.update_ff
    def up_full():
      if s.reset:
        s.full <<= b1(0)
      else:
        # An enqueue fills the entry; otherwise it stays full unless dequeued.
        s.full <<= s.enq.en | s.full & ~s.deq.en

    @s.update_ff
    def up_entry():
      if s.enq.en:
        s.entry <<= s.enq.msg

    @s.update
    def up_enq_rdy():
      # Pipe behavior: ready when empty OR when the current entry is being
      # dequeued in this same cycle.
      s.enq.rdy = ( ~s.full | s.deq.en ) & ~s.reset

    @s.update
    def up_deq_rdy():
      s.deq.rdy = s.full & ~s.reset

    connect( s.entry, s.deq.msg )

  def line_trace( s ):
    return "{}({}){}".format( s.enq, s.full, s.deq )
#-------------------------------------------------------------------------
# BypassQueue1EntryRTL
#-------------------------------------------------------------------------

class BypassQueue1EntryRTL( Component ):
  """Single-entry bypass queue: when empty, the incoming enq message is
  forwarded combinationally to deq in the same cycle."""

  def construct( s, EntryType ):

    # Interface
    s.enq   = EnqIfcRTL( EntryType )
    s.deq   = DeqIfcRTL( EntryType )
    s.count = OutPort  ( Bits1 )

    # Components
    s.entry = Wire( EntryType )
    s.full  = Wire( Bits1 )

    connect( s.count, s.full )

    # Logic
    @s.update_ff
    def up_full():
      if s.reset:
        s.full <<= b1(0)
      else:
        s.full <<= ~s.deq.en & (s.enq.en | s.full)

    @s.update_ff
    def up_entry():
      # Only latch the message if it is NOT bypassed straight to deq
      # in this cycle.
      if s.enq.en & ~s.deq.en:
        s.entry <<= s.enq.msg

    @s.update
    def up_enq_rdy():
      s.enq.rdy = ~s.full & ~s.reset

    @s.update
    def up_deq_rdy():
      # Bypass behavior: deq is ready when the entry is full OR a message
      # is being enqueued this very cycle.
      s.deq.rdy = ( s.full | s.enq.en ) & ~s.reset

    @s.update
    def up_deq_msg():
      # Empty queue forwards the incoming message combinationally.
      s.deq.msg = s.entry if s.full else s.enq.msg

  def line_trace( s ):
    return "{}({}){}".format( s.enq, s.full, s.deq )
| [
"pymtl3.stdlib.ifcs.DeqIfcRTL",
"pymtl3.stdlib.rtl.Mux",
"pymtl3.stdlib.ifcs.EnqIfcRTL",
"pymtl3.stdlib.rtl.RegisterFile"
] | [((3348, 3368), 'pymtl3.stdlib.ifcs.EnqIfcRTL', 'EnqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (3357, 3368), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((3385, 3405), 'pymtl3.stdlib.ifcs.DeqIfcRTL', 'DeqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (3394, 3405), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((6946, 6966), 'pymtl3.stdlib.ifcs.EnqIfcRTL', 'EnqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (6955, 6966), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((6983, 7003), 'pymtl3.stdlib.ifcs.DeqIfcRTL', 'DeqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (6992, 7003), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((11373, 11393), 'pymtl3.stdlib.ifcs.EnqIfcRTL', 'EnqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (11382, 11393), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((11410, 11430), 'pymtl3.stdlib.ifcs.DeqIfcRTL', 'DeqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (11419, 11430), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((12784, 12804), 'pymtl3.stdlib.ifcs.EnqIfcRTL', 'EnqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (12793, 12804), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((12821, 12841), 'pymtl3.stdlib.ifcs.DeqIfcRTL', 'DeqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (12830, 12841), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((13819, 13839), 'pymtl3.stdlib.ifcs.EnqIfcRTL', 'EnqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (13828, 13839), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((13856, 13876), 'pymtl3.stdlib.ifcs.DeqIfcRTL', 'DeqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (13865, 13876), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((14824, 14844), 'pymtl3.stdlib.ifcs.EnqIfcRTL', 'EnqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (14833, 14844), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((14861, 14881), 
'pymtl3.stdlib.ifcs.DeqIfcRTL', 'DeqIfcRTL', (['EntryType'], {}), '(EntryType)\n', (14870, 14881), False, 'from pymtl3.stdlib.ifcs import DeqIfcRTL, EnqIfcRTL\n'), ((884, 920), 'pymtl3.stdlib.rtl.RegisterFile', 'RegisterFile', (['EntryType', 'num_entries'], {}), '(EntryType, num_entries)\n', (896, 920), False, 'from pymtl3.stdlib.rtl import Mux, RegisterFile\n'), ((8601, 8637), 'pymtl3.stdlib.rtl.RegisterFile', 'RegisterFile', (['EntryType', 'num_entries'], {}), '(EntryType, num_entries)\n', (8613, 8637), False, 'from pymtl3.stdlib.rtl import Mux, RegisterFile\n'), ((8788, 8805), 'pymtl3.stdlib.rtl.Mux', 'Mux', (['EntryType', '(2)'], {}), '(EntryType, 2)\n', (8791, 8805), False, 'from pymtl3.stdlib.rtl import Mux, RegisterFile\n')] |
# coding=utf-8
#
# Copyright Zucker
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import subprocess
from shutil import copyfile
import shutil
import re
import zipfile
import copy
from pathlib import Path
class CloneProject:
    """Copy a project directory tree into the output directory, printing progress."""

    target_project_name = ""

    def clone(self, current_path, output_path, target_project_name, output_project_name):
        """Clone <current_path>/<target_project_name> into <output_path>/<output_project_name>.

        Existing destination files are kept (only missing files are copied).
        """
        self.target_project_name = target_project_name
        # Create the output directory on first use.
        if not os.path.exists(output_path):
            os.mkdir(output_path)
        self.__copy_dir(current_path, output_path, output_project_name, 0)

    def __copy_dir(self, source_dir, target_dir, target, depth):
        # At depth 0 the source folder is the original project name while the
        # destination folder gets the new name; below that, names match.
        if depth == 0:
            src_dir = os.path.join(source_dir, self.target_project_name)
        else:
            src_dir = os.path.join(source_dir, target)
        entries = os.listdir(src_dir)
        dst_dir = os.path.join(target_dir, target)
        if not os.path.exists(dst_dir):
            os.mkdir(dst_dir)
        if depth == 0:
            # Count every file once up-front so the progress percentage has a
            # stable total (avoids the >100% readings of the old counter).
            self.fileSize = sum(len(fs) for _, _, fs in os.walk(src_dir))
            self.count = 0
        for name in entries:
            src = os.path.join(src_dir, name)
            dst = os.path.join(dst_dir, name)
            if os.path.isdir(src):
                self.__copy_dir(src_dir, dst_dir, name, depth + 1)
            if os.path.isfile(src) and not os.path.exists(dst):
                copyfile(src, dst)
                self.count += 1
                if self.fileSize:  # guard against an empty source tree
                    pct = min(100, round(self.count * 100 / self.fileSize))
                    print("Cloning: {0}%".format(pct), end="\r")
class TreeNode:
    """Node of the Gradle dependency graph (a DAG: a node may have many parents).

    The old class-level ``children = set()`` / ``parents = set()`` defaults were
    shared mutable state waiting to bite; all state now lives on the instance.
    """

    def __init__(self, value):
        self.value = value          # "group:artifact:version" string
        self.children = set()       # direct dependencies of this node
        self.parents = set()        # every node that depends on this one
        # Most recently attached parent; used by get_level() to compute depth.
        self.parent = None

    def add_child(self, node):
        self.children.add(node)

    def add_parent(self, node):
        self.parent = node
        self.parents.add(node)

    def is_root(self):
        """True for the synthetic root node (no parent ever attached)."""
        return len(self.parents) == 0 or self.parent is None

    def get_level(self):
        """Depth in the tree: 0 for the root, parent depth + 1 otherwise."""
        if self.is_root():
            return 0
        return self.parent.get_level() + 1
class Dependency:
    """Build and query the Gradle dependency tree of one application module.

    Runs ``gradlew dependencies`` for the release runtime classpath, dumps the
    report to a text file, and parses it into a tree of TreeNode objects.
    """

    # Gradle command template: dump the release dependency tree to a file
    # (the redirection target path is appended at run time).
    commend = "./gradlew -q dependencies :%s:dependencies --configuration releaseRuntimeClasspath>"

    # Group/artifact prefixes excluded from the statistics (Android
    # support/androidx packages and other framework libraries).
    exportArr = ["com.android.support", "android.arch.lifecycle", "com.google.android",
                 "com.squareup.leakcanary:leakcanary-android", "android.arch.core",
                 "org.jetbrains.kotlin:kotlin-stdlib-common", "org.jetbrains:annotations",
                 "androidx.", "project :"]

    def __init__(self, output_project_path, app_dir):
        self.file_name = "dependency_" + app_dir + ".txt"
        self.projectPath = output_project_path
        self.appDir = app_dir
        # Parser state is per-instance now.  These used to be class attributes
        # mutated through ``self``, which leaked nodes between Dependency
        # instances when several app modules were analysed in one run.
        self.__node_set = set()  # node names already seen (deduplication)
        self.rootNode = None     # synthetic root of the dependency tree
        self.stack = []          # nodes on the tree path currently being parsed
        self.allNode = []        # every node ever created

    def get_top_level_aars(self):
        """Run Gradle and return the names of the direct (top-level) dependencies."""
        self.rootNode = TreeNode(self.appDir)
        self.stack.append(self.rootNode)
        self.allNode.append(self.rootNode)
        # Assemble the shell command; cd into the project so ./gradlew resolves.
        command = (self.commend % self.appDir) + os.path.join(self.projectPath, self.file_name)
        command = ("cd %s\nchmod +x gradlew\n" % self.projectPath) + command
        subprocess.check_call(command, shell=True)
        self.__check_dependency_file()
        nodes = []
        for n in self.allNode:
            node_name = n.value
            if len(n.parents) >= 1 and not self.__check_aar_in_export(node_name):
                # Keep only nodes that hang directly off the root.
                for p in n.parents:
                    if p == self.rootNode:
                        nodes.append(node_name)
                        continue
        return nodes

    def __check_dependency_file(self):
        """Parse the gradle dependency report into the TreeNode graph."""
        dep_file = os.path.join(self.projectPath, self.file_name)
        # Read the report line by line.
        with open(dep_file) as f:
            line = f.readline()
            while line:
                line = line.rstrip("\n")
                # Only tree lines (starting with +, | or \) carry dependencies.
                if len(line) == 0 or (
                        not line.startswith("+") and (not line.startswith("|")) and (not line.startswith("\\"))):
                    line = f.readline()
                    continue
                # Map the ASCII tree drawing to one '!' per nesting level.
                # NOTE(review): the exact space counts were reconstructed from a
                # whitespace-mangled source - verify against the upstream script.
                line = line.replace("\\", "+").replace("+---", "    ").replace("|", " ").replace("     ", "!")
                current_level = line.count("!")
                if current_level == 0:
                    line = f.readline()
                    continue
                # Pop the stack until we find this node's parent (the nearest
                # node with a strictly smaller level).
                last_parent = self.stack.pop()
                parent_level = last_parent.get_level()
                while not (current_level > parent_level):
                    last_parent = self.stack.pop()
                    parent_level = last_parent.get_level()
                # Normalize "group:artifact:requested -> resolved (*)" forms.
                line = line.replace("!", "").replace(" -> ", ":").replace(" (*)", "")
                buffer = line.split(":")
                tmp_length = len(buffer)
                if tmp_length > 2:
                    # Keep group, artifact and the *resolved* version.
                    line = "%s:%s:%s" % (buffer[0], buffer[1], buffer[-1])
                if line in self.__node_set:
                    # Already known: just record the additional parent edge.
                    for node in self.allNode:
                        if node.value == line:
                            self.__update_node(node, last_parent)
                            break
                else:
                    node = TreeNode(line)
                    self.__update_node(node, last_parent)
                    self.__node_set.add(node.value)
                    self.allNode.append(node)
                node.add_parent(last_parent)
                last_parent.add_child(node)
                self.stack.append(last_parent)
                self.stack.append(node)
                line = f.readline()

    # Record a parent/child dependency edge.
    @staticmethod
    def __update_node(node, parent):
        node.add_parent(parent)
        parent.add_child(node)

    def __get_array_node(self, array):
        # For the given aar names, compute the nodes reachable ONLY through
        # them:
        #   1. record the full sub-tree of every input node in ``result``;
        #   2. copy it to ``des`` and drop every node that also has a parent
        #      outside ``des`` (i.e. is shared with an unrelated dependency).
        input_set = set()
        result = set()
        for s in array:
            n = self.__find_node_by_name(s)
            if n is None:
                print("暂无该依赖节点%s" % s)
                continue
            input_set.add(n)
            result.add(n)
            self.__add_children_node(n, result)
        des = result.copy()
        for rNode in result:
            if rNode in input_set or 1 == len(rNode.parents):
                continue
            for p in rNode.parents:
                if p not in des:
                    des.remove(rNode)
                    break
        return des

    def get_input_aar(self, target_aar):
        """Return ``target_aar`` plus every dependency reachable only through it."""
        if self.__check_aar_in_export(target_aar):
            print("不支持【%s】该类型的库统计!" % target_aar)
            return []
        array = [target_aar.lstrip(" ").rstrip(" ")]
        aars = self.__get_array_node(array)
        result = []
        for aar in aars:
            if self.__check_aar_in_export(aar.value):
                continue
            result.append(aar.value)
        return result

    def __check_aar_in_export(self, aar_name):
        """True when the aar belongs to the excluded (framework) prefixes."""
        result = False
        for s in self.exportArr:
            if aar_name.startswith(s):
                result = True
                break
        return result

    def __add_children_node(self, node, node_set):
        # Depth-first collection of the whole sub-tree into ``node_set``.
        if len(node.children) == 0:
            node_set.add(node)
            return
        for child in node.children:
            node_set.add(child)
            self.__add_children_node(child, node_set)

    def __find_node_by_name(self, node_name):
        # First node whose value contains ``node_name`` (substring match).
        node = None
        for n in self.allNode:
            if n.value.find(node_name) != -1:
                node = n
                break
        return node
class AarCache:
    """Locate a dependency's .aar file inside the local Gradle artifact cache."""

    # Path of the .aar resolved by the most recent get_aar_file() call.
    targetAarPath = ""
    # Root of the local Gradle artifact cache (.../caches/modules-2/files-2.1).
    gradleUserHome = ""
    __envMap = os.environ

    def __init__(self):
        # Prefer GRADLE_USER_HOME from the environment if it points somewhere real.
        gradle_home = self.__envMap.get("GRADLE_USER_HOME")
        if gradle_home is None or not os.path.exists(gradle_home):
            # Fall back to the default location ~/.gradle
            gradle_home = os.path.join(self.__envMap.get('HOME'), ".gradle")
        self.gradleUserHome = os.path.join(gradle_home, "caches", "modules-2", "files-2.1")

    def get_aar_file(self, aar_name):
        """Resolve ``group:artifact:version`` in the cache; True when the .aar exists.

        Side effect: stores the resolved path in ``self.targetAarPath``.
        """
        aar_info = aar_name.split(":")
        aar_path = os.path.join(self.gradleUserHome, aar_info[0], aar_info[1], aar_info[2])
        # print(aarPath)
        if not os.path.exists(aar_path):
            print("aar 本地缓存不存在")
            return False
        aar_file = self.__get_aar_file_(aar_path)
        self.targetAarPath = aar_file
        # __get_aar_file_ may return None when no .aar is present below the path.
        return aar_file and os.path.exists(aar_file)

    @staticmethod
    def __get_aar_file_(file):
        # Walk below the version directory (the cache inserts a hash level)
        # until the first .aar shows up; returns None when nothing matches.
        for root, dirs, files in os.walk(file, topdown=False):
            for name in files:
                if name.endswith(".aar"):
                    return os.path.join(root, name)
class Compile:
    """Rewrite the cloned project's Gradle files and drive the build."""

    def __init__(self, output_project_path):
        self.outputProjectPath = output_project_path

    def new_module(self, app_dirs):
        """Create an empty 'zucker' library module and make every app depend on it.

        Returns the absolute path of the new module's src/main directory.
        """
        compile_sdk_version = ""
        build_tools_version = ""
        # settings.gradle: register the new module.
        settings = os.path.join(self.outputProjectPath, "settings.gradle")
        with open(settings, 'a+') as f:
            f.write("\ninclude ':zucker'")
        # app build.gradle: inject the project dependency into 'dependencies {'.
        regex = re.compile(r'dependencies([\s]*){')
        STATEMENT = "    implementation project(':zucker')\n"
        for dir in app_dirs:
            lines = []
            gradle_file = os.path.join(self.outputProjectPath, dir, "build.gradle")
            with open(gradle_file, 'r') as f:
                has_found_dependencies = False
                bracket_count = 0
                for line in f.readlines():
                    lines.append(line)
                    if line.lstrip().startswith("//"):
                        # NOTE(review): comment lines are not actually skipped here.
                        pass
                    if has_found_dependencies:
                        # Track braces after the dependencies block was entered.
                        for index, c in enumerate(line):
                            if c == '{':
                                bracket_count += 1
                            elif c == '}':
                                bracket_count -= 1
                                if bracket_count < 0:
                                    # Unbalanced close: roll the injection back.
                                    lines.remove(STATEMENT)
                                    has_found_dependencies = False
                                    bracket_count = 0
                    if "compileSdkVersion" in line:
                        compile_sdk_version = line
                    elif "buildToolsVersion" in line:
                        build_tools_version = line
                    elif regex.search(line):
                        has_found_dependencies = True
                        bracket_count += 1
                        lines.append(STATEMENT)
            with open(gradle_file, "w") as f:
                f.writelines(lines)
                f.close()  # redundant inside 'with', kept as in the original
        # zucker dir
        zucker = os.path.join(self.outputProjectPath, "zucker")
        if not Path(zucker).exists():
            os.mkdir(zucker)
        # src dir
        src = os.path.join(zucker, "src")
        if not Path(src).exists():
            os.mkdir(src)
        # zucker main
        main = os.path.join(src, "main")
        if not Path(main).exists():
            os.mkdir(main)
        # AndroidManifest
        manifest = os.path.join(main, "AndroidManifest.xml")
        with open(manifest, 'w') as f:
            f.write("<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\" package=\"com.zucker\" />")
        # Module build.gradle, reusing the SDK versions found in the app module.
        build = os.path.join(zucker, "build.gradle")
        with open(build, 'w') as f:
            f.write("apply plugin: 'com.android.library'\n\n")
            f.write("android {\n")
            f.write(compile_sdk_version + "\n")
            f.write(build_tools_version + "\n")
            f.write("}\n\n")
            f.write("dependencies {\n\n}")
        return main

    def clear_flavors(self, app_dirs):
        """Empty the productFlavors block so the build has a single variant."""
        self.__clear_bucket_content('productFlavors', app_dirs)

    def insert_script(self, app_dirs):
        """Append a Gradle snippet that writes the debug APK size into zucker.txt."""
        PATH = self.outputProjectPath
        for targetFile in app_dirs:
            package_size_path = os.path.join(PATH, targetFile, "zucker.txt")
            # Pre-create (truncate) the size file the snippet will write to.
            open(package_size_path, 'w')
            gradle_file = os.path.join(PATH, targetFile, 'build.gradle')
            with open(gradle_file, 'a+') as f:
                f.write("\n")
                f.write("android.applicationVariants.all { variant ->\n")
                f.write("    variant.outputs.all { output ->\n")
                f.write("        if (output.outputFileName.contains('debug.apk')) {\n")
                f.write("            Task assembleDebug = tasks.getByName('assembleDebug')\n")
                f.write("            File file = output.outputFile\n")
                f.write("            assembleDebug.doLast {\n")
                f.write("                def apkSize = file.length().toString()\n")
                f.write("                print('ApkSize: '+apkSize)\n")
                f.write("                def packageSizeFile = new File(\"" + package_size_path + "\")\n")
                f.write("                packageSizeFile.withWriter { writer ->\n")
                f.write("                    writer.write(apkSize)\n")
                f.write("                }\n")
                f.write("            }\n")
                f.write("        }\n")
                f.write("    }\n")
                f.write("}\n\n")

    def find_app_dirs(self):
        """Return every top-level directory whose build.gradle applies the
        'com.android.application' plugin (i.e. the app modules)."""
        app_dirs = []
        PATH = self.outputProjectPath
        dir_list = [x for x in os.listdir(PATH) if
                    os.path.isdir(os.path.join(PATH, x)) and not x.startswith('.') and not x == 'gradle']
        for targetFile in dir_list:
            gradle_file = os.path.join(PATH, targetFile, 'build.gradle')
            if os.path.isfile(gradle_file):
                with open(gradle_file) as f:
                    for index, line in enumerate(f.readlines()):
                        if "apply plugin: 'com.android.application'" in line:
                            app_dirs.append(targetFile)
                            break
        return app_dirs

    def __clear_bucket_content(self, TAG, appDirs):
        # Empty the body of the Gradle block named TAG in each app module's
        # build.gradle while keeping the surrounding braces.
        # NOTE(review): the source this was reconstructed from had its
        # indentation stripped; the nesting of the brace-tracking branches is a
        # best-effort reconstruction - verify against the upstream script.
        PATH = self.outputProjectPath
        for targetFile in appDirs:
            gradle_file = os.path.join(PATH, targetFile, 'build.gradle')
            with open(gradle_file, 'r') as f:
                tag_lines = []
                has_find_tag = False
                has_find_start_tag = False
                has_find_end_tag = False
                bracket_count = 0
                for line in f.readlines():
                    # Comment lines are copied through untouched.
                    if line.lstrip().startswith("//"):
                        tag_lines.append(line)
                        continue
                    if not has_find_tag:
                        index = line.find(TAG)
                        if index >= 0:
                            # Tag line: locate its opening brace and, if the
                            # block closes on the same line, the closing one.
                            has_find_tag = True
                            start_index = 0
                            end_index = len(line)
                            for index, c in enumerate(line):
                                if c == '{':
                                    if not has_find_start_tag:
                                        has_find_start_tag = True
                                        start_index = index + 1
                                    bracket_count += 1
                                elif c == '}':
                                    bracket_count -= 1
                                    if has_find_start_tag and bracket_count == 0:
                                        has_find_end_tag = True
                                        end_index = index
                                        break
                            if has_find_end_tag:
                                # Single-line block: keep everything but the body.
                                tag_lines.append(line[0:start_index] + line[end_index:len(line)])
                            else:
                                if has_find_start_tag:
                                    tag_lines.append(line[0:start_index] + "\n")
                                else:
                                    tag_lines.append(line)
                    if has_find_tag and not has_find_end_tag:
                        # Inside the block: drop content until the closing brace.
                        start_index = -1
                        end_index = len(line)
                        for index, c in enumerate(line):
                            if c == '{':
                                if not has_find_start_tag:
                                    has_find_start_tag = True
                                    start_index = index + 1
                                bracket_count += 1
                            elif c == '}':
                                bracket_count -= 1
                                if has_find_start_tag and bracket_count == 0:
                                    has_find_end_tag = True
                                    end_index = index
                                    break
                        if has_find_start_tag:
                            linebreak = ""
                            if start_index >= 0:
                                linebreak = "\n"
                            else:
                                start_index = 0
                            if has_find_end_tag:
                                tag_lines.append(line[0:start_index] + linebreak + "    " + line[end_index:len(line)])
                            else:
                                tag_lines.append(line[0:start_index] + linebreak)
                        else:
                            tag_lines.append(line)
                    if has_find_tag and has_find_end_tag:
                        # After (and including) the closing line: copy through.
                        tag_lines.append(line)
                    if not has_find_tag and not has_find_end_tag:
                        # Before the tag line: copy through.
                        tag_lines.append(line)
                if has_find_tag:
                    fd = open(gradle_file, "w")
                    fd.writelines(tag_lines)
                    fd.close()

    def compile(self):
        """Run a clean debug build of the rewritten project."""
        command = "cd " + self.outputProjectPath + "\n"
        command += "chmod +x gradlew" + "\n"
        command += "./gradlew clean" + "\n"
        command += "./gradlew assembleDebug"
        subprocess.call(command, shell=True)
class MockCache:
    """Replace a cached .aar with a hollowed-out mock that keeps resource ids
    resolvable while contributing (almost) no size to the APK."""

    # Size in bytes of the res/ files copied into the zucker module.
    zucker_res_size = 0

    def __init__(self, origin_aar_cache_path, target_main_src_path):
        # Paths involved: the original cached aar, its zip/aar backups, and the
        # zucker module's src/main directory receiving the real resources.
        self.originAarCachePath = origin_aar_cache_path
        self.targetMainSrcPath = target_main_src_path
        self.mockAarCachePath = origin_aar_cache_path.replace(".aar", "-origin.zip")
        self.mockAarOriginPath = origin_aar_cache_path.replace(".aar", "-origin.aar")

    def mock_cache(self):
        """Unzip the aar, blank out its resources, and rezip the hollow tree."""
        copyfile(self.originAarCachePath, self.mockAarCachePath)
        copyfile(self.originAarCachePath, self.mockAarOriginPath)
        # Unzip the backup next to the original aar.
        unzip_file = os.path.dirname(self.originAarCachePath) + "/" + (
            os.path.basename(self.originAarCachePath)).replace(".aar", "")
        file_zip = zipfile.ZipFile(self.mockAarCachePath, 'r')
        for file in file_zip.namelist():
            file_zip.extract(file, unzip_file)
        file_zip.close()
        self._copy_mock_file(unzip_file, self.targetMainSrcPath)
        # Basic mock: rewrite each copied resource to a minimal placeholder.
        for root, dirs, files in os.walk(os.path.dirname(self.targetMainSrcPath + "/res"), topdown=False):
            for name in files:
                if name.startswith('values') and name.endswith('.xml'):
                    pass
                elif name in ('AndroidManifest.xml'):
                    # NOTE(review): string (not tuple) membership - this is a
                    # substring test against 'AndroidManifest.xml'.
                    pass
                elif ('layout' in root) and name.endswith('.xml'):
                    mypath = os.path.join(root, name)
                    fd = open(mypath, "w")
                    fd.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>")
                    fd.write("<FrameLayout/>")
                    fd.close()
                elif ('drawable' in root) and name.endswith('.xml'):
                    mypath = os.path.join(root, name)
                    fd = open(mypath, "w")
                    fd.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>")
                    fd.write("<selector/>")
                    fd.close()
                elif ('drawable' in root) and name.endswith('.9.png'):
                    pass
                elif ('mipmap' in root) and name.endswith('.9.png'):
                    pass
                elif ('anim' in root) and name.endswith('.xml'):
                    mypath = os.path.join(root, name)
                    fd = open(mypath, "w")
                    fd.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>")
                    fd.write("<set/>")
                    fd.close()
                elif ('color' in root) and name.endswith('.xml'):
                    mypath = os.path.join(root, name)
                    fd = open(mypath, "w")
                    fd.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>")
                    fd.write("<selector/>")
                    fd.close()
                elif ('xml' in root) and name.endswith('.xml'):
                    mypath = os.path.join(root, name)
                    fd = open(mypath, "w")
                    fd.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>")
                    fd.write("<paths/>")
                    fd.close()
                else:
                    # Everything else is truncated to an empty file.
                    mypath = os.path.join(root, name)
                    fd = open(mypath, "w")
                    fd.writelines([])
                    fd.close()
        # Delete everything from the unzipped tree except classes.jar.
        white_list = ["classes.jar"]
        dirs = os.listdir(unzip_file)
        for root, dirs, files in os.walk(unzip_file, topdown=False):
            for name in files:
                if name in white_list:
                    pass
                else:
                    os.remove(os.path.join(root, name))
        # Remove the original aar from the cache directory.
        for root, dirs, files in os.walk(os.path.dirname(self.originAarCachePath), topdown=False):
            for name in files:
                if name in os.path.basename(self.originAarCachePath):
                    os.remove(os.path.join(root, name))
        # Record the size of the res files that now live in the zucker module;
        # it is subtracted from the mocked APK size later.
        self.zucker_res_size = self._get_dir_size(os.path.dirname(self.targetMainSrcPath + "/res"))
        # Rezip the hollow tree back into an .aar with the original name.
        self._zip_mock_file(unzip_file)

    @staticmethod
    def _get_dir_size(path):
        """Total size in bytes of all files below ``path`` (or of the file itself)."""
        if os.path.isdir(path):
            file_size, dir_list = 0, [path]
            while dir_list:
                path = dir_list.pop()
                dirs = os.listdir(path)
                for name in dirs:
                    file_path = os.path.join(path, name)
                    if os.path.isfile(file_path):
                        file_size += os.path.getsize(file_path)
                    else:
                        dir_list.append(file_path)
            return file_size
        elif os.path.isfile(path):
            return os.path.getsize(path)
        else:
            print('找不到%s文件' % path)

    def _copytree(self, src, dst, symlinks=False, ignore=None, copy_function=shutil.copy2):
        # Vendored variant of shutil.copytree that tolerates an existing dst.
        names = os.listdir(src)
        if ignore is not None:
            ignored_names = ignore(src, names)
        else:
            ignored_names = set()
        if not os.path.exists(dst):
            os.makedirs(dst)
        errors = []
        for name in names:
            if name in ignored_names:
                continue
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            try:
                if os.path.islink(srcname):
                    linkto = os.readlink(srcname)
                    if symlinks:
                        # We can't just leave it to `copy_function` because legacy
                        # code with a custom `copy_function` may rely on copytree
                        # doing the right thing.
                        os.symlink(linkto, dstname)
                        shutil.copystat(srcname, dstname, follow_symlinks=not symlinks)
                    else:
                        # ignore dangling symlink if the flag is on
                        if not os.path.exists(linkto) and False:
                            continue
                        # otherwise let the copy occurs. copy2 will raise an error
                        if os.path.isdir(srcname):
                            self._copytree(srcname, dstname, symlinks, ignore,
                                           copy_function)
                        else:
                            copy_function(srcname, dstname)
                elif os.path.isdir(srcname):
                    self._copytree(srcname, dstname, symlinks, ignore, copy_function)
                else:
                    # Will raise a SpecialFileError for unsupported file types
                    copy_function(srcname, dstname)
            # catch the Error from the recursive copytree so that we can
            # continue with other files
            except shutil.Error as err:
                errors.extend(err.args[0])
            except OSError as why:
                errors.append((srcname, dstname, str(why)))
        try:
            shutil.copystat(src, dst)
        except OSError as why:
            # Copying file access times may fail on Windows
            if getattr(why, 'winerror', None) is None:
                errors.append((src, dst, str(why)))
        if errors:
            raise shutil.Error(errors)
        return dst

    def _copy_mock_file(self, origin_path, target_main_src_path):
        # Copy the aar's res/ tree into the zucker module (creating an empty
        # res/ when the aar had none).
        origin_path = origin_path + "/res"
        if os.path.exists(origin_path):
            self._copytree(origin_path, target_main_src_path + "/res")
        elif not os.path.exists(target_main_src_path + "/res"):
            os.makedirs(target_main_src_path + "/res")

    @staticmethod
    def _zip_mock_file(start_dir):
        """Zip ``start_dir`` into ``<start_dir>.aar`` and return that path."""
        start_dir = start_dir
        file_news = start_dir + '.aar'
        z = zipfile.ZipFile(file_news, 'w', zipfile.ZIP_DEFLATED)
        for dir_path, dir_names, file_names in os.walk(start_dir):
            # Store paths relative to start_dir inside the archive.
            f_path = dir_path.replace(start_dir, '')
            f_path = f_path and f_path + os.sep or ''
            for filename in file_names:
                z.write(os.path.join(dir_path, filename), f_path + filename)
        z.close()
        return file_news

    @staticmethod
    def add_configurations(aar, output_project_path, app_dir):
        # Add an all*.exclude for the mocked group/module to the app module's
        # build.gradle so Gradle does not pull the real artifact back in.
        aars = aar.split(":")
        name = aars[1] + "-" + aars[2]
        sub_path = os.path.join(output_project_path, app_dir)
        build_gradle = sub_path + "/build.gradle"
        if os.path.exists(build_gradle):
            # Read the current content first.
            configurations = open(build_gradle, 'r')
            content = configurations.read()
            post = content.find("configurations {")
            if post != -1:
                # Existing block: splice the exclude right after the brace.
                configurations = open(build_gradle, 'w')
                content = content[:post + len("configurations {")] + "\n" + "all*.exclude group: \'" + aars[
                    0] + "\'" + " ,module: " + "\'" + aars[1] + "\'\n" + content[post + len("configurations {"):]
                configurations.write(content)
            else:
                # No block yet: append a fresh one.
                configurations = open(build_gradle, 'a+')
                configurations.write("configurations {")
                configurations.write("\n")
                configurations.write("all*.exclude group: \'" + aars[0] + "\'" + " ,module: " + "\'" + aars[1] + "\'")
                configurations.write("\n")
                configurations.write("}")
            configurations.close()
class RevertCache:
    """Restore the mocked Gradle cache directory to its original state."""

    def __init__(self, origin_aar_cache_path):
        # Path of the (now replaced) .aar inside the Gradle cache.
        self.originAarCachePath = origin_aar_cache_path

    def revert(self):
        """Delete every mocked artefact and rename ``*-origin.aar`` back to ``*.aar``."""
        cache_dir = os.path.dirname(self.originAarCachePath)
        # Pass 1: remove everything except the preserved ``*-origin.aar``
        # backups; bottom-up so directories are empty before rmdir.
        for root, dirs, files in os.walk(cache_dir, topdown=False):
            for name in files:
                if not name.endswith("-origin.aar"):
                    os.remove(os.path.join(root, name))
            for name in dirs:
                os.rmdir(os.path.join(root, name))
        # Pass 2: rename the backups back to their original file names.
        for root, dirs, files in os.walk(cache_dir, topdown=False):
            for name in files:
                if name.endswith("-origin.aar"):
                    new_name = name.replace("-origin.aar", ".aar")
                    os.rename(os.path.join(root, name), os.path.join(root, new_name))
class PackageSize:
    """Compute the size contribution of the replaced AAR from the two builds."""

    @staticmethod
    def get_result(base_output_project_path, aar_output_project_path, app_dir, zucker_res_size):
        """Print and return the AAR size in bytes.

        aar size = base APK size - (mocked APK size - 2 * zucker res size);
        the doubling accounts for the resources existing both in the original
        aar and in the zucker module.  Returns the computed size (the old
        version only printed it, making the result unusable programmatically).
        """
        base_pack_size = 0
        aar_pack_size = 0
        base_pack_size_path = os.path.join(base_output_project_path, app_dir, "zucker.txt")
        aar_pack_size_path = os.path.join(aar_output_project_path, app_dir, "zucker.txt")
        with open(base_pack_size_path) as f:
            # The build script writes a single number into zucker.txt.
            line = f.readline().strip()
            if line:
                base_pack_size = int(line)
                print("基础包大小(basePackSize,单位Byte): " + line)
        with open(aar_pack_size_path) as f:
            line = f.readline().strip()
            if line:
                aar_pack_size = int(line) - int(zucker_res_size) * 2
                print("替换后的APK大小(aarPackSize,单位Byte): " + str(aar_pack_size))
        aar_size = int(base_pack_size) - int(aar_pack_size)
        print("AAR大小(aarSize,单位Byte): " + str(aar_size))
        return aar_size
if __name__ == '__main__':
    # Supply a default project name; a real argv[1] still wins because
    # append() only adds the default behind any user-provided arguments.
    sys.argv.append("ZuckerDemo")
    # Project folder name
    projectName = sys.argv[1]
    # Current working directory
    currentPath = os.getcwd()
    # Output directory
    outputPath = os.path.join(currentPath, "output")
    # Size of the res files moved into the zucker module
    zuckerResSize = ""
    # Whether the aar was found in the local Gradle cache
    isCacheExist = False
    # Base package: clone + build ======================================
    # Base package project directory
    baseOutputProjectPath = os.path.join(outputPath, projectName + "_BASE")
    if not os.path.exists(baseOutputProjectPath):
        # Clone the project
        cloneBaseProject = CloneProject()
        cloneBaseProject.clone(currentPath, outputPath, projectName, projectName + "_BASE")
        print("cloneBaseProject DONE")
        # Build the baseline APK
        baseCompile = Compile(baseOutputProjectPath)
        baseAppDirs = baseCompile.find_app_dirs()
        print("findBaseAppDirs DONE")
        baseCompile.clear_flavors(baseAppDirs)
        print("clearBaseFlavors DONE")
        baseCompile.insert_script(baseAppDirs)
        print("insertBaseScript DONE")
        baseCompile.compile()
    # AAR run: clone, resolve dependencies, mock, build ================
    # AAR project directory
    outputProjectPath = os.path.join(outputPath, projectName + "_AAR")
    # Remove a stale copy if it already exists
    if os.path.exists(outputProjectPath):
        shutil.rmtree(outputProjectPath, True)
    # Clone the project
    cloneProject = CloneProject()
    cloneProject.clone(currentPath, outputPath, projectName, projectName + "_AAR")
    print("cloneAARProject DONE")
    # Prepare the build
    compile = Compile(outputProjectPath)
    appDirs = compile.find_app_dirs()
    print("findAARAppDirs DONE")
    zuckerModuleMainDir = compile.new_module(appDirs)
    print("newModule DONE: " + zuckerModuleMainDir)
    compile.clear_flavors(appDirs)
    print("clearAARFlavors DONE")
    compile.insert_script(appDirs)
    print("insertAARScript DONE")
    # Walk every application module and its aar dependencies
    for appdir in appDirs:
        dependency = Dependency(outputProjectPath, appdir)
        aars = dependency.get_top_level_aars()
        for aar in aars:
            print(aar)
        target_aar = input("输入AAR名称及版本,格式xxx.xxx:xxx:xxx:")
        result_aars = dependency.get_input_aar(target_aar)
        print("输出AAR:")
        print(result_aars)
        targetAarArray = []
        for aar in result_aars:
            aar_cache = AarCache()
            if aar_cache.get_aar_file(aar):
                # Found in the local cache: mock it in place.
                print(aar_cache.targetAarPath)
                mockCache = MockCache(aar_cache.targetAarPath, zuckerModuleMainDir)
                mockCache.mock_cache()
                mockCache.add_configurations(aar, outputProjectPath, appdir)
                zuckerResSize = mockCache.zucker_res_size
                targetAarArray.append(aar_cache.targetAarPath)
                isCacheExist = True
            else:
                isCacheExist = False
        if isCacheExist:
            compile.compile()
            print("compile DONE")
            # Roll the modified aars back
            for path in targetAarArray:
                revertCache = RevertCache(path)
                revertCache.revert()
            # Compute the size report and print it
            packSize = PackageSize()
            packSize.get_result(baseOutputProjectPath, compile.outputProjectPath, appdir, zuckerResSize)
        else:
            print("缓存aar未找到,请重新尝试")
        # break
| [
"zipfile.ZipFile",
"re.compile",
"copy.deepcopy",
"os.path.islink",
"os.walk",
"os.path.exists",
"os.listdir",
"os.readlink",
"pathlib.Path",
"sys.argv.append",
"os.path.isdir",
"subprocess.call",
"os.mkdir",
"shutil.Error",
"shutil.copystat",
"os.path.getsize",
"subprocess.check_cal... | [((30736, 30765), 'sys.argv.append', 'sys.argv.append', (['"""ZuckerDemo"""'], {}), "('ZuckerDemo')\n", (30751, 30765), False, 'import sys\n'), ((30839, 30850), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (30848, 30850), False, 'import os\n'), ((30879, 30914), 'os.path.join', 'os.path.join', (['currentPath', '"""output"""'], {}), "(currentPath, 'output')\n", (30891, 30914), False, 'import os\n'), ((31089, 31136), 'os.path.join', 'os.path.join', (['outputPath', "(projectName + '_BASE')"], {}), "(outputPath, projectName + '_BASE')\n", (31101, 31136), False, 'import os\n'), ((31836, 31882), 'os.path.join', 'os.path.join', (['outputPath', "(projectName + '_AAR')"], {}), "(outputPath, projectName + '_AAR')\n", (31848, 31882), False, 'import os\n'), ((31907, 31940), 'os.path.exists', 'os.path.exists', (['outputProjectPath'], {}), '(outputProjectPath)\n', (31921, 31940), False, 'import os\n'), ((1215, 1247), 'os.path.join', 'os.path.join', (['source_dir', 'target'], {}), '(source_dir, target)\n', (1227, 1247), False, 'import os\n'), ((1442, 1457), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (1452, 1457), False, 'import os\n'), ((1495, 1527), 'os.path.join', 'os.path.join', (['target_dir', 'target'], {}), '(target_dir, target)\n', (1507, 1527), False, 'import os\n'), ((4171, 4218), 'subprocess.check_call', 'subprocess.check_call', (['self.commend'], {'shell': '(True)'}), '(self.commend, shell=True)\n', (4192, 4218), False, 'import subprocess\n'), ((4660, 4706), 'os.path.join', 'os.path.join', (['self.projectPath', 'self.file_name'], {}), '(self.projectPath, self.file_name)\n', (4672, 4706), False, 'import os\n'), ((9346, 9407), 'os.path.join', 'os.path.join', (['gradle_home', '"""caches"""', '"""modules-2"""', '"""files-2.1"""'], {}), "(gradle_home, 'caches', 'modules-2', 'files-2.1')\n", (9358, 9407), False, 'import os\n'), ((9518, 9590), 'os.path.join', 'os.path.join', (['self.gradleUserHome', 'aar_info[0]', 'aar_info[1]', 
'aar_info[2]'], {}), '(self.gradleUserHome, aar_info[0], aar_info[1], aar_info[2])\n', (9530, 9590), False, 'import os\n'), ((9939, 9967), 'os.walk', 'os.walk', (['file'], {'topdown': '(False)'}), '(file, topdown=False)\n', (9946, 9967), False, 'import os\n'), ((10357, 10412), 'os.path.join', 'os.path.join', (['self.outputProjectPath', '"""settings.gradle"""'], {}), "(self.outputProjectPath, 'settings.gradle')\n", (10369, 10412), False, 'import os\n'), ((10539, 10574), 're.compile', 're.compile', (['"""dependencies([\\\\s]*){"""'], {}), "('dependencies([\\\\s]*){')\n", (10549, 10574), False, 'import re\n'), ((12103, 12149), 'os.path.join', 'os.path.join', (['self.outputProjectPath', '"""zucker"""'], {}), "(self.outputProjectPath, 'zucker')\n", (12115, 12149), False, 'import os\n'), ((12249, 12276), 'os.path.join', 'os.path.join', (['zucker', '"""src"""'], {}), "(zucker, 'src')\n", (12261, 12276), False, 'import os\n'), ((12375, 12400), 'os.path.join', 'os.path.join', (['src', '"""main"""'], {}), "(src, 'main')\n", (12387, 12400), False, 'import os\n'), ((12509, 12550), 'os.path.join', 'os.path.join', (['main', '"""AndroidManifest.xml"""'], {}), "(main, 'AndroidManifest.xml')\n", (12521, 12550), False, 'import os\n'), ((12757, 12793), 'os.path.join', 'os.path.join', (['zucker', '"""build.gradle"""'], {}), "(zucker, 'build.gradle')\n", (12769, 12793), False, 'import os\n'), ((19369, 19405), 'subprocess.call', 'subprocess.call', (['command'], {'shell': '(True)'}), '(command, shell=True)\n', (19384, 19405), False, 'import subprocess\n'), ((19850, 19906), 'shutil.copyfile', 'copyfile', (['self.originAarCachePath', 'self.mockAarCachePath'], {}), '(self.originAarCachePath, self.mockAarCachePath)\n', (19858, 19906), False, 'from shutil import copyfile\n'), ((19915, 19972), 'shutil.copyfile', 'copyfile', (['self.originAarCachePath', 'self.mockAarOriginPath'], {}), '(self.originAarCachePath, self.mockAarOriginPath)\n', (19923, 19972), False, 'from shutil import copyfile\n'), 
((20153, 20196), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self.mockAarCachePath', '"""r"""'], {}), "(self.mockAarCachePath, 'r')\n", (20168, 20196), False, 'import zipfile\n'), ((22744, 22766), 'os.listdir', 'os.listdir', (['unzip_file'], {}), '(unzip_file)\n', (22754, 22766), False, 'import os\n'), ((22800, 22834), 'os.walk', 'os.walk', (['unzip_file'], {'topdown': '(False)'}), '(unzip_file, topdown=False)\n', (22807, 22834), False, 'import os\n'), ((23562, 23581), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (23575, 23581), False, 'import os\n'), ((24279, 24294), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (24289, 24294), False, 'import os\n'), ((26765, 26792), 'os.path.exists', 'os.path.exists', (['origin_path'], {}), '(origin_path)\n', (26779, 26792), False, 'import os\n'), ((27120, 27173), 'zipfile.ZipFile', 'zipfile.ZipFile', (['file_news', '"""w"""', 'zipfile.ZIP_DEFLATED'], {}), "(file_news, 'w', zipfile.ZIP_DEFLATED)\n", (27135, 27173), False, 'import zipfile\n'), ((27221, 27239), 'os.walk', 'os.walk', (['start_dir'], {}), '(start_dir)\n', (27228, 27239), False, 'import os\n'), ((27678, 27720), 'os.path.join', 'os.path.join', (['output_project_path', 'app_dir'], {}), '(output_project_path, app_dir)\n', (27690, 27720), False, 'import os\n'), ((27783, 27811), 'os.path.exists', 'os.path.exists', (['build_gradle'], {}), '(build_gradle)\n', (27797, 27811), False, 'import os\n'), ((28938, 28978), 'os.path.dirname', 'os.path.dirname', (['self.originAarCachePath'], {}), '(self.originAarCachePath)\n', (28953, 28978), False, 'import os\n'), ((28994, 29015), 'os.listdir', 'os.listdir', (['file_name'], {}), '(file_name)\n', (29004, 29015), False, 'import os\n'), ((29049, 29082), 'os.walk', 'os.walk', (['file_name'], {'topdown': '(False)'}), '(file_name, topdown=False)\n', (29056, 29082), False, 'import os\n'), ((29382, 29415), 'os.walk', 'os.walk', (['file_name'], {'topdown': '(False)'}), '(file_name, topdown=False)\n', (29389, 29415), 
False, 'import os\n'), ((29929, 29990), 'os.path.join', 'os.path.join', (['base_output_project_path', 'app_dir', '"""zucker.txt"""'], {}), "(base_output_project_path, app_dir, 'zucker.txt')\n", (29941, 29990), False, 'import os\n'), ((30020, 30080), 'os.path.join', 'os.path.join', (['aar_output_project_path', 'app_dir', '"""zucker.txt"""'], {}), "(aar_output_project_path, app_dir, 'zucker.txt')\n", (30032, 30080), False, 'import os\n'), ((31148, 31185), 'os.path.exists', 'os.path.exists', (['baseOutputProjectPath'], {}), '(baseOutputProjectPath)\n', (31162, 31185), False, 'import os\n'), ((31950, 31988), 'shutil.rmtree', 'shutil.rmtree', (['outputProjectPath', '(True)'], {}), '(outputProjectPath, True)\n', (31963, 31988), False, 'import shutil\n'), ((970, 997), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (984, 997), False, 'import os\n'), ((1011, 1032), 'os.mkdir', 'os.mkdir', (['output_path'], {}), '(output_path)\n', (1019, 1032), False, 'import os\n'), ((1289, 1339), 'os.path.join', 'os.path.join', (['source_dir', 'self.target_project_name'], {}), '(source_dir, self.target_project_name)\n', (1301, 1339), False, 'import os\n'), ((1372, 1404), 'os.path.join', 'os.path.join', (['source_dir', 'target'], {}), '(source_dir, target)\n', (1384, 1404), False, 'import os\n'), ((1543, 1569), 'os.path.exists', 'os.path.exists', (['target_dir'], {}), '(target_dir)\n', (1557, 1569), False, 'import os\n'), ((1583, 1603), 'os.mkdir', 'os.mkdir', (['target_dir'], {}), '(target_dir)\n', (1591, 1603), False, 'import os\n'), ((1705, 1717), 'os.walk', 'os.walk', (['dir'], {}), '(dir)\n', (1712, 1717), False, 'import os\n'), ((1888, 1908), 'os.path.join', 'os.path.join', (['dir', 'f'], {}), '(dir, f)\n', (1900, 1908), False, 'import os\n'), ((1934, 1961), 'os.path.join', 'os.path.join', (['target_dir', 'f'], {}), '(target_dir, f)\n', (1946, 1961), False, 'import os\n'), ((1977, 2002), 'os.path.isdir', 'os.path.isdir', (['sourcefile'], {}), 
'(sourcefile)\n', (1990, 2002), False, 'import os\n'), ((3994, 4040), 'os.path.join', 'os.path.join', (['self.projectPath', 'self.file_name'], {}), '(self.projectPath, self.file_name)\n', (4006, 4040), False, 'import os\n'), ((9631, 9655), 'os.path.exists', 'os.path.exists', (['aar_path'], {}), '(aar_path)\n', (9645, 9655), False, 'import os\n'), ((9831, 9855), 'os.path.exists', 'os.path.exists', (['aar_file'], {}), '(aar_file)\n', (9845, 9855), False, 'import os\n'), ((10715, 10772), 'os.path.join', 'os.path.join', (['self.outputProjectPath', 'dir', '"""build.gradle"""'], {}), "(self.outputProjectPath, dir, 'build.gradle')\n", (10727, 10772), False, 'import os\n'), ((12200, 12216), 'os.mkdir', 'os.mkdir', (['zucker'], {}), '(zucker)\n', (12208, 12216), False, 'import os\n'), ((12324, 12337), 'os.mkdir', 'os.mkdir', (['src'], {}), '(src)\n', (12332, 12337), False, 'import os\n'), ((12449, 12463), 'os.mkdir', 'os.mkdir', (['main'], {}), '(main)\n', (12457, 12463), False, 'import os\n'), ((13366, 13410), 'os.path.join', 'os.path.join', (['PATH', 'targetFile', '"""zucker.txt"""'], {}), "(PATH, targetFile, 'zucker.txt')\n", (13378, 13410), False, 'import os\n'), ((13478, 13524), 'os.path.join', 'os.path.join', (['PATH', 'targetFile', '"""build.gradle"""'], {}), "(PATH, targetFile, 'build.gradle')\n", (13490, 13524), False, 'import os\n'), ((14971, 15017), 'os.path.join', 'os.path.join', (['PATH', 'targetFile', '"""build.gradle"""'], {}), "(PATH, targetFile, 'build.gradle')\n", (14983, 15017), False, 'import os\n'), ((15033, 15060), 'os.path.isfile', 'os.path.isfile', (['gradle_file'], {}), '(gradle_file)\n', (15047, 15060), False, 'import os\n'), ((15516, 15562), 'os.path.join', 'os.path.join', (['PATH', 'targetFile', '"""build.gradle"""'], {}), "(PATH, targetFile, 'build.gradle')\n", (15528, 15562), False, 'import os\n'), ((20434, 20482), 'os.path.dirname', 'os.path.dirname', (["(self.targetMainSrcPath + '/res')"], {}), "(self.targetMainSrcPath + '/res')\n", (20449, 
20482), False, 'import os\n'), ((23068, 23108), 'os.path.dirname', 'os.path.dirname', (['self.originAarCachePath'], {}), '(self.originAarCachePath)\n', (23083, 23108), False, 'import os\n'), ((23364, 23412), 'os.path.dirname', 'os.path.dirname', (["(self.targetMainSrcPath + '/res')"], {}), "(self.targetMainSrcPath + '/res')\n", (23379, 23412), False, 'import os\n'), ((24057, 24077), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (24071, 24077), False, 'import os\n'), ((24436, 24455), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (24450, 24455), False, 'import os\n'), ((24469, 24485), 'os.makedirs', 'os.makedirs', (['dst'], {}), '(dst)\n', (24480, 24485), False, 'import os\n'), ((24618, 24641), 'os.path.join', 'os.path.join', (['src', 'name'], {}), '(src, name)\n', (24630, 24641), False, 'import os\n'), ((24664, 24687), 'os.path.join', 'os.path.join', (['dst', 'name'], {}), '(dst, name)\n', (24676, 24687), False, 'import os\n'), ((26343, 26368), 'shutil.copystat', 'shutil.copystat', (['src', 'dst'], {}), '(src, dst)\n', (26358, 26368), False, 'import shutil\n'), ((26604, 26624), 'shutil.Error', 'shutil.Error', (['errors'], {}), '(errors)\n', (26616, 26624), False, 'import shutil\n'), ((2105, 2131), 'os.path.isfile', 'os.path.isfile', (['sourcefile'], {}), '(sourcefile)\n', (2119, 2131), False, 'import os\n'), ((2184, 2216), 'shutil.copyfile', 'copyfile', (['sourcefile', 'targetfile'], {}), '(sourcefile, targetfile)\n', (2192, 2216), False, 'from shutil import copyfile\n'), ((9174, 9201), 'os.path.exists', 'os.path.exists', (['gradle_home'], {}), '(gradle_home)\n', (9188, 9201), False, 'import os\n'), ((14783, 14799), 'os.listdir', 'os.listdir', (['PATH'], {}), '(PATH)\n', (14793, 14799), False, 'import os\n'), ((20008, 20048), 'os.path.dirname', 'os.path.dirname', (['self.originAarCachePath'], {}), '(self.originAarCachePath)\n', (20023, 20048), False, 'import os\n'), ((23716, 23732), 'os.listdir', 'os.listdir', (['path'], {}), 
'(path)\n', (23726, 23732), False, 'import os\n'), ((24098, 24119), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (24113, 24119), False, 'import os\n'), ((24724, 24747), 'os.path.islink', 'os.path.islink', (['srcname'], {}), '(srcname)\n', (24738, 24747), False, 'import os\n'), ((26882, 26927), 'os.path.exists', 'os.path.exists', (["(target_main_src_path + '/res')"], {}), "(target_main_src_path + '/res')\n", (26896, 26927), False, 'import os\n'), ((26941, 26983), 'os.makedirs', 'os.makedirs', (["(target_main_src_path + '/res')"], {}), "(target_main_src_path + '/res')\n", (26952, 26983), False, 'import os\n'), ((2140, 2166), 'os.path.exists', 'os.path.exists', (['targetfile'], {}), '(targetfile)\n', (2154, 2166), False, 'import os\n'), ((10069, 10093), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (10081, 10093), False, 'import os\n'), ((12165, 12177), 'pathlib.Path', 'Path', (['zucker'], {}), '(zucker)\n', (12169, 12177), False, 'from pathlib import Path\n'), ((12292, 12301), 'pathlib.Path', 'Path', (['src'], {}), '(src)\n', (12296, 12301), False, 'from pathlib import Path\n'), ((12416, 12426), 'pathlib.Path', 'Path', (['main'], {}), '(main)\n', (12420, 12426), False, 'from pathlib import Path\n'), ((20071, 20112), 'os.path.basename', 'os.path.basename', (['self.originAarCachePath'], {}), '(self.originAarCachePath)\n', (20087, 20112), False, 'import os\n'), ((23184, 23225), 'os.path.basename', 'os.path.basename', (['self.originAarCachePath'], {}), '(self.originAarCachePath)\n', (23200, 23225), False, 'import os\n'), ((23799, 23823), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (23811, 23823), False, 'import os\n'), ((23847, 23872), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (23861, 23872), False, 'import os\n'), ((24778, 24798), 'os.readlink', 'os.readlink', (['srcname'], {}), '(srcname)\n', (24789, 24798), False, 'import os\n'), ((25764, 25786), 
'os.path.isdir', 'os.path.isdir', (['srcname'], {}), '(srcname)\n', (25777, 25786), False, 'import os\n'), ((27412, 27444), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (27424, 27444), False, 'import os\n'), ((29322, 29346), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (29334, 29346), False, 'import os\n'), ((29528, 29547), 'copy.deepcopy', 'copy.deepcopy', (['name'], {}), '(name)\n', (29541, 29547), False, 'import copy\n'), ((29639, 29690), 'os.rename', 'os.rename', (["(root + '/' + name)", "(root + '/' + new_name)"], {}), "(root + '/' + name, root + '/' + new_name)\n", (29648, 29690), False, 'import os\n'), ((14837, 14858), 'os.path.join', 'os.path.join', (['PATH', 'x'], {}), '(PATH, x)\n', (14849, 14858), False, 'import os\n'), ((22983, 23007), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (22995, 23007), False, 'import os\n'), ((23257, 23281), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (23269, 23281), False, 'import os\n'), ((23911, 23937), 'os.path.getsize', 'os.path.getsize', (['file_path'], {}), '(file_path)\n', (23926, 23937), False, 'import os\n'), ((25070, 25097), 'os.symlink', 'os.symlink', (['linkto', 'dstname'], {}), '(linkto, dstname)\n', (25080, 25097), False, 'import os\n'), ((25122, 25185), 'shutil.copystat', 'shutil.copystat', (['srcname', 'dstname'], {'follow_symlinks': '(not symlinks)'}), '(srcname, dstname, follow_symlinks=not symlinks)\n', (25137, 25185), False, 'import shutil\n'), ((25492, 25514), 'os.path.isdir', 'os.path.isdir', (['srcname'], {}), '(srcname)\n', (25505, 25514), False, 'import os\n'), ((29241, 29265), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (29253, 29265), False, 'import os\n'), ((20803, 20827), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (20815, 20827), False, 'import os\n'), ((21122, 21146), 'os.path.join', 'os.path.join', 
(['root', 'name'], {}), '(root, name)\n', (21134, 21146), False, 'import os\n'), ((25311, 25333), 'os.path.exists', 'os.path.exists', (['linkto'], {}), '(linkto)\n', (25325, 25333), False, 'import os\n'), ((21624, 21648), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (21636, 21648), False, 'import os\n'), ((21932, 21956), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (21944, 21956), False, 'import os\n'), ((22243, 22267), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (22255, 22267), False, 'import os\n'), ((22509, 22533), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (22521, 22533), False, 'import os\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest.mock import MagicMock
import pytest
from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink
from tests.test_utils.mock_operators import MockOperator
# XCom key under which the test link stores its data.
XCOM_KEY = "test_xcom_key"
# Extra keywords forwarded to persist() in the parametrized case below.
CUSTOM_KEYS = {
    "foo": "bar",
    "spam": "egg",
}
class SimpleBaseAwsLink(BaseAwsLink):
    # Minimal concrete BaseAwsLink used by the tests below; only the XCom key is set.
    key = XCOM_KEY
class TestBaseAwsLink:
    """Tests for BaseAwsLink.persist() XCom behaviour."""

    @pytest.mark.parametrize(
        "region_name, aws_partition,keywords,expected_value",
        [
            ("eu-central-1", "aws", {}, {"region_name": "eu-central-1", "aws_domain": "aws.amazon.com"}),
            ("cn-north-1", "aws-cn", {}, {"region_name": "cn-north-1", "aws_domain": "amazonaws.cn"}),
            (
                "us-gov-east-1",
                "aws-us-gov",
                {},
                {"region_name": "us-gov-east-1", "aws_domain": "amazonaws-us-gov.com"},
            ),
            (
                "eu-west-1",
                "aws",
                CUSTOM_KEYS,
                {"region_name": "eu-west-1", "aws_domain": "aws.amazon.com", **CUSTOM_KEYS},
            ),
        ],
    )
    def test_persist(self, region_name, aws_partition, keywords, expected_value):
        """persist() pushes region/partition (plus any extra keywords) to XCom."""
        mock_context = MagicMock()
        SimpleBaseAwsLink.persist(
            context=mock_context,
            operator=MockOperator(task_id="test_task_id"),
            region_name=region_name,
            aws_partition=aws_partition,
            **keywords,
        )
        # persist() is expected to push exactly once under the link's key.
        ti = mock_context["ti"]
        ti.xcom_push.assert_called_once_with(
            execution_date=None,
            key=XCOM_KEY,
            value=expected_value,
        )

    def test_disable_xcom_push(self):
        """No XCom push happens when the operator has do_xcom_push=False."""
        mock_context = MagicMock()
        SimpleBaseAwsLink.persist(
            context=mock_context,
            operator=MockOperator(task_id="test_task_id", do_xcom_push=False),
            region_name="eu-east-1",
            aws_partition="aws",
        )
        ti = mock_context["ti"]
        ti.xcom_push.assert_not_called()
| [
"pytest.mark.parametrize",
"unittest.mock.MagicMock",
"tests.test_utils.mock_operators.MockOperator"
] | [((1135, 1650), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""region_name, aws_partition,keywords,expected_value"""', "[('eu-central-1', 'aws', {}, {'region_name': 'eu-central-1', 'aws_domain':\n 'aws.amazon.com'}), ('cn-north-1', 'aws-cn', {}, {'region_name':\n 'cn-north-1', 'aws_domain': 'amazonaws.cn'}), ('us-gov-east-1',\n 'aws-us-gov', {}, {'region_name': 'us-gov-east-1', 'aws_domain':\n 'amazonaws-us-gov.com'}), ('eu-west-1', 'aws', CUSTOM_KEYS, {\n 'region_name': 'eu-west-1', 'aws_domain': 'aws.amazon.com', **CUSTOM_KEYS})\n ]"], {}), "('region_name, aws_partition,keywords,expected_value',\n [('eu-central-1', 'aws', {}, {'region_name': 'eu-central-1',\n 'aws_domain': 'aws.amazon.com'}), ('cn-north-1', 'aws-cn', {}, {\n 'region_name': 'cn-north-1', 'aws_domain': 'amazonaws.cn'}), (\n 'us-gov-east-1', 'aws-us-gov', {}, {'region_name': 'us-gov-east-1',\n 'aws_domain': 'amazonaws-us-gov.com'}), ('eu-west-1', 'aws',\n CUSTOM_KEYS, {'region_name': 'eu-west-1', 'aws_domain':\n 'aws.amazon.com', **CUSTOM_KEYS})])\n", (1158, 1650), False, 'import pytest\n'), ((1966, 1977), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1975, 1977), False, 'from unittest.mock import MagicMock\n'), ((2463, 2474), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2472, 2474), False, 'from unittest.mock import MagicMock\n'), ((2069, 2105), 'tests.test_utils.mock_operators.MockOperator', 'MockOperator', ([], {'task_id': '"""test_task_id"""'}), "(task_id='test_task_id')\n", (2081, 2105), False, 'from tests.test_utils.mock_operators import MockOperator\n'), ((2565, 2621), 'tests.test_utils.mock_operators.MockOperator', 'MockOperator', ([], {'task_id': '"""test_task_id"""', 'do_xcom_push': '(False)'}), "(task_id='test_task_id', do_xcom_push=False)\n", (2577, 2621), False, 'from tests.test_utils.mock_operators import MockOperator\n')] |
from django.db import models
from articles.models import Article
from users.models import User
class Comment(models.Model):
    """A threaded comment on an article, ordered by nesting depth."""

    # Explicit integer primary key, not user-editable.
    id = models.IntegerField(primary_key=True,
                             editable=False,
                             auto_created=True)
    # User who wrote the comment; deleting the user deletes their comments.
    author = models.ForeignKey(User,
                               on_delete=models.CASCADE,
                               related_name='comments')
    # Article being commented on; deleting the article deletes its comments.
    article = models.ForeignKey(Article,
                                on_delete=models.CASCADE,
                                related_name='comments')
    # Nesting level: 0 for top-level comments, incremented for replies.
    depth = models.IntegerField(auto_created=True,
                                default=0,
                                editable=False)
    # Self-referential link to the comment being replied to (null for
    # top-level comments).
    parent_comment = models.ForeignKey('self',
                                       on_delete=models.CASCADE,
                                       null=True,
                                       blank=True,
                                       editable=False,
                                       related_name='children')
    pub_date = models.DateField(auto_now_add=True)
    text = models.TextField()

    class Meta:
        ordering = ['depth']

    def __str__(self):
        return(self.text)
| [
"django.db.models.ForeignKey",
"django.db.models.DateField",
"django.db.models.TextField",
"django.db.models.IntegerField"
] | [((136, 208), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'editable': '(False)', 'auto_created': '(True)'}), '(primary_key=True, editable=False, auto_created=True)\n', (155, 208), False, 'from django.db import models\n'), ((280, 354), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""comments"""'}), "(User, on_delete=models.CASCADE, related_name='comments')\n", (297, 354), False, 'from django.db import models\n'), ((431, 508), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Article'], {'on_delete': 'models.CASCADE', 'related_name': '"""comments"""'}), "(Article, on_delete=models.CASCADE, related_name='comments')\n", (448, 508), False, 'from django.db import models\n'), ((585, 650), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'auto_created': '(True)', 'default': '(0)', 'editable': '(False)'}), '(auto_created=True, default=0, editable=False)\n', (604, 650), False, 'from django.db import models\n'), ((736, 855), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'blank': '(True)', 'editable': '(False)', 'related_name': '"""children"""'}), "('self', on_delete=models.CASCADE, null=True, blank=True,\n editable=False, related_name='children')\n", (753, 855), False, 'from django.db import models\n'), ((1062, 1097), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1078, 1097), False, 'from django.db import models\n'), ((1109, 1127), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1125, 1127), False, 'from django.db import models\n')] |
# -*- coding: utf-8 -*-
import requests_mock
from chaoslib.run import EventHandlerRegistry
from logzero import logger
from chaosgrafana.controls.loki import configure_control
def test_sending_to_loki():
    """Verify the loki control pushes a log stream carrying the expected labels."""
    experiment = {"title": "hello", "tags": {"One": "1"}}
    with requests_mock.Mocker() as mocked_http:
        mocked_http.post("http://localhost.test:3100/loki/api/v1/push", status_code=204)
        configure_control(
            experiment=experiment,
            event_registry=EventHandlerRegistry(),
            secrets={"auth": ("admin", "<PASSWORD>")},
            loki_endpoint="http://localhost.test:3100",
            tags={"test": "yes"},
        )
        # Emitting a log record should trigger a push to the mocked endpoint.
        logger.error("hello")
        assert mocked_http.called
        labels = mocked_http.request_history[0].json()["streams"][0]["stream"]
        assert labels["test"] == "yes"
        for expected_label in (
            "chaostoolkit_lib_version",
            "chaostoolkit_run_trace_id",
            "chaostoolkit_experiment_ref",
            "One",
        ):
            assert expected_label in labels
        assert labels["source"] == "chaostoolkit"
| [
"chaoslib.run.EventHandlerRegistry",
"requests_mock.Mocker",
"logzero.logger.error",
"chaosgrafana.controls.loki.configure_control"
] | [((215, 237), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (235, 237), False, 'import requests_mock\n'), ((343, 365), 'chaoslib.run.EventHandlerRegistry', 'EventHandlerRegistry', ([], {}), '()\n', (363, 365), False, 'from chaoslib.run import EventHandlerRegistry\n'), ((374, 586), 'chaosgrafana.controls.loki.configure_control', 'configure_control', ([], {'experiment': "{'title': 'hello', 'tags': {'One': '1'}}", 'event_registry': 'registry', 'secrets': "{'auth': ('admin', '<PASSWORD>')}", 'loki_endpoint': '"""http://localhost.test:3100"""', 'tags': "{'test': 'yes'}"}), "(experiment={'title': 'hello', 'tags': {'One': '1'}},\n event_registry=registry, secrets={'auth': ('admin', '<PASSWORD>')},\n loki_endpoint='http://localhost.test:3100', tags={'test': 'yes'})\n", (391, 586), False, 'from chaosgrafana.controls.loki import configure_control\n'), ((659, 680), 'logzero.logger.error', 'logger.error', (['"""hello"""'], {}), "('hello')\n", (671, 680), False, 'from logzero import logger\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""数据模型"""
__author__ = 'stone'
import re
import LianJiaWangConfig
class LianJiaWangModel(object):
    """Data model for one house listing scraped from lianjia.com."""

    def __init__(self):
        self.title = ''
        self.des = ''
        self.time = ''
        # Named "bottom" to stay consistent with the HTML markup so
        # attributes can be batch-assigned; semantically these are the
        # listing "features".
        self.bottom = ''
        self.price = ''
        self.__href = ''

    def __str__(self):
        return '%8s: %s\n' \
               '%8s: %s\n' \
               '%8s: %s\n' \
               '%8s: %s\n' \
               '%8s: %s\n' \
               '%8s: %s\n' % \
               ('title', self.title,
                'des', self.des,
                'time', self.time,
                'bottom', self.bottom,
                'price', self.price,
                'href', self.href())

    __repr__ = __str__

    def set_href_with_starttag(self, starttag):
        """Extract the relative link from an HTML start tag and store it.

        Exactly one ``href="..."`` match is required; zero or multiple
        matches are treated as malformed input and ignored.
        """
        striped = str(starttag).strip()
        if len(striped) > 0:
            pattern = re.compile('href="(.+?)"')
            result = re.findall(pattern, striped)
            # More than one match would be ambiguous, so require exactly one.
            if len(result) == 1:
                self.__href = str(result[0])
                # Strip only a *leading* slash before joining with BASE_URL.
                # The previous ``replace('/', '', 1)`` removed the first
                # slash anywhere in the path, corrupting hrefs that did not
                # start with '/'.
                if self.__href.startswith('/'):
                    self.__href = self.__href[1:]

    def href(self):
        """Return the absolute URL for this listing."""
        return LianJiaWangConfig.BASE_URL + self.__href
| [
"re.findall",
"re.compile"
] | [((968, 994), 're.compile', 're.compile', (['"""href="(.+?)\\""""'], {}), '(\'href="(.+?)"\')\n', (978, 994), False, 'import re\n'), ((1016, 1044), 're.findall', 're.findall', (['pattern', 'striped'], {}), '(pattern, striped)\n', (1026, 1044), False, 'import re\n')] |
#!/usr/bin/env python3
from xmlrpc.client import Boolean
from sqlalchemy import false, true
import yaml
import os
import rospy
from ros_pepper_pkg.srv import *
from std_msgs.msg import String
from std_msgs.msg import Bool
def callback(message, tts_service, publisher):
    """
    Topic callback: mute the microphone, forward the received text to the
    TTS service, then re-enable the microphone.

    Parameters
    ----------
    message
        Incoming message; ``message.data`` holds the text to synthesize
    tts_service
        TTS service proxy to call
    publisher
        Publisher of the microphone status topic (Bool)
    """
    # Disable the microphone so the robot does not hear itself speak.
    publisher.publish(False)
    try:
        tts_service(message.data)
    except rospy.ServiceException as e:
        print("Service call failed: %s"%e)
    finally:
        # Always re-enable the microphone, even if the service call raised
        # an unexpected exception; otherwise the mic would stay muted.
        publisher.publish(True)
def main():
    """
    Main function of the interface.
    """
    rospy.logdebug('Tts interface READY.')
    # Block and process subscriber callbacks until the node is shut down.
    rospy.spin()
def init_node(node_name, service_name, topic, mic_status_topic):
    """
    Init the node.

    Parameters
    ----------
    node_name
        Name assigned to the node
    service_name
        Name of the service to call
    topic
        topic on which the messages for the tts are published
    mic_status_topic
        topic on which the microphone on/off status is published
    """
    rospy.init_node(node_name)
    rospy.wait_for_service(service_name)
    tts_proxy = rospy.ServiceProxy(service_name, Tts)
    status_publisher = rospy.Publisher(mic_status_topic, Bool, queue_size=1)

    def _on_message(message):
        callback(message, tts_proxy, status_publisher)

    rospy.Subscriber(topic, String, _on_message)
if __name__ == '__main__':
    # Resolve config.yml relative to this script's own directory.
    REF_PATH = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(REF_PATH,'config.yml')) as file:
        config = yaml.full_load(file)
    # The concrete service node is selected via the ROS parameter server.
    service_tag = rospy.get_param('service_tag')
    node_name = config['nodes']['ttsInterface']
    service_name = config['nodes'][service_tag]
    input_topic = config['topics']['outputText']
    mic_status_topic = config['topics']['micStatus']
main() | [
"yaml.full_load",
"rospy.Publisher",
"rospy.init_node",
"rospy.get_param",
"rospy.ServiceProxy",
"os.path.join",
"rospy.spin",
"os.path.abspath",
"rospy.logdebug",
"rospy.wait_for_service"
] | [((749, 787), 'rospy.logdebug', 'rospy.logdebug', (['"""Tts interface READY."""'], {}), "('Tts interface READY.')\n", (763, 787), False, 'import rospy\n'), ((792, 804), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (802, 804), False, 'import rospy\n'), ((1114, 1140), 'rospy.init_node', 'rospy.init_node', (['node_name'], {}), '(node_name)\n', (1129, 1140), False, 'import rospy\n'), ((1145, 1181), 'rospy.wait_for_service', 'rospy.wait_for_service', (['service_name'], {}), '(service_name)\n', (1167, 1181), False, 'import rospy\n'), ((1200, 1237), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['service_name', 'Tts'], {}), '(service_name, Tts)\n', (1218, 1237), False, 'import rospy\n'), ((1265, 1318), 'rospy.Publisher', 'rospy.Publisher', (['mic_status_topic', 'Bool'], {'queue_size': '(1)'}), '(mic_status_topic, Bool, queue_size=1)\n', (1280, 1318), False, 'import rospy\n'), ((1634, 1664), 'rospy.get_param', 'rospy.get_param', (['"""service_tag"""'], {}), "('service_tag')\n", (1649, 1664), False, 'import rospy\n'), ((1490, 1515), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1505, 1515), False, 'import os\n'), ((1594, 1614), 'yaml.full_load', 'yaml.full_load', (['file'], {}), '(file)\n', (1608, 1614), False, 'import yaml\n'), ((1531, 1567), 'os.path.join', 'os.path.join', (['REF_PATH', '"""config.yml"""'], {}), "(REF_PATH, 'config.yml')\n", (1543, 1567), False, 'import os\n')] |
import sys
import random
from test_base import *
class TestBlockLD(TestBase):
  """Randomized block load/store trace generator."""

  def generate(self):
    """Emit 50000 random block operations over random tags and set indices."""
    self.clear_tag()
    for _ in range(50000):
      # Draw order matters for RNG reproducibility: op kind, tag, index.
      do_store = random.randint(0,1)
      tag = random.randint(0, 15)
      index = random.randint(0,self.sets_p-1)
      target = self.get_addr(tag,index)
      if do_store:
        self.send_block_st(target)
      else:
        self.send_block_ld(target)
    self.tg.done()

  def send_block_st(self, addr):
    """Issue one store-word per word of the block containing ``addr``."""
    block_bytes = self.block_size_in_words_p * 4
    base = addr - (addr % block_bytes)
    for word in range(self.block_size_in_words_p):
      self.send_sw(base + word * 4)
# Script entry point: build the generator and emit the whole test stream.
if __name__ == "__main__":
  t = TestBlockLD()
  t.generate()
| [
"random.randint"
] | [((176, 196), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (190, 196), False, 'import random\n'), ((208, 229), 'random.randint', 'random.randint', (['(0)', '(15)'], {}), '(0, 15)\n', (222, 229), False, 'import random\n'), ((244, 278), 'random.randint', 'random.randint', (['(0)', '(self.sets_p - 1)'], {}), '(0, self.sets_p - 1)\n', (258, 278), False, 'import random\n')] |
import sys
import json
import nltk
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize, sent_tokenize
class Partsofspeech():
    """Thin wrapper around NLTK part-of-speech tagging."""

    @staticmethod
    def pos(txt):
        """Tokenize *txt* and return a list of (token, POS-tag) pairs.

        The original definition omitted ``self``, so it only worked when
        invoked directly on the class; ``@staticmethod`` keeps that call
        form working and additionally allows instance calls.
        """
        tokens = nltk.word_tokenize(txt)
        return (nltk.pos_tag(tokens))
# stop_words = set(stopwords.words('english')) | [
"nltk.pos_tag",
"nltk.word_tokenize"
] | [((202, 225), 'nltk.word_tokenize', 'nltk.word_tokenize', (['txt'], {}), '(txt)\n', (220, 225), False, 'import nltk\n'), ((242, 262), 'nltk.pos_tag', 'nltk.pos_tag', (['tokens'], {}), '(tokens)\n', (254, 262), False, 'import nltk\n')] |
#!/bin/python3
# author: <NAME>
import sys
import time
# Progress-line templates: description first vs. counter first.
basic_format = '{desc} {n:02d}/{total:02d} [{elapsed}/{remaining}] [{item_duration}/item]'
reverse_format = '{n:02d}/{total:02d} {desc} [{elapsed}/{remaining}] [{item_duration}/item]'


class Line(object):
    """Minimal single-line progress reporter.

    On a TTY the line is redrawn in place with ``\\r``; on a non-TTY file
    each update is written as its own line (suitable for log files).
    """

    def __init__(self, iterable=None, total=None, format=reverse_format, file=sys.stdout, desc='', ncols=None, tty=None):
        self.iterable = iterable
        # Infer the total from the iterable when it supports len().
        if total is None and iterable is not None:
            try:
                total = len(iterable)
            except (TypeError, AttributeError):
                total = None
        self.total = total
        self.format = format
        if tty is None:
            try:
                tty = file.isatty()
            except Exception:
                # File-like objects without isatty() (or whose isatty()
                # raises) are treated as non-interactive. The previous bare
                # ``except:`` also swallowed KeyboardInterrupt/SystemExit.
                tty = False
        self.tty = tty
        self.file = file
        self.n = 0
        self.desc = desc
        self.start_time = time.time()
        self.ncols = ncols or 80

    def start(self):
        """Reset the clock and draw the initial (zero-progress) line."""
        self.start_time = time.time()
        self.update(0)

    def stop(self):
        """Draw a final refresh and terminate the in-place line on a TTY."""
        self.update(0)
        if self.tty:
            self.file.write('\n')
            self.file.flush()

    # Alias so the object can be closed like a file/stream.
    close = stop

    @property
    def duration(self):
        """Seconds elapsed since ``start_time`` (0 if no clock was set)."""
        if self.start_time:
            return time.time() - self.start_time
        return 0

    def update(self, n=1):
        """Advance the counter by *n* and redraw the status line."""
        self.n += n
        # Placeholders used until enough data exists to compute real values.
        elapsed = '??:??'
        remaining = '??:??'
        item_duration = '??:??'
        if self.start_time:
            elapsed = self.format_interval(int(self.duration))
            if self.total and self.n:
                item_duration = self.format_interval(int(
                    self.duration / self.n
                ))
                if self.total > self.n:
                    # Naive linear extrapolation of the remaining time.
                    remaining = self.format_interval(int(
                        (self.duration / self.n) * (self.total - self.n)
                    ))
                else:
                    remaining = '00:00'
        kwargs = dict(
            desc=self.desc,
            total=self.total,
            n=self.n,
            start_time=self.start_time,
            item_duration=item_duration,
            duration=self.duration,
            elapsed=elapsed,
            remaining=remaining,
        )
        msg = self.format.format(**kwargs)
        # Pad the message to a fixed width so a shorter redraw fully
        # overwrites the previous one.
        fmt = '{msg:<%ds}' % self.ncols
        if self.tty:
            self.file.write('\r' + fmt.format(msg=msg))
            self.file.flush()
        else:
            self.file.write(fmt.format(msg=msg) + '\n')

    @staticmethod
    def format_interval(t):
        """
        Formats a number of seconds as a clock time, [H:]MM:SS

        Parameters
        ----------
        t  : int
            Number of seconds.

        Returns
        -------
        out  : str
            [H:]MM:SS
        """
        mins, s = divmod(int(t), 60)
        h, m = divmod(mins, 60)
        if h:
            return '{0:d}:{1:02d}:{2:02d}'.format(h, m, s)
        else:
            return '{0:02d}:{1:02d}'.format(m, s)
| [
"time.time"
] | [((902, 913), 'time.time', 'time.time', ([], {}), '()\n', (911, 913), False, 'import time\n'), ((995, 1006), 'time.time', 'time.time', ([], {}), '()\n', (1004, 1006), False, 'import time\n'), ((1263, 1274), 'time.time', 'time.time', ([], {}), '()\n', (1272, 1274), False, 'import time\n')] |
import game_framework
import pico2d
import start_state
# Standard pico2d lifecycle: open the canvas, run the game loop from its
# start state, then tear the canvas down.
pico2d.open_canvas()
game_framework.run(start_state)
pico2d.close_canvas()
| [
"game_framework.run",
"pico2d.close_canvas",
"pico2d.open_canvas"
] | [((57, 77), 'pico2d.open_canvas', 'pico2d.open_canvas', ([], {}), '()\n', (75, 77), False, 'import pico2d\n'), ((78, 109), 'game_framework.run', 'game_framework.run', (['start_state'], {}), '(start_state)\n', (96, 109), False, 'import game_framework\n'), ((110, 131), 'pico2d.close_canvas', 'pico2d.close_canvas', ([], {}), '()\n', (129, 131), False, 'import pico2d\n')] |
import os
import subprocess
from pathlib import Path
from ..views.viewhelper import delay_refresh_detail
from ..helper.config import config
def edit(filepath: Path, loop):
    """Open *filepath* in the user's $EDITOR (vi/vim or sublime)."""
    if isinstance(filepath, str):
        filepath = Path(filepath)
    editor = os.environ.get('EDITOR', 'vi').lower()
    if editor in ('vi', 'vim'):
        command = editor + ' ' + str(filepath)
        previous_cwd = Path.cwd()
        # Run the editor from the file's directory so relative paths
        # resolve next to the edited file.
        os.chdir(filepath.parent)
        if config.tmux_support and is_inside_tmux():
            open_in_new_tmux_window(command)
        else:
            subprocess.call(command, shell=True)
            delay_refresh_detail(loop)
        os.chdir(previous_cwd)
    elif editor == 'sublime':
        subprocess.call('subl ' + str(filepath), shell=True)
def is_inside_tmux():
    """Return True when running inside a tmux session (TMUX env var set)."""
    return os.environ.get('TMUX') is not None
def open_in_new_tmux_window(edit_cmd):
    """Run *edit_cmd* in a fresh tmux pane beside the current one."""
    # Close any other panes first so the detail pane is the only pane.
    try:
        pane_count = int(subprocess.check_output("tmux list-panes | wc -l", shell=True))
        if pane_count > 1:
            subprocess.check_call("tmux kill-pane -a", shell=True)
    except Exception:
        # Best effort: if tmux introspection fails, just split anyway.
        pass

    os.system("tmux split-window -h")
    os.system("tmux send-keys -t right '%s' C-m" % edit_cmd)
| [
"subprocess.check_output",
"pathlib.Path",
"pathlib.Path.cwd",
"subprocess.check_call",
"os.environ.get",
"os.chdir",
"subprocess.call",
"os.system"
] | [((1274, 1288), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (1283, 1288), False, 'import os\n'), ((1349, 1363), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (1358, 1363), False, 'import os\n'), ((227, 241), 'pathlib.Path', 'Path', (['filepath'], {}), '(filepath)\n', (231, 241), False, 'from pathlib import Path\n'), ((417, 427), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (425, 427), False, 'from pathlib import Path\n'), ((436, 461), 'os.chdir', 'os.chdir', (['filepath.parent'], {}), '(filepath.parent)\n', (444, 461), False, 'import os\n'), ((662, 689), 'os.chdir', 'os.chdir', (['current_directory'], {}), '(current_directory)\n', (670, 689), False, 'import os\n'), ((1016, 1078), 'subprocess.check_output', 'subprocess.check_output', (['"""tmux list-panes | wc -l"""'], {'shell': '(True)'}), "('tmux list-panes | wc -l', shell=True)\n", (1039, 1078), False, 'import subprocess\n'), ((255, 285), 'os.environ.get', 'os.environ.get', (['"""EDITOR"""', '"""vi"""'], {}), "('EDITOR', 'vi')\n", (269, 285), False, 'import os\n'), ((582, 614), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (597, 614), False, 'import subprocess\n'), ((785, 817), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (800, 817), False, 'import subprocess\n'), ((1147, 1201), 'subprocess.check_call', 'subprocess.check_call', (['"""tmux kill-pane -a"""'], {'shell': '(True)'}), "('tmux kill-pane -a', shell=True)\n", (1168, 1201), False, 'import subprocess\n')] |
import json
import sys
import os
from algoliasearch.search_client import SearchClient
# This script updates the product list in products/data/product_list.json
# This is done by downloading the data from algolia index and replacing the product_list.json with the new content.
def main():
    """Download the Algolia products index and overwrite product_list.json.

    Reads the admin API key from the ALGOLIA_ADMIN_API_KEY environment
    variable (the application id '9SXIDIVU1E' is public) and replaces
    products/data/product_list.json with the full index content.
    """
    product_list_filepath = os.getcwd() + "/products/data/product_list.json"
    print("Updating the product list: " + product_list_filepath)
    client = SearchClient.create('9SXIDIVU1E', os.environ['ALGOLIA_ADMIN_API_KEY'])
    index = client.init_index('products_index')
    # browse_objects pages through the entire index (search would cap hits)
    hits = list(index.browse_objects({'query': ''}))
    with open(product_list_filepath, 'w') as f:
        json.dump(hits, f, indent=4)
    # The with-statement guarantees the file is closed at this point; the
    # original "file not closed" else-branch was unreachable dead code.
    print("Finished updating " + product_list_filepath)
if __name__ == '__main__':
main()
| [
"algoliasearch.search_client.SearchClient.create",
"json.dump",
"os.getcwd"
] | [((458, 528), 'algoliasearch.search_client.SearchClient.create', 'SearchClient.create', (['"""9SXIDIVU1E"""', "os.environ['ALGOLIA_ADMIN_API_KEY']"], {}), "('9SXIDIVU1E', os.environ['ALGOLIA_ADMIN_API_KEY'])\n", (477, 528), False, 'from algoliasearch.search_client import SearchClient\n'), ((325, 336), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (334, 336), False, 'import os\n'), ((736, 764), 'json.dump', 'json.dump', (['hits', 'f'], {'indent': '(4)'}), '(hits, f, indent=4)\n', (745, 764), False, 'import json\n')] |
#!/usr/bin/env python
import asyncio
import http.server
import io
import json
import logging
import os
import queue
import random
import re
import sys
import threading

import numpy
import websockets

from obspy.core.event.base import ResourceIdentifier
from obspy.core.event.catalog import Catalog
from obspy.core.stream import Stream
from obspy.core.utcdatetime import UTCDateTime
from obspy.core.util.attribdict import AttribDict
from obspy.io.quakeml.core import Pickler
# Module-level logger for this file: INFO level, emitting to stderr via a
# plain StreamHandler.
logger = logging.getLogger('viewobspy')
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class ServeObsPy():
    """
    Serves Stream, Event and Inventory over http, allowing a web browser to be
    the display for plotting. JSONApi is used for events and inventory and miniseed
    is used for waveforms.
    .. rubric:: Basic Usage
    >>> import serveobspy
    >>> from obspy.clients.fdsn import Client
    >>> import obspy
    >>> serveSeis = serveobspy.ServeObsPy('www')
    >>> serveSeis.serveData()
    >>> start = obspy.UTCDateTime('2019-10-31T01:11:19')
    >>> end = start + 20*60
    >>> st = client.get_waveforms("IU", "SNZO", "00", "BH?", start, start + 20 * 60)
    >>> serveSeis.stream=st
    >>> quakes = client.get_events(starttime=start - 1*60, endtime=start + 20*60, minmagnitude=5)
    >>> serveSeis.quake=quakes[0]
    >>> serveSeis.inventory = client.get_stations(network="IU", station="SNZO",
                                                  location="00", channel="BH?",
                                                  level="response",
                                                  starttime=start,
                                                  endtime=end)
    and then open a web browser to http://localhost:8000
    """
    # Minimal StationXML document served when no inventory has been set.
    FAKE_EMPTY_STATIONXML = '<?xml version="1.0" encoding="ISO-8859-1"?> <FDSNStationXML xmlns="http://www.fdsn.org/xml/station/1" schemaVersion="1.0" xsi:schemaLocation="http://www.fdsn.org/xml/station/1 http://www.fdsn.org/xml/station/fdsn-station-1.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:iris="http://www.fdsn.org/xml/station/1/iris"> </FDSNStationXML>'
    LOCALHOST = '127.0.0.1'
    DEFAULT_HTTP = 8000
    DEFAULT_WS = 8001

    def __init__(self, webdir, host=None, port=None, wsport=None):
        """Create a server that serves static files from *webdir*.

        :param webdir: directory of static web files; must exist if not None
        :param host: bind address, defaults to 127.0.0.1
        :param port: http port, defaults to 8000
        :param wsport: websocket port, defaults to 8001
        :raises Exception: if webdir is given but is not a directory
        """
        self.webdir = webdir
        self.__host = host
        self.__port = port
        self.__wsport = wsport
        self.dataset = self.initEmptyDataset()
        self.httpServer = None
        self.wsServer = None
        if webdir is not None and not os.path.isdir(webdir):
            raise Exception('webdir must be an existing directory: {}'.format(webdir))

    def initEmptyDataset(self):
        """
        Creates an empty dataset.
        """
        return {
            "stream": None,
            "bychan": {},
            "title": "tytle",  # NOTE(review): looks like a typo for "title" -- kept for compatibility
            "quake": None,
            "inventory": None
        }

    def _notifyUpdate(self, what):
        """Notify connected web clients that part of the dataset changed.

        BUG FIX: safe to call before serveData(); previously setting
        stream/quake/etc. before the websocket server existed raised
        AttributeError on None.
        """
        if self.wsServer is not None:
            self.wsServer.notifyUpdate(what)

    @property
    def host(self):
        if self.__host is None:
            return ServeObsPy.LOCALHOST
        return self.__host

    @property
    def port(self):
        if self.__port is None:
            return ServeObsPy.DEFAULT_HTTP
        return self.__port

    @property
    def wsport(self):
        if self.__wsport is None:
            return ServeObsPy.DEFAULT_WS
        return self.__wsport

    def datasetAsJsonApi(self):
        """Build a JSON:API document describing the current dataset.

        Seismograms, the quake and the inventory are exposed as
        relationships; seismogram ids are the Python object ids of the
        per-channel Stream objects (matched again in sendMseed/sendStats).
        """
        jsonapi = {
            'data': {
                "type": "seisdata",
                "id": "1",
                "attributes": {
                    "title": "JSON:API paints my bikeshed!"
                },
                "relationships": {
                    "seismograms": {
                        "data": [
                        ]
                    },
                    "quake": {
                        "data": {}
                    },
                    "inventory": {
                        "data": {}
                    }
                }
            }
        }
        jsonapi['data']['attributes']['title'] = self.dataset['title']
        if 'quake' in self.dataset and self.dataset['quake'] is not None:
            quakeId = extractEventId(self.dataset['quake'])
            jsonapi['data']['relationships']['quake']['data'] = {
                'type': 'quake',
                'id': quakeId
            }
        if 'inventory' in self.dataset and self.dataset['inventory'] is not None:
            id_num = 1  # should do something more, maybe break out by channel?
            jsonapi['data']['relationships']['inventory']['data'] = {
                'type': 'inventory',
                'id': id_num
            }
        if 'bychan' in self.dataset and self.dataset['bychan'] is not None:
            # stream split by channel to group segments into a single js seismogram
            seisjson = jsonapi['data']['relationships']['seismograms']['data']
            for st in self.dataset['bychan']:
                seisjson.append({
                    'type': 'seismogram',
                    'id': id(st)
                })
        return jsonapi

    def __streamToSeismogramMap(self, stream):
        """Group traces by channel id; each group becomes one Stream that maps
        to one seismogram on the javascript side."""
        byChan = {}
        for tr in stream:
            if tr.id not in byChan:
                byChan[tr.id] = []
            byChan[tr.id].append(tr)
        # do not shadow builtin id(), which is used elsewhere for object identity
        return [Stream(traces=trlist) for trlist in byChan.values()]

    def serveData(self):
        """Start the http and websocket servers; no-op if already serving."""
        logger.debug("before http server")
        if self.httpServer:
            logger.warning("already serving...")
            return
        self.httpServer = ObsPyServer(self.__createRequestHandlerClass(), host=self.host, port=self.port)
        self.httpServer.start()
        logger.info("http server started at http://{}:{:d}".format(self.host, self.port))
        self.wsServer = ObsPyWebSocket(host=self.host, port=self.wsport)
        self.wsServer.start()
        logger.info("websocket server started ws://{}:{:d}".format(self.host, self.wsport))

    @property
    def stream(self):
        return self.dataset["stream"]

    @stream.setter
    def stream(self, stream):
        self.dataset["stream"] = stream
        self.dataset["bychan"] = self.__streamToSeismogramMap(stream)
        self._notifyUpdate('stream')

    @stream.deleter
    def stream(self):
        self.dataset["stream"] = None
        self.dataset["bychan"] = []
        self._notifyUpdate('dataset')

    @property
    def title(self):
        return self.dataset["title"]

    @title.setter
    def title(self, title):
        self.dataset["title"] = title
        self._notifyUpdate('title')

    @title.deleter
    def title(self):
        self.title = ""

    @property
    def quake(self):
        return self.dataset["quake"]

    @quake.setter
    def quake(self, quake):
        self.dataset["quake"] = quake
        self._notifyUpdate('quake')

    @quake.deleter
    def quake(self):
        self.quake = None

    @property
    def inventory(self):
        return self.dataset["inventory"]

    @inventory.setter
    def inventory(self, inventory):
        self.dataset["inventory"] = inventory
        self._notifyUpdate('inventory')

    @inventory.deleter
    def inventory(self):
        self.inventory = None

    def refreshAll(self):
        """Rebuild the per-channel grouping and tell clients to reload."""
        self.dataset["bychan"] = self.__streamToSeismogramMap(self.stream)
        self._notifyUpdate('refreshAll')

    def __createRequestHandlerClass(self):
        """Create the request handler class bound to this server instance."""
        class ObsPyRequestHandler(http.server.SimpleHTTPRequestHandler):
            serveSeis = self  # class variable giving handlers access to the ServeObsPy instance

            def __init__(self, request, client_address, server):
                super().__init__(request, client_address, server, directory=ObsPyRequestHandler.serveSeis.webdir)

            def end_headers(self):
                # permissive CORS so pages served elsewhere can fetch the data
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Access-Control-Allow-Headers', "X-Requested-With, Content-Type, Origin, Authorization, Accept, Client-Security-Token, Accept-Encoding")
                self.send_header('Access-Control-Allow-Methods', "POST, GET, OPTIONS, DELETE, PUT")
                http.server.SimpleHTTPRequestHandler.end_headers(self)

            def do_GET(self):
                try:
                    logger.debug("do_GET {}".format(self.path))
                    if self.path == '/dataset':
                        self.sendDataset()
                    elif self.path.startswith('/seismograms/'):
                        self.sendSeismogram()
                    elif self.path.startswith('/quake/'):
                        self.sendQuake()
                    elif self.path.startswith('/inventory'):
                        self.sendInventory()
                    elif self.path == '/favicon.ico':
                        super().do_GET()
                    else:
                        super().do_GET()
                except Exception as e:
                    self.send_error(404, "seis item not found")
                    logger.error(e)

            def sendDataset(self):
                """
                Serve dataset as JsonAPI
                """
                content = json.dumps(ObsPyRequestHandler.serveSeis.datasetAsJsonApi())
                self.send_response(200)
                self.send_header("Content-Length", len(content))
                self.send_header("Content-Type", "application/vnd.api+json")
                self.end_headers()
                self.wfile.write(content.encode())

            def sendMseed(self, seisid):
                """
                Serve seismogram as miniseed
                """
                bychan = ObsPyRequestHandler.serveSeis.dataset['bychan']
                try:
                    seis = next(s for s in bychan if id(s) == seisid)
                    buf = io.BytesIO()
                    seis.write(buf, format='MSEED')
                    self.send_response(200)
                    self.send_header("Content-Length", buf.getbuffer().nbytes)
                    self.send_header("Content-Type", "application/vnd.fdsn.mseed")
                    self.end_headers()
                    self.wfile.write(buf.getbuffer())
                except StopIteration:
                    self.send_error(404, "mseed seismogram not found, {} ".format(seisid))

            def sendStats(self, seisid):
                """
                Serve stats for seismogram as simple JSON
                """
                bychan = ObsPyRequestHandler.serveSeis.dataset['bychan']
                try:
                    seis = next(s for s in bychan if id(s) == seisid)
                    content = json.dumps(seis[0].stats, cls=StatsEncoder)
                    self.send_response(200)
                    self.send_header("Content-Length", len(content))
                    self.send_header("Content-Type", "application/json")
                    self.end_headers()
                    self.wfile.write(content.encode())
                except StopIteration as e:
                    logger.warning(e)
                    self.send_error(404, "stats seismogram not found {} {}".format(seisid, e))
                except Exception as e:
                    logger.warning(e)
                    self.send_error(404, "stats seismogram something bad happened {} {}".format(seisid, e))

            def sendSeismogram(self):
                """Dispatch /seismograms/<id>/<what> to mseed or stats."""
                m = re.match(r'/seismograms/(\d+)/(\w+)', self.path)
                if (m.group(2) == 'mseed'):
                    self.sendMseed(int(m.group(1)))
                elif m.group(2) == 'stats':
                    self.sendStats(int(m.group(1)))
                else:
                    raise Exception("unknown seismogram url {}".format(self.path))

            def sendQuake(self):
                """
                Serve quake as QuakeML
                """
                splitPath = self.path.split('/')
                quake_id = splitPath[2]  # renamed from "id" to avoid shadowing the builtin
                resource_id = ResourceIdentifier(quake_id)
                catalog = Catalog([ObsPyRequestHandler.serveSeis.dataset['quake']], resource_id=resource_id)
                buf = io.BytesIO()
                catalog.write(buf, format="QUAKEML")
                self.send_response(200)
                self.send_header("Content-Length", buf.getbuffer().nbytes)
                self.send_header("Content-Type", "application/xml")
                self.end_headers()
                self.wfile.write(buf.getbuffer())

            def sendInventory(self):
                """
                Serve inventory as StationXML
                """
                buf = io.BytesIO()
                inventory = ObsPyRequestHandler.serveSeis.dataset['inventory']
                if inventory is not None:
                    inventory.write(buf, format="STATIONXML")
                else:
                    # BUG FIX: the constant lives on ServeObsPy (a bare
                    # FAKE_EMPTY_STATIONXML raised NameError) and BytesIO
                    # requires bytes, not str.
                    buf.write(ServeObsPy.FAKE_EMPTY_STATIONXML.encode())
                self.send_response(200)
                self.send_header("Content-Length", buf.getbuffer().nbytes)
                self.send_header("Content-Type", "application/xml")
                self.end_headers()
                self.wfile.write(buf.getbuffer())

        http.server.SimpleHTTPRequestHandler.extensions_map['.js'] = 'text/javascript'
        return ObsPyRequestHandler
ANSS_CATALOG_NS = "http://anss.org/xmlns/catalog/0.1"

def extractEventId(quakeml):
    """Extract a usable event id from a QuakeML Event object.

    Tries, in order: the ANSS catalog extra attributes (eventsource+eventid
    as used by USGS, NCEDC and SCEDC), an ``eventid=`` or ``evid=`` query
    parameter inside the resource id, the raw resource id itself, and
    finally a random placeholder id.
    """
    # some events have no .extra attribute at all; treat that as empty
    extra = getattr(quakeml, 'extra', None) or {}
    usgsExtras = {k: v for k, v in extra.items() if v.namespace == ANSS_CATALOG_NS}
    if 'eventid' in usgsExtras and 'eventsource' in usgsExtras:
        # USGS, NCEDC and SCEDC use concat of eventsource and eventid as eventid, sigh...
        return "{}{}".format(usgsExtras['eventsource'].value, usgsExtras['eventid'].value)
    if quakeml.resource_id:
        # raw strings so \w and \d are regex classes, not invalid escapes
        eventIdPat = re.compile(r'eventid=([\w\d]+)')
        m = eventIdPat.search(quakeml.resource_id.id)
        if m:
            return m.group(1)
        evidPat = re.compile(r'evid=([\w\d]+)')
        m = evidPat.search(quakeml.resource_id.id)
        if m:
            return m.group(1)
        return quakeml.resource_id.id
    # BUG FIX: the original concatenated str + int (TypeError) and "random"
    # was never imported (NameError); random is now imported at module level.
    return "unknownEventId" + str(random.randrange(100000))
class StatsEncoder(json.JSONEncoder):
    """JSON encoder for obspy Stats objects: converts numpy scalars and
    arrays, UTCDateTime, AttribDict and complex values to plain JSON types."""

    def default(self, obj):
        # numpy scalar and array types first
        if isinstance(obj, numpy.integer):
            return int(obj)
        if isinstance(obj, numpy.floating):
            return float(obj)
        if isinstance(obj, numpy.ndarray):
            return obj.tolist()
        # obspy-specific types
        if isinstance(obj, UTCDateTime):
            return obj.isoformat()
        if isinstance(obj, AttribDict):
            # shallow copy into a plain dict so json can serialize it
            return {key: value for key, value in obj.items()}
        if isinstance(obj, complex):
            return [obj.real, obj.imag]
        # defer to the base class, which raises TypeError
        return super().default(obj)
class ObsPyServer(threading.Thread):
    """Daemon thread that runs a blocking HTTP server using the supplied
    request handler class, falling back to the ServeObsPy defaults for
    host and port."""

    def __init__(self, handler_class, host=None, port=None):
        super().__init__()
        self.daemon = True
        self.handler_class = handler_class
        self.__host = host
        self.__port = port

    @property
    def host(self):
        """Configured bind address, or the localhost default."""
        return ServeObsPy.LOCALHOST if self.__host is None else self.__host

    @property
    def port(self):
        """Configured port, or the default HTTP port."""
        return ServeObsPy.DEFAULT_HTTP if self.__port is None else self.__port

    def run(self, server_class=http.server.ThreadingHTTPServer):
        """Thread entry point: serve forever on (host, port)."""
        address = (self.host, self.port)
        server_class(address, self.handler_class).serve_forever()
class ObsPyWebSocket(threading.Thread):
    """Daemon thread that owns a private asyncio event loop running a
    websockets server, used to push 'something changed' notifications to
    connected browsers. send_json_message() may be called from any thread;
    it hands messages to the loop via run_coroutine_threadsafe."""
    def __init__(self, host=None, port=None):
        threading.Thread.__init__(self)
        self.daemon=True
        self.__host = host
        self.__port = port
        # set of currently connected websocket clients
        self.users = set()
    @property
    def host(self):
        # fall back to localhost when no host was given
        if self.__host is None:
            return ServeObsPy.LOCALHOST
        else:
            return self.__host
    @property
    def port(self):
        # fall back to the default websocket port when none was given
        if self.__port is None:
            return ServeObsPy.DEFAULT_WS
        else:
            return self.__port
    def hello(self):
        # greeting payload sent to every newly connected client
        return json.dumps({'msg': "hi"})
    def notifyUpdate(self, type):
        # tell all clients which named part of the dataset changed
        self.send_json_message({'update': type})
    def send_json_message(self, jsonMessage):
        """Queue a message (str or dict) for delivery to all clients.

        Called from other threads; blocks until the loop has accepted the
        message. NOTE(review): assumes run() has already created
        self.dataQueue and self.loop -- calling earlier raises AttributeError.
        """
        if isinstance(jsonMessage, str):
            jsonMessage = {'msg': jsonMessage};
        if not isinstance(jsonMessage, dict):
            raise ValueError("jsonMessage must be string or dict")
        jsonAsStr = json.dumps(jsonMessage)
        logger.debug("sending '{}'".format(jsonAsStr))
        # hop onto the server's event loop from the calling thread
        future = asyncio.run_coroutine_threadsafe(
            self.dataQueue.put(jsonAsStr),
            self.loop
        )
        result = future.result()
        logger.debug('result of send msg {}'.format(result))
    async def consumer_handler(self, websocket, path):
        # drain (and log) incoming messages so the connection stays healthy
        try:
            while True:
                message = await websocket.recv()
                logger.debug("got message from ws "+message)
        except websockets.exceptions.ConnectionClosedOK:
            pass
        except Exception as e:
            logger.error("consumer_handler exception ", exc_info=e)
        except:
            # bare except still catches BaseException (e.g. CancelledError)
            e = sys.exc_info()[0]
            logger.error('consumer_handler something bad happened ', exc_info=e)
    async def producer_handler(self, websocket, path):
        # forward every queued message to all connected clients
        try:
            while True:
                message = await self.dataQueue.get()
                logger.debug("dataqueue had message {}".format(message))
                if message is None:
                    continue
                if self.users: # asyncio.wait doesn't accept an empty list
                    # NOTE(review): sendOneUser may remove entries from
                    # self.users while this comprehension snapshot is in
                    # flight -- confirm there is no mutation race
                    await asyncio.wait([self.sendOneUser(user, message) for user in self.users])
                    logger.debug("done sending")
                else:
                    logger.debug("no users to send to...")
        except:
            e = sys.exc_info()[0]
            logger.error('producer_handler something bad happened ', exc_info=e)
    async def sendOneUser(self, user, message):
        # send to one client; drop the client if its connection already died
        try:
            await user.send(message)
        except websockets.exceptions.ConnectionClosedError as ee:
            logger.debug("ws conn was closed, removing user ")
            self.users.remove(user)
    async def initWS(self):
        # create the message queue and start the websockets server
        self.dataQueue = asyncio.Queue()
        async def handler(websocket, path):
            # one handler task per client connection
            self.users.add(websocket)
            try:
                await websocket.send(self.hello())
                done, pending = await asyncio.wait([
                    self.consumer_handler(websocket, path),
                    self.producer_handler(websocket, path)
                    ],
                    return_when=asyncio.FIRST_COMPLETED,
                )
            except:
                e = sys.exc_info()[0]
                logger.error('handler something bad happened ', exc_info=e)
            finally:
                self.users.remove(websocket)
                logger.debug('exit handler, remove websocket user')
        self.server = await websockets.serve(handler, self.host, self.port)
    def run(self):
        # thread entry point: spin up a private event loop and run forever
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        asyncio.get_event_loop().run_until_complete(asyncio.ensure_future(self.initWS()))
        asyncio.get_event_loop().run_forever()
        logger.debug("ws end run")
| [
"logging.getLogger",
"threading.Thread.__init__",
"obspy.core.stream.Stream",
"logging.StreamHandler",
"obspy.core.event.catalog.Catalog",
"json.JSONEncoder.default",
"re.compile",
"asyncio.get_event_loop",
"asyncio.new_event_loop",
"json.dumps",
"asyncio.Queue",
"re.match",
"io.BytesIO",
... | [((471, 501), 'logging.getLogger', 'logging.getLogger', (['"""viewobspy"""'], {}), "('viewobspy')\n", (488, 501), False, 'import logging\n'), ((550, 573), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (571, 573), False, 'import logging\n'), ((13750, 13783), 're.compile', 're.compile', (['"""eventid=([\\\\w\\\\d]+)"""'], {}), "('eventid=([\\\\w\\\\d]+)')\n", (13760, 13783), False, 'import re\n'), ((13899, 13929), 're.compile', 're.compile', (['"""evid=([\\\\w\\\\d]+)"""'], {}), "('evid=([\\\\w\\\\d]+)')\n", (13909, 13929), False, 'import re\n'), ((14801, 14836), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'obj'], {}), '(self, obj)\n', (14825, 14836), False, 'import json\n'), ((14944, 14975), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (14969, 14975), False, 'import threading\n'), ((15706, 15737), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (15731, 15737), False, 'import threading\n'), ((16183, 16208), 'json.dumps', 'json.dumps', (["{'msg': 'hi'}"], {}), "({'msg': 'hi'})\n", (16193, 16208), False, 'import json\n'), ((16560, 16583), 'json.dumps', 'json.dumps', (['jsonMessage'], {}), '(jsonMessage)\n', (16570, 16583), False, 'import json\n'), ((18404, 18419), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (18417, 18419), False, 'import asyncio\n'), ((19231, 19255), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (19253, 19255), False, 'import asyncio\n'), ((19264, 19297), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['self.loop'], {}), '(self.loop)\n', (19286, 19297), False, 'import asyncio\n'), ((19143, 19190), 'websockets.serve', 'websockets.serve', (['handler', 'self.host', 'self.port'], {}), '(handler, self.host, self.port)\n', (19159, 19190), False, 'import websockets\n'), ((2481, 2502), 'os.path.isdir', 'os.path.isdir', (['webdir'], {}), '(webdir)\n', (2494, 2502), False, 'import os\n'), ((5326, 
5347), 'obspy.core.stream.Stream', 'Stream', ([], {'traces': 'trlist'}), '(traces=trlist)\n', (5332, 5347), False, 'from obspy.core.stream import Stream\n'), ((11374, 11423), 're.match', 're.match', (['"""/seismograms/(\\\\d+)/(\\\\w+)"""', 'self.path'], {}), "('/seismograms/(\\\\d+)/(\\\\w+)', self.path)\n", (11382, 11423), False, 'import re\n'), ((11947, 11969), 'obspy.core.event.base.ResourceIdentifier', 'ResourceIdentifier', (['id'], {}), '(id)\n', (11965, 11969), False, 'from obspy.core.event.base import ResourceIdentifier\n'), ((11996, 12083), 'obspy.core.event.catalog.Catalog', 'Catalog', (["[ObsPyRequestHandler.serveSeis.dataset['quake']]"], {'resource_id': 'resource_id'}), "([ObsPyRequestHandler.serveSeis.dataset['quake']], resource_id=\n resource_id)\n", (12003, 12083), False, 'from obspy.core.event.catalog import Catalog\n'), ((12100, 12112), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (12110, 12112), False, 'import io\n'), ((12580, 12592), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (12590, 12592), False, 'import io\n'), ((19306, 19330), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (19328, 19330), False, 'import asyncio\n'), ((19396, 19420), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (19418, 19420), False, 'import asyncio\n'), ((9808, 9820), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (9818, 9820), False, 'import io\n'), ((10634, 10677), 'json.dumps', 'json.dumps', (['seis[0].stats'], {'cls': 'StatsEncoder'}), '(seis[0].stats, cls=StatsEncoder)\n', (10644, 10677), False, 'import json\n'), ((17270, 17284), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (17282, 17284), False, 'import sys\n'), ((17988, 18002), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (18000, 18002), False, 'import sys\n'), ((18885, 18899), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (18897, 18899), False, 'import sys\n')] |
#!/usr/bin/env python
# Created by "Thieu" at 00:16, 15/03/2022 ----------%
# Email: <EMAIL> %
# Github: https://github.com/thieu1995 %
# --------------------------------------------------%
from mealpy.bio_based import BBO
from mealpy.optimizer import Optimizer
import numpy as np
import pytest
@pytest.fixture(scope="module")  # scope: Call only 1 time at the beginning
def problem():
    """Module-scoped fixture: a minimal 5-dimensional minimization problem
    (sum-of-squares objective) shared by all tests in this module."""
    def fitness_function(solution):
        # simple convex objective with its minimum at the origin
        return np.sum(solution ** 2)
    problem = {
        "fit_func": fitness_function,
        "lb": [-10, -15, -4, -2, -8],
        "ub": [10, 15, 12, 8, 20],
        "minmax": "min",
        "log_to": None,
    }
    return problem
def test_OriginalBBO_results(problem):
    """OriginalBBO should run end-to-end on the fixture problem and return a
    best position whose length matches the problem dimensionality."""
    # epoch=10, pop_size=50, p_m=0.01, elites=2
    model = BBO.OriginalBBO(problem, 10, 50, 0.01, 2)
    best_position, best_fitness = model.solve()
    assert isinstance(model, Optimizer)
    assert isinstance(best_position, np.ndarray)
    assert len(best_position) == len(problem["lb"])
def test_BaseBBO_results(problem):
    """BaseBBO should run end-to-end on the fixture problem and return a
    best position whose length matches the problem dimensionality."""
    # epoch=10, pop_size=50, p_m=0.01, elites=2
    model = BBO.BaseBBO(problem, 10, 50, 0.01, 2)
    best_position, best_fitness = model.solve()
    assert isinstance(model, Optimizer)
    assert isinstance(best_position, np.ndarray)
    assert len(best_position) == len(problem["lb"])
# NOTE(review): the "problem" value in each parametrize tuple is the fixture
# *function* object (parametrization shadows the module fixture of the same
# name). The tests still pass because the invalid epoch triggers SystemExit
# during validation -- confirm this shadowing is intentional.
@pytest.mark.parametrize("problem, epoch, system_code",
                         [
                             (problem, None, 0),
                             (problem, "hello", 0),
                             (problem, -10, 0),
                             (problem, [10], 0),
                             (problem, (0, 9), 0),
                             (problem, 0, 0),
                             (problem, float("inf"), 0),
                         ])
def test_epoch_BBO(problem, epoch, system_code):
    """Invalid epoch values must make both BBO variants exit with code 0."""
    pop_size = 50
    p_m = 0.01
    elites = 2
    algorithms = [BBO.OriginalBBO, BBO.BaseBBO]
    for algorithm in algorithms:
        with pytest.raises(SystemExit) as e:
            model = algorithm(problem, epoch, pop_size, p_m, elites)
        assert e.type == SystemExit
        assert e.value.code == system_code
# NOTE(review): as in test_epoch_BBO, "problem" here is the fixture function
# object, not the problem dict -- the invalid pop_size triggers the exit.
@pytest.mark.parametrize("problem, pop_size, system_code",
                         [
                             (problem, None, 0),
                             (problem, "hello", 0),
                             (problem, -10, 0),
                             (problem, [10], 0),
                             (problem, (0, 9), 0),
                             (problem, 0, 0),
                             (problem, float("inf"), 0),
                         ])
def test_pop_size_BBO(problem, pop_size, system_code):
    """Invalid pop_size values must make both BBO variants exit with code 0."""
    epoch = 10
    p_m = 0.01
    elites = 2
    algorithms = [BBO.OriginalBBO, BBO.BaseBBO]
    for algorithm in algorithms:
        with pytest.raises(SystemExit) as e:
            model = algorithm(problem, epoch, pop_size, p_m, elites)
        assert e.type == SystemExit
        assert e.value.code == system_code
# NOTE(review): "problem" in the tuples is the fixture function object; the
# out-of-range mutation probability p_m is what triggers SystemExit.
@pytest.mark.parametrize("problem, p_m, system_code",
                         [
                             (problem, None, 0),
                             (problem, "hello", 0),
                             (problem, -1.0, 0),
                             (problem, [10], 0),
                             (problem, (0, 9), 0),
                             (problem, 0, 0),
                             (problem, 1, 0),
                             (problem, 1.1, 0),
                             (problem, -0.01, 0),
                         ])
def test_p_m_BBO(problem, p_m, system_code):
    """Invalid mutation probabilities must make both BBO variants exit."""
    epoch = 10
    pop_size = 50
    elites = 2
    algorithms = [BBO.OriginalBBO, BBO.BaseBBO]
    for algorithm in algorithms:
        with pytest.raises(SystemExit) as e:
            model = algorithm(problem, epoch, pop_size, p_m, elites)
        assert e.type == SystemExit
        assert e.value.code == system_code
# NOTE(review): "problem" in the tuples is the fixture function object; the
# invalid elites count is what triggers SystemExit.
@pytest.mark.parametrize("problem, elites, system_code",
                         [
                             (problem, None, 0),
                             (problem, "hello", 0),
                             (problem, -1.0, 0),
                             (problem, [10], 0),
                             (problem, (0, 9), 0),
                             (problem, 1, 0),
                             (problem, 50, 0),
                             (problem, 100, 0),
                             (problem, 1.6, 0),
                         ])
def test_elites_BBO(problem, elites, system_code):
    """Invalid elites counts must make both BBO variants exit with code 0."""
    epoch = 10
    pop_size = 50
    p_m = 0.01
    algorithms = [BBO.OriginalBBO, BBO.BaseBBO]
    for algorithm in algorithms:
        with pytest.raises(SystemExit) as e:
            model = algorithm(problem, epoch, pop_size, p_m, elites)
        assert e.type == SystemExit
        assert e.value.code == system_code
| [
"mealpy.bio_based.BBO.OriginalBBO",
"pytest.mark.parametrize",
"numpy.sum",
"pytest.raises",
"pytest.fixture",
"mealpy.bio_based.BBO.BaseBBO"
] | [((485, 515), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (499, 515), False, 'import pytest\n'), ((3242, 3489), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""problem, p_m, system_code"""', "[(problem, None, 0), (problem, 'hello', 0), (problem, -1.0, 0), (problem, [\n 10], 0), (problem, (0, 9), 0), (problem, 0, 0), (problem, 1, 0), (\n problem, 1.1, 0), (problem, -0.01, 0)]"], {}), "('problem, p_m, system_code', [(problem, None, 0), (\n problem, 'hello', 0), (problem, -1.0, 0), (problem, [10], 0), (problem,\n (0, 9), 0), (problem, 0, 0), (problem, 1, 0), (problem, 1.1, 0), (\n problem, -0.01, 0)])\n", (3265, 3489), False, 'import pytest\n'), ((4160, 4408), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""problem, elites, system_code"""', "[(problem, None, 0), (problem, 'hello', 0), (problem, -1.0, 0), (problem, [\n 10], 0), (problem, (0, 9), 0), (problem, 1, 0), (problem, 50, 0), (\n problem, 100, 0), (problem, 1.6, 0)]"], {}), "('problem, elites, system_code', [(problem, None, 0),\n (problem, 'hello', 0), (problem, -1.0, 0), (problem, [10], 0), (problem,\n (0, 9), 0), (problem, 1, 0), (problem, 50, 0), (problem, 100, 0), (\n problem, 1.6, 0)])\n", (4183, 4408), False, 'import pytest\n'), ((965, 1019), 'mealpy.bio_based.BBO.OriginalBBO', 'BBO.OriginalBBO', (['problem', 'epoch', 'pop_size', 'p_m', 'elites'], {}), '(problem, epoch, pop_size, p_m, elites)\n', (980, 1019), False, 'from mealpy.bio_based import BBO\n'), ((1321, 1371), 'mealpy.bio_based.BBO.BaseBBO', 'BBO.BaseBBO', (['problem', 'epoch', 'pop_size', 'p_m', 'elites'], {}), '(problem, epoch, pop_size, p_m, elites)\n', (1332, 1371), False, 'from mealpy.bio_based import BBO\n'), ((626, 647), 'numpy.sum', 'np.sum', (['(solution ** 2)'], {}), '(solution ** 2)\n', (632, 647), True, 'import numpy as np\n'), ((2217, 2242), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (2230, 2242), False, 'import pytest\n'), 
((3059, 3084), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (3072, 3084), False, 'import pytest\n'), ((3977, 4002), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (3990, 4002), False, 'import pytest\n'), ((4903, 4928), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (4916, 4928), False, 'import pytest\n')] |
from io import StringIO
from hypothesis import given, strategies
from intentionally_blank import api
def test_format_empty_with_empty_string():
    """The identity format must map an empty input stream to empty output."""
    with StringIO() as in_file, StringIO() as out_file:
        api.format_from_file_to_file(in_file, out_file, format_names=["identity"])
        assert out_file.getvalue() == ""
def test_airtravel_empty_unchanged(airtravel_empty_file):
    """Identity formatting must leave the 'empty' airtravel fixture unchanged."""
    expected = airtravel_empty_file.read()
    with StringIO(expected) as source, StringIO() as sink:
        api.format_from_file_to_file(source, sink, format_names=["identity"])
        assert sink.getvalue() == expected
def test_airtravel_ragged_unchanged(airtravel_ragged_file):
    """Identity formatting must leave the 'ragged' airtravel fixture unchanged."""
    expected = airtravel_ragged_file.read()
    with StringIO(expected) as source, StringIO() as sink:
        api.format_from_file_to_file(source, sink, format_names=["identity"])
        assert sink.getvalue() == expected
@given(text=strategies.text())
def test_text_is_unchanged(text):
    # Property-based test: the identity format must round-trip arbitrary
    # hypothesis-generated text without modification.
    with StringIO(text) as in_file, StringIO() as out_file:
        api.format_from_file_to_file(in_file, out_file, format_names=["identity"])
        actual_text = out_file.getvalue()
        assert actual_text == text
"hypothesis.strategies.text",
"io.StringIO",
"intentionally_blank.api.format_from_file_to_file"
] | [((157, 167), 'io.StringIO', 'StringIO', ([], {}), '()\n', (165, 167), False, 'from io import StringIO\n'), ((180, 190), 'io.StringIO', 'StringIO', ([], {}), '()\n', (188, 190), False, 'from io import StringIO\n'), ((212, 286), 'intentionally_blank.api.format_from_file_to_file', 'api.format_from_file_to_file', (['in_file', 'out_file'], {'format_names': "['identity']"}), "(in_file, out_file, format_names=['identity'])\n", (240, 286), False, 'from intentionally_blank import api\n'), ((449, 472), 'io.StringIO', 'StringIO', (['expected_text'], {}), '(expected_text)\n', (457, 472), False, 'from io import StringIO\n'), ((485, 495), 'io.StringIO', 'StringIO', ([], {}), '()\n', (493, 495), False, 'from io import StringIO\n'), ((517, 591), 'intentionally_blank.api.format_from_file_to_file', 'api.format_from_file_to_file', (['in_file', 'out_file'], {'format_names': "['identity']"}), "(in_file, out_file, format_names=['identity'])\n", (545, 591), False, 'from intentionally_blank import api\n'), ((798, 821), 'io.StringIO', 'StringIO', (['expected_text'], {}), '(expected_text)\n', (806, 821), False, 'from io import StringIO\n'), ((834, 844), 'io.StringIO', 'StringIO', ([], {}), '()\n', (842, 844), False, 'from io import StringIO\n'), ((866, 940), 'intentionally_blank.api.format_from_file_to_file', 'api.format_from_file_to_file', (['in_file', 'out_file'], {'format_names': "['identity']"}), "(in_file, out_file, format_names=['identity'])\n", (894, 940), False, 'from intentionally_blank import api\n'), ((1103, 1117), 'io.StringIO', 'StringIO', (['text'], {}), '(text)\n', (1111, 1117), False, 'from io import StringIO\n'), ((1130, 1140), 'io.StringIO', 'StringIO', ([], {}), '()\n', (1138, 1140), False, 'from io import StringIO\n'), ((1162, 1236), 'intentionally_blank.api.format_from_file_to_file', 'api.format_from_file_to_file', (['in_file', 'out_file'], {'format_names': "['identity']"}), "(in_file, out_file, format_names=['identity'])\n", (1190, 1236), False, 'from 
intentionally_blank import api\n'), ((1041, 1058), 'hypothesis.strategies.text', 'strategies.text', ([], {}), '()\n', (1056, 1058), False, 'from hypothesis import given, strategies\n')] |
"""
Mask R-CNN
Train on the toy Balloon dataset and implement color splash effect.
Copyright (c) 2018 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by <NAME>
------------------------------------------------------------"""
import os
import sys
import json
import numpy as np
import skimage.draw
import argparse
import matplotlib.pyplot as plt
from tensorflow.python.util import deprecation
deprecation._PRINT_DEPRECATION_WARNINGS = False
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn.config import Config
from mrcnn import model as modellib
from mrcnn import visualize
# from mrcnn.model import MaskRCNN, log
from mrcnn import utils
# Weights path for unfollow_weights.h5 "r" may be removed depending on system
UNFOLLOW_WEIGHTS_PATH = r"mask\logs\unfollow\unfollow_weights.h5"
dataset_dir = r'button'
# Image needs to be updated...
IMAGE = r'button\val\24.png'
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
############################################################
# Configurations
############################################################
class ModelConfiguration(Config):
    """Base configuration class. CHANGE NUMBER OF CLASSES (NUM_CLASSES)
    You can leave most of the values as default but if you wanted to try
    and improve accuracy or increase time it is available by tweaking values.
    """
    # give the configuration a recognizable name
    NAME = "unfollow_model"
    # number of classes ( add +1 for the background (BG))
    NUM_CLASSES = 2  # NUMBER OF CLASSES!!!!
    # gpu count
    GPU_COUNT = 1
    IMAGES_PER_GPU = 2
    # Anchor stride
    # If 1 then anchors are created for each cell in the backbone feature map.
    # If 2, then anchors are created for every other cell, and so on.
    RPN_ANCHOR_STRIDE = 1
    # ROIs kept after non-maximum suppression (training and inference)
    POST_NMS_ROIS_INFERENCE = 1000
    # Input image resizing
    # Generally, use the "square" resizing mode for predicting
    # and it should work well in most cases. In this mode, images are scaled
    # up such that the small side is = IMAGE_MIN_DIM, but ensuring that the
    # scaling doesn't make the long side > IMAGE_MAX_DIM. Then the image is
    # padded with zeros to make it a square so multiple images can be put
    # in one batch.
    # Available resizing modes:
    # none, square, pad64, crop
    IMAGE_RESIZE_MODE = "square"
    IMAGE_MIN_DIM = 800
    IMAGE_MAX_DIM = 1024
    # Number of color channels per image. RGB = 3, grayscale = 1, RGB-D = 4
    # Changing this requires other changes in the code. See the WIKI for more
    # details: https://github.com/matterport/Mask_RCNN/wiki
    IMAGE_CHANNEL_COUNT = 3
    # Minimum probability value to accept a detected instance; ROIs below
    # this threshold are skipped.
    # BUG FIX: this attribute was assigned twice (0.9 near the top of the
    # class, then 0.7 here). Only the final assignment ever took effect, so
    # the effective value 0.7 is kept and the dead 0.9 assignment removed.
    DETECTION_MIN_CONFIDENCE = 0.7
# Instantiate the configuration and print all effective values at import time.
config = ModelConfiguration()
config.display()
############################################################
# Dataset
############################################################
class Model_Dataset(utils.Dataset):
# load the dataset definitions
def load_dataset(self, dataset_dir, subset):
# Add classes. .add_class(model name, class id number, name of class)
# MUST FILL IN ###### AS CLASS NAME
self.add_class("unfollow_model", 1, 'unfollow')
# Train or validation dataset?
assert subset in ["train", "val"]
dataset_dir = os.path.join(dataset_dir, subset)
# dictionary of x and y coordinates of each region and region class name
annotations = json.load(open(os.path.join(dataset_dir, "via_region_data.json")))
annotations = list(annotations.values())
# decrease dimensions within annotations
annotations = [a for a in annotations if a['regions']]
# Add images
for a in annotations:
# dependant on VIA version
if type(a['regions']) is dict:
# polygons are bboxes and objects are the class name
polygons = [r['shape_attributes'] for r in a['regions'].values()]
objects = [r['region_attributes'] for r in a['regions'].values()]
else:
polygons = [r['shape_attributes'] for r in a['regions']]
objects = [r['region_attributes'] for r in a['regions']]
# check to see if report and more line up with appropriate id
num_ids = [list(n.values()) for n in objects]
num_ids = [1]
# NUMBER IDS MUST BE CHANGED IF USING <2 OR >2
# load_mask() needs the image size to convert polygons to masks.
# Not provided in annotation json
image_path = os.path.join(dataset_dir, a['filename'])
image = skimage.io.imread(image_path)
height, width = image.shape[:2]
# loading the dataset with image information to be used in load_mask()
self.add_image(
"unfollow_model",
image_id=a['filename'], # use file name as a unique image id
path=image_path,
num_ids=num_ids,
width=width, height=height,
polygons=polygons)
def load_mask(self, image_id):
# obtains info for each image in dataset
info = self.image_info[image_id]
# Convert polygons to a bitmap mask of shape
# [height, width, instance_count]
mask = np.zeros([info["height"],
info["width"],
len(info["polygons"])],
dtype=np.uint8)
for i, p in enumerate(info["polygons"]):
# Get indexes of pixels inside the polygon and set them to 1
rr, cc = skimage.draw.polygon(p['all_points_y'], p['all_points_x'])
# one makes the transparent mask
mask[rr, cc, i] = 1
# Map class names to class IDs.
num_ids = info['num_ids']
num_ids = np.array(num_ids, dtype=np.int32)
return mask.astype(np.bool), num_ids
def image_reference(self, image_id):
"""Return the path of the image."""
info = self.image_info[image_id]
if info["source"] == "unfollow_model":
return info["path"]
else:
super(self.__class__, self).image_reference(image_id)
############################################################
# Command line
############################################################
if __name__ == '__main__':
# # Parse command line arguments
# parser = argparse.ArgumentParser(
# description='Train Mask R-CNN to detect unfollow button on twitter.')
# parser.add_argument('--dataset', required=False,
# metavar= dataset_dir,
# help='Only val dataset available')
# parser.add_argument('--weights', required = False,
# metavar = UNFOLLOW_WEIGHTS_PATH ,
# help="Path to weights .h5 file, only weights_unfollow.h5 available")
# parser.add_argument('--logs', required=False,
# default = DEFAULT_LOGS_DIR,
# metavar="/path/to/logs/",
# help='Logs and checkpoints directory (default=logs/)')
# # IMAGE may be required change to True
# parser.add_argument('--image', required=False,
# metavar="path or URL to image",
# help='Image to apply the color splash effect on')
# args = parser.parse_args()
#
# print("Weights: ", args.weights)
# print("Dataset: ", args.dataset)
# print("Logs: ", args.logs)
# Configurations
class InferenceConfig(ModelConfiguration):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
config = InferenceConfig()
# can be removed to not show configuration
config.display()
# Create model
model = modellib.MaskRCNN(mode="inference",
config=config,
model_dir=DEFAULT_LOGS_DIR)
# Load weights
print("Loading weights ", UNFOLLOW_WEIGHTS_PATH)
model.load_weights(UNFOLLOW_WEIGHTS_PATH, by_name=True)
def get_ax(rows=1, cols=1, size=16):
"""Return a Matplotlib Axes array to be used in
all visualizations in the notebook. Provide a
central point to control graph sizes.
Adjust the size attribute to control how big to render images
"""
_, ax = plt.subplots(rows, cols, figsize=(size*cols, size*rows))
return ax
# Load dataset
dataset = Model_Dataset()
dataset.load_dataset(dataset_dir, subset = 'val')
# Must call before using the dataset
dataset.prepare()
# run detection
image = skimage.io.imread(IMAGE)
# Remove alpha channel, if it has one
if image.shape[-1] == 4:
image = image[..., :3]
# Run object detection
results = model.detect([image], verbose=0)
# Display results
ax = get_ax(1)
r = results[0]
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
dataset.class_names, r['scores'], ax=ax,
title="Predictions")
#Extract the first bbox
print (r['rois'][0])
# has the format of [y1, x1, y2, x2]
#############################################################################
# Evaluation/ Inference
#############################################################################
# Load dataset
dataset = Model_Dataset()
dataset.load_dataset(dataset_dir, subset = 'val')
# Must call before using the dataset
dataset.prepare()
class InferenceConfig(ModelConfiguration):
GPU_COUNT = 1
IMAGES_PER_GPU = 1
inference_config = InferenceConfig()
# Recreate the model in inference mode
model = modellib.MaskRCNN(mode="inference",
config=inference_config,
model_dir=UNFOLLOW_WEIGHTS_PATH)
def get_ax(rows=1, cols=1, size=16):
"""Return a Matplotlib Axes array to be used in
all visualizations in the notebook. Provide a
central point to control graph sizes.
Adjust the size attribute to control how big to render images
"""
_, ax = plt.subplots(rows, cols, figsize=(size*cols, size*rows))
return ax
print("Loading weights from ", UNFOLLOW_WEIGHTS_PATH)
model.load_weights(UNFOLLOW_WEIGHTS_PATH, by_name=True)
# run detection
image = skimage.io.imread(IMAGE)
# Remove alpha channel, if it has one
if image.shape[-1] == 4:
image = image[..., :3]
# Run object detection
results = model.detect([image], verbose=0)
# Display results
ax = get_ax(1)
r = results[0]
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
dataset.class_names, r['scores'], ax=ax,
title="Predictions")
#Extract the first bbox
print (r['rois'][0])
# has the format of [y1, x1, y2, x2] | [
"mrcnn.model.MaskRCNN",
"os.path.join",
"numpy.array",
"mrcnn.visualize.display_instances",
"os.path.abspath",
"sys.path.append",
"matplotlib.pyplot.subplots"
] | [((541, 566), 'os.path.abspath', 'os.path.abspath', (['"""../../"""'], {}), "('../../')\n", (556, 566), False, 'import os\n'), ((590, 615), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (605, 615), False, 'import sys\n'), ((1185, 1215), 'os.path.join', 'os.path.join', (['ROOT_DIR', '"""logs"""'], {}), "(ROOT_DIR, 'logs')\n", (1197, 1215), False, 'import os\n'), ((10704, 10802), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'inference_config', 'model_dir': 'UNFOLLOW_WEIGHTS_PATH'}), "(mode='inference', config=inference_config, model_dir=\n UNFOLLOW_WEIGHTS_PATH)\n", (10721, 10802), True, 'from mrcnn import model as modellib\n'), ((11591, 11730), 'mrcnn.visualize.display_instances', 'visualize.display_instances', (['image', "r['rois']", "r['masks']", "r['class_ids']", 'dataset.class_names', "r['scores']"], {'ax': 'ax', 'title': '"""Predictions"""'}), "(image, r['rois'], r['masks'], r['class_ids'],\n dataset.class_names, r['scores'], ax=ax, title='Predictions')\n", (11618, 11730), False, 'from mrcnn import visualize\n'), ((8669, 8747), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'config', 'model_dir': 'DEFAULT_LOGS_DIR'}), "(mode='inference', config=config, model_dir=DEFAULT_LOGS_DIR)\n", (8686, 8747), True, 'from mrcnn import model as modellib\n'), ((9867, 10006), 'mrcnn.visualize.display_instances', 'visualize.display_instances', (['image', "r['rois']", "r['masks']", "r['class_ids']", 'dataset.class_names', "r['scores']"], {'ax': 'ax', 'title': '"""Predictions"""'}), "(image, r['rois'], r['masks'], r['class_ids'],\n dataset.class_names, r['scores'], ax=ax, title='Predictions')\n", (9894, 10006), False, 'from mrcnn import visualize\n'), ((11135, 11195), 'matplotlib.pyplot.subplots', 'plt.subplots', (['rows', 'cols'], {'figsize': '(size * cols, size * rows)'}), '(rows, cols, figsize=(size * cols, size * rows))\n', (11147, 11195), True, 'import 
matplotlib.pyplot as plt\n'), ((3874, 3907), 'os.path.join', 'os.path.join', (['dataset_dir', 'subset'], {}), '(dataset_dir, subset)\n', (3886, 3907), False, 'import os\n'), ((6541, 6574), 'numpy.array', 'np.array', (['num_ids'], {'dtype': 'np.int32'}), '(num_ids, dtype=np.int32)\n', (6549, 6574), True, 'import numpy as np\n'), ((9283, 9343), 'matplotlib.pyplot.subplots', 'plt.subplots', (['rows', 'cols'], {'figsize': '(size * cols, size * rows)'}), '(rows, cols, figsize=(size * cols, size * rows))\n', (9295, 9343), True, 'import matplotlib.pyplot as plt\n'), ((5195, 5235), 'os.path.join', 'os.path.join', (['dataset_dir', "a['filename']"], {}), "(dataset_dir, a['filename'])\n", (5207, 5235), False, 'import os\n'), ((4030, 4079), 'os.path.join', 'os.path.join', (['dataset_dir', '"""via_region_data.json"""'], {}), "(dataset_dir, 'via_region_data.json')\n", (4042, 4079), False, 'import os\n')] |
import os, csv
path = 'F:\Movies-TV'
with open('C:\wsl\local-movies\db\movies.csv', 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
for root,dirs, files in os.walk(path):
for folders in dirs:
if folders == "Subs" or folders == "Subtitles" or folders == "Other" or folders == "subtitles":
pass
else:
writer.writerow([folders.replace('.', ' ')]) | [
"csv.writer",
"os.walk"
] | [((126, 145), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (136, 145), False, 'import os, csv\n'), ((172, 185), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (179, 185), False, 'import os, csv\n')] |
import io
import logging
from functools import cached_property
from .base_experiment import BaseExperiment
from .utils import insert_suffix
class TqdmToLogger(io.StringIO):
buffer = ''
def __init__(self, logger, level=logging.DEBUG):
super().__init__()
self.logger = logger
self.level = level
def write(self, buffer):
self.buffer = buffer.strip('\r\n\t ')
def flush(self):
self.logger.log(self.level, self.buffer)
class LoggingExperiment(BaseExperiment):
@cached_property
def logger(self):
identity = self.cfg.get("worker_id", None)
suffix = ".{}".format(identity) if identity is not None else ""
logger = logging.getLogger("experimentator{}".format(suffix))
if identity is not None and logger.parent.handlers and not logger.handlers:
parent_handlers = [h for h in logger.parent.handlers if isinstance(h, logging.FileHandler)]
name = insert_suffix(parent_handlers[0].baseFilename, f".{identity}")
handler = logging.FileHandler(name, mode="w")
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter("[%(levelname)s]%(filename)s:%(lineno)d: %(message)s"))
logging.info("Logging in %s", name)
return logger
def progress(self, generator, **kwargs):
stream = TqdmToLogger(self.logger, level=logging.DEBUG)
return super().progress(generator, file=stream, **kwargs)
| [
"logging.Formatter",
"logging.FileHandler",
"logging.info"
] | [((1041, 1076), 'logging.FileHandler', 'logging.FileHandler', (['name'], {'mode': '"""w"""'}), "(name, mode='w')\n", (1060, 1076), False, 'import logging\n'), ((1278, 1313), 'logging.info', 'logging.info', (['"""Logging in %s"""', 'name'], {}), "('Logging in %s', name)\n", (1290, 1313), False, 'import logging\n'), ((1192, 1264), 'logging.Formatter', 'logging.Formatter', (['"""[%(levelname)s]%(filename)s:%(lineno)d: %(message)s"""'], {}), "('[%(levelname)s]%(filename)s:%(lineno)d: %(message)s')\n", (1209, 1264), False, 'import logging\n')] |
#!/usr/bin/python3
# Entry-point for running from the CLI when not installed via Pip, Pip will handle the console_scripts entry_points's from setup.py
# It's recommended to use `pip3 install changedetection.io` and start with `changedetection.py` instead, it will be linkd to your global path.
# or Docker.
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
from mb_changedetectionio import mb_changedetection
if __name__ == '__main__':
mb_changedetection.main()
| [
"mb_changedetectionio.mb_changedetection.main"
] | [((457, 482), 'mb_changedetectionio.mb_changedetection.main', 'mb_changedetection.main', ([], {}), '()\n', (480, 482), False, 'from mb_changedetectionio import mb_changedetection\n')] |
'''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p
from OpenGL.GL import glget
EXTENSION_NAME = 'GL_INGR_color_clamp'
_p.unpack_constants( """GL_RED_MIN_CLAMP_INGR 0x8560
GL_GREEN_MIN_CLAMP_INGR 0x8561
GL_BLUE_MIN_CLAMP_INGR 0x8562
GL_ALPHA_MIN_CLAMP_INGR 0x8563
GL_RED_MAX_CLAMP_INGR 0x8564
GL_GREEN_MAX_CLAMP_INGR 0x8565
GL_BLUE_MAX_CLAMP_INGR 0x8566
GL_ALPHA_MAX_CLAMP_INGR 0x8567""", globals())
glget.addGLGetConstant( GL_RED_MIN_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_GREEN_MIN_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_BLUE_MIN_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_ALPHA_MIN_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_RED_MAX_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_GREEN_MAX_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_BLUE_MAX_CLAMP_INGR, (1,) )
glget.addGLGetConstant( GL_ALPHA_MAX_CLAMP_INGR, (1,) )
def glInitColorClampINGR():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( EXTENSION_NAME )
| [
"OpenGL.extensions.hasGLExtension",
"OpenGL.GL.glget.addGLGetConstant"
] | [((444, 495), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_RED_MIN_CLAMP_INGR', '(1,)'], {}), '(GL_RED_MIN_CLAMP_INGR, (1,))\n', (466, 495), False, 'from OpenGL.GL import glget\n'), ((498, 551), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_GREEN_MIN_CLAMP_INGR', '(1,)'], {}), '(GL_GREEN_MIN_CLAMP_INGR, (1,))\n', (520, 551), False, 'from OpenGL.GL import glget\n'), ((554, 606), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_BLUE_MIN_CLAMP_INGR', '(1,)'], {}), '(GL_BLUE_MIN_CLAMP_INGR, (1,))\n', (576, 606), False, 'from OpenGL.GL import glget\n'), ((609, 662), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_ALPHA_MIN_CLAMP_INGR', '(1,)'], {}), '(GL_ALPHA_MIN_CLAMP_INGR, (1,))\n', (631, 662), False, 'from OpenGL.GL import glget\n'), ((665, 716), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_RED_MAX_CLAMP_INGR', '(1,)'], {}), '(GL_RED_MAX_CLAMP_INGR, (1,))\n', (687, 716), False, 'from OpenGL.GL import glget\n'), ((719, 772), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_GREEN_MAX_CLAMP_INGR', '(1,)'], {}), '(GL_GREEN_MAX_CLAMP_INGR, (1,))\n', (741, 772), False, 'from OpenGL.GL import glget\n'), ((775, 827), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_BLUE_MAX_CLAMP_INGR', '(1,)'], {}), '(GL_BLUE_MAX_CLAMP_INGR, (1,))\n', (797, 827), False, 'from OpenGL.GL import glget\n'), ((830, 883), 'OpenGL.GL.glget.addGLGetConstant', 'glget.addGLGetConstant', (['GL_ALPHA_MAX_CLAMP_INGR', '(1,)'], {}), '(GL_ALPHA_MAX_CLAMP_INGR, (1,))\n', (852, 883), False, 'from OpenGL.GL import glget\n'), ((1033, 1074), 'OpenGL.extensions.hasGLExtension', 'extensions.hasGLExtension', (['EXTENSION_NAME'], {}), '(EXTENSION_NAME)\n', (1058, 1074), False, 'from OpenGL import extensions\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 10 22:12:12 2020
@author: vxr131730
"""
import glob
import os
import sys
import random
import time
import numpy as np
import cv2
from test import *
from casadi import *
from numpy import random as npr
from casadi.tools import *
try:
sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
import carla
IM_WIDTH = 640
IM_HEIGHT = 480
actor_list = []
try:
client = carla.Client("localhost",2000)
client.set_timeout(10.0)
world = client.get_world()
blueprint_library = world.get_blueprint_library()
vehicle_bp = blueprint_library.filter("model3")[0]
startpoint = world.get_map().get_spawn_points()[195] #128 195
world.debug.draw_string(startpoint.location, 'O', draw_shadow=False,
color=carla.Color(r=255, g=0, b=0), life_time=50,
persistent_lines=True)
vehicle = world.spawn_actor(vehicle_bp, startpoint)
# ---------------------Trajectory-----------------
wplist = world.get_map().get_topology()
wps = wplist[270][0].next_until_lane_end(5.0) # 22 (195:270)
for w in wps:
world.debug.draw_string(w.transform.location, 'O', draw_shadow=False,
color=carla.Color(r=0, g=255, b=0), life_time=20.0,
persistent_lines=True)
endpoint = wps[0].transform
actor_list.append(vehicle)
# -------------------MPC--------------------
# SAMPLING TIME
T = 0.08 #0.08 # (s)
# PREDICTION HORIZON
N = 50 # 12 5
# STATES
x = SX.sym('x') # x coordinate
y = SX.sym('y') # y coordinate
theta = SX.sym('theta') # vehicle orientation
v = SX.sym('v') # longitudenal velocity
states = vertcat(x,y,theta,v)
n_states = 4 # no. of states
# CONTROL
thr = SX.sym('thr') # Throttle
strang = SX.sym('strang') # steering angle
controls = vertcat(thr,strang)
n_controls = 2
# CONTROL BOUNDS
minthr = 0.0 # minimum throttle
maxthr = 0.5 # maximum throttle
minstrang = -1 # minimum steering angle
maxstrang = 1 # maximum steering angle
# VEHICLE MODEL PARAMETERS
l_r = 1.415 # distance from center of gravity to rare wheels
l_f = 1.6# distance from center of graity to front wheels
# SYMBOLIC REPRESENTATION OF THE DERIVATIVE OF THE STATES BASED ON THE BICYCE MODEL
rhs = vertcat((v*cos(theta+(atan((l_r*tan(strang*1.22))/(l_f + l_r))))),
v*sin(theta+(atan((l_r*tan(strang*1.22))/(l_f + l_r)))),
((v/l_r)*(sin(atan((l_r*tan(strang*1.22))/(l_f + l_r))))),
thr*16)
# STATE PREDICTION - SYMBOLIC FUNCTION OF CURRENT STATE AND CONTROL INPUTS AT EACH TIME STEP OF HORIZON PERIOD
f = Function('f', [states, controls], [rhs])
U = SX.sym('U', n_controls, N)
P = SX.sym('P', n_states + n_states)
X = SX.sym('X', n_states, (N+1))
X[:,0] = P[0:4]
for k in range(N):
st = X[:,k]
con = U[:,k]
f_value = f(st, con)
st_next = st + (T*f_value)
X[:,k+1] = st_next
ff = Function('ff', [U,P], [X])
# SYBOLIC REPRESENTATION OF THE OBJECTIVE FUNCTION
obj = 0
g = SX.sym('g',4,(N+1))
Q = diag(SX([3600,3600,1900,2])) #195 [3600,3600,1900,2] [3100,3100,1900,2] [2700,2700,2000,2]
R = diag(SX([0,8000])) #195 [0,7000]
for k in range(N):
st = X[:,k]
con = U[:,k]
obj = obj + mtimes(mtimes((st - P[4:8]).T,Q), (st - P[4:8])) + mtimes(mtimes(con.T, R), con)
# STATES BOUNDS/CONSTRAINTS
for k in range(0,N+1):
g[0,k] = X[0,k]
g[1,k] = X[1,k]
g[2,k] = X[2,k]
g[3,k] = X[3,k]
g = reshape(g, 4*(N+1), 1)
# CREATING A OPTIMIZATION SOLVER IN CASADI
OPT_variables = reshape(U, 2*N, 1)
nlp_prob = {'f':obj, 'x':OPT_variables, 'g':g, 'p':P}
opts = {'ipopt.max_iter':100,
'ipopt.print_level':0,
'print_time':0,
'ipopt.acceptable_tol':1e-8,
'ipopt.acceptable_obj_change_tol':1e-6}
solver = nlpsol('solver','ipopt', nlp_prob, opts) # solver
# IMPLEMENTING CONTROL BOUNDS
lbx = []
ubx = []
for i in range(2*N):
if i%2==0:
lbx.append(minthr)
ubx.append(maxthr)
else:
lbx.append(minstrang)
ubx.append(maxstrang)
lbx = np.transpose(lbx)
ubx = np.transpose(ubx)
# IMPLEMENTING STATE BOUNDS
lbgv = []
ubgv = []
for i in range(0,4*(N+1),4):
lbgv.append(-300)
lbgv.append(-300)
lbgv.append(0)
lbgv.append(0)
ubgv.append(300)
ubgv.append(300)
ubgv.append(405)
ubgv.append(15)
u0 = (DM.zeros(2*N,1))
u_cl = []
def contheta(thet):
if thet < 0:
thet = 360 - abs(thet)
return thet
x0 = np.transpose([startpoint.location.x, startpoint.location.y, contheta(startpoint.rotation.yaw), 0])
xs = np.transpose([endpoint.location.x, endpoint.location.y, contheta(startpoint.rotation.yaw), 3]) #-90.156235*pi/180
c = 0
p = np.transpose([startpoint.location.x,
startpoint.location.y,
contheta(startpoint.rotation.yaw),
0,
endpoint.location.x,
endpoint.location.y,
contheta(startpoint.rotation.yaw),
3])
while c < len(wps):
if (norm_2(x0[0:2]-p[4:6]))<3:
c += 1
endpoint = wps[c].transform
world.debug.draw_string(endpoint.location, 'O', draw_shadow=False,
color=carla.Color(r=0, g=0, b=255), life_time=3,
persistent_lines=True)
print(x0,"---",p[4:8])
u0 = reshape(u0, 2*N,1)
p[0:4] = x0
p[4:8] = [endpoint.location.x, endpoint.location.y, contheta(endpoint.rotation.yaw), 3]#6
sol = solver(x0=u0, lbx=lbx, ubx=ubx, lbg=lbgv, ubg=ubgv, p=p)
u = reshape(sol['x'].T, 2, N).T
ff_value = ff(u.T, p)
for k in range(N):
world.debug.draw_string(carla.Location(x=float(ff_value[0,k]),
y=float(ff_value[1,k]),
z=0.0),
'O', draw_shadow=False,
color=carla.Color(r=255, g=0, b=0), life_time=0.01,
persistent_lines=True)
u_cl.append(u[0,:])
vehicle.apply_control(carla.VehicleControl(throttle =float(u[0,0]) , steer = float(u[0,1])))
u_theta = vehicle.get_transform().rotation.yaw
x0 = np.transpose([vehicle.get_transform().location.x,
vehicle.get_transform().location.y,
contheta(u_theta),
norm_2([vehicle.get_velocity().x,
vehicle.get_velocity().y])])
u0 = reshape(u0, N, 2)
u0[0:N-1,:] = u[1:N,:]
u0[N-1,:]=u[N-1,:]
time.sleep(10)
finally:
for actor in actor_list:
actor.destroy()
print("All cleaned up!") | [
"time.sleep",
"carla.Client",
"carla.Color",
"numpy.transpose",
"glob.glob"
] | [((633, 664), 'carla.Client', 'carla.Client', (['"""localhost"""', '(2000)'], {}), "('localhost', 2000)\n", (645, 664), False, 'import carla\n'), ((4756, 4773), 'numpy.transpose', 'np.transpose', (['lbx'], {}), '(lbx)\n', (4768, 4773), True, 'import numpy as np\n'), ((4785, 4802), 'numpy.transpose', 'np.transpose', (['ubx'], {}), '(ubx)\n', (4797, 4802), True, 'import numpy as np\n'), ((7665, 7679), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (7675, 7679), False, 'import time\n'), ((323, 481), 'glob.glob', 'glob.glob', (["('../carla/dist/carla-*%d.%d-%s.egg' % (sys.version_info.major, sys.\n version_info.minor, 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))"], {}), "('../carla/dist/carla-*%d.%d-%s.egg' % (sys.version_info.major,\n sys.version_info.minor, 'win-amd64' if os.name == 'nt' else 'linux-x86_64')\n )\n", (332, 481), False, 'import glob\n'), ((1025, 1053), 'carla.Color', 'carla.Color', ([], {'r': '(255)', 'g': '(0)', 'b': '(0)'}), '(r=255, g=0, b=0)\n', (1036, 1053), False, 'import carla\n'), ((1500, 1528), 'carla.Color', 'carla.Color', ([], {'r': '(0)', 'g': '(255)', 'b': '(0)'}), '(r=0, g=255, b=0)\n', (1511, 1528), False, 'import carla\n'), ((6136, 6164), 'carla.Color', 'carla.Color', ([], {'r': '(0)', 'g': '(0)', 'b': '(255)'}), '(r=0, g=0, b=255)\n', (6147, 6164), False, 'import carla\n'), ((6943, 6971), 'carla.Color', 'carla.Color', ([], {'r': '(255)', 'g': '(0)', 'b': '(0)'}), '(r=255, g=0, b=0)\n', (6954, 6971), False, 'import carla\n')] |
"""
Collection of functions to calculate lag correlations
and significance following Ebisuzaki 97 JCLIM
"""
def phaseran(recblk, nsurr,ax):
""" Phaseran by <NAME>: http://www.mathworks.nl/matlabcentral/fileexchange/32621-phase-randomization/content/phaseran.m
Args:
recblk (2D array): Row: time sample. Column: recording.
An odd number of time samples (height) is expected.
If that is not the case, recblock is reduced by 1 sample before the surrogate data is created.
The class must be double and it must be nonsparse.
nsurr (int): is the number of image block surrogates that you want to generate.
Returns:
surrblk: 3D multidimensional array image block with the surrogate datasets along the third dimension
Reference:
<NAME>., <NAME>. Generating Surrogate Data for Time Series with Several Simultaneously Measured Variables (1994)
Physical Review Letters, Vol 73, Number 7
NOTE: Extended to xy data and converted to python by <NAME>
"""
import numpy as np
from ds21grl.misc import AxRoll
# make sure time dimension is axis=0
recblk = AxRoll(recblk,ax)
# Get time length
nfrms = recblk.shape[0]
# force data to have odd time length
if nfrms % 2 == 0:
nfrms = nfrms-1
recblk = recblk[0:nfrms]
# define fft frequency intervals
len_ser = int((nfrms-1)/2)
interv1 = np.arange(1, len_ser+1)
interv2 = np.arange(len_ser+1, nfrms)
# Fourier transform of the original dataset
fft_recblk = np.fft.fft(recblk,axis=0)
# Create nsurr timeseries of random numbers (0,1)
# Also tile fft array for later
if np.ndim(recblk) == 1:
ph_rnd = np.random.rand(len_ser,nsurr)
fft_recblk_surr = np.tile(fft_recblk[None,:],(nsurr,1))
elif np.ndim(recblk) == 2:
ph_rnd = np.random.rand(len_ser,recblk.shape[1],nsurr)
fft_recblk_surr = np.tile(fft_recblk[None,:],(nsurr,1,1))
elif np.ndim(recblk) == 3:
ph_rnd = np.random.rand(len_ser,recblk.shape[1],recblk.shape[2],nsurr)
fft_recblk_surr = np.tile(fft_recblk[None,:],(nsurr,1,1,1))
fft_recblk_surr = np.moveaxis(fft_recblk_surr,0,-1)
# Create the random phases for all the time series
ph_interv1 = np.exp(2*np.pi*1j*ph_rnd)
ph_interv2 = np.conj(np.flipud(ph_interv1))
# Randomize all the time series simultaneously
fft_recblk_surr[interv1,:] = fft_recblk_surr[interv1,:] * ph_interv1
fft_recblk_surr[interv2,:] = fft_recblk_surr[interv2,:] * ph_interv2
# Inverse transform
surrblk = np.real(np.fft.ifft(fft_recblk_surr,axis=0))
return surrblk
def remove_mean(data,ax):
"""
function that removes mean defined across
given axis from entire data array
"""
import numpy as np
from ds21grl.misc import AxRoll
if np.ndim(data) == 1:
data = data - np.mean(data)
else:
data = AxRoll(data,ax)
mean = np.mean(data,axis=ax)
for i in range(0,data.shape[ax]):
data[i,:] = data[i,:] - mean[:]
data = AxRoll(data,ax,invert=True)
return data
def cross_correlate_ndim(x,y,maxlag,ax):
"""
Calculates lag cross-correlation
between two n dim arrays along a specified axis.
Truncates to +-maxlag
NOTE: x and y arrays must be same dimensions
"""
import numpy as np
from scipy import signal
from ds21grl.misc import AxRoll
# put lag correlation axis on axis=0
x = AxRoll(x,ax)
y = AxRoll(y,ax)
# center time series
x = remove_mean(x,0)
y = remove_mean(y,0)
# calc cross correlation
corr = signal.fftconvolve(x, np.flip(y,axis=0), mode='full', axes=0)
corr = corr/x.shape[0]/np.std(x,axis=0)/np.std(y,axis=0)
# extract desired lags
temp1 = np.arange(-(x.shape[0]-1),0,1)
temp2 = np.arange(0,x.shape[0],1)
lag = np.concatenate((temp1, temp2), axis=0)
index = (lag >= -1*maxlag) & (lag <= maxlag)
lag = lag[index]
if np.ndim(x) > 1:
corr = corr[index,:]
else:
corr = corr[index]
return corr,lag
def cross_correlate_ndim_sig(x1,x2,maxlag,nbs,sigthresh,ax):
"""
Wrapper for cross_correlate_ndim. Also calculates
significance following randomized phase procedure from Ebisuzaki 97.
Significant = 1 and not significant = 0.
NOTE: x and y arrays must be same dimensions
"""
import numpy as np
from ds21grl.misc import AxRoll
# make time dimension axis=0
x1 = AxRoll(x1,ax)
x2 = AxRoll(x2,ax)
# force timeseries to be odd
# (because of phaseran fxn)
if x1.shape[ax] % 2 == 0:
x1 = x1[0:-1]
x2 = x2[0:-1]
# calculate lag correlation
[corr,lag] = cross_correlate_ndim(x1,x2,maxlag,ax)
# calculate boostrapped time series with
# randomized phases
x2 = phaseran(x2,nbs,ax)
if np.ndim(x1) == 3:
x1 = np.tile(x1[None,:],(nbs,1,1,1))
elif np.ndim(x1) == 2:
x1 = np.tile(x1[None,:],(nbs,1,1))
elif np.ndim(x1) == 1:
x1 = np.tile(x1[None,:],(nbs,1))
x1 = np.moveaxis(x1,0,-1) # x1 must have same shape as x2
[corr_bs,lag] = cross_correlate_ndim(x1,x2,maxlag,ax)
# calculate significant correlations (two sided test)
# using PDF of bootstrapped correlations
sig = np.zeros((corr.shape))
ptile1 = np.percentile(corr_bs,(100-sigthresh)/2,axis=-1)
ptile2 = np.percentile(corr_bs,sigthresh+(100-sigthresh)/2,axis=-1)
index = (corr > ptile1) & (corr < ptile2)
sig[index] = 1
return corr,sig,lag
def write_yt_daily(corr,sig,lag,filename,dir_out,dim,write2file):
"""
Writes yt lag correlation data to file
"""
import numpy as np
import xarray as xr
if write2file == 1:
output = xr.Dataset(data_vars={'corr': (('lag','lat'), corr.astype(np.float32)),
'sig': (('lag','lat'), sig.astype(np.float32))},
coords={'lag': lag,'lat': dim.lat})
output.corr.attrs['units'] = 'unitless'
output.sig.attrs['units'] = 'unitless'
output.to_netcdf(dir_out + filename)
return
| [
"numpy.tile",
"numpy.mean",
"numpy.flip",
"numpy.random.rand",
"numpy.flipud",
"numpy.fft.fft",
"numpy.ndim",
"numpy.exp",
"numpy.zeros",
"ds21grl.misc.AxRoll",
"numpy.concatenate",
"numpy.std",
"numpy.moveaxis",
"numpy.percentile",
"numpy.fft.ifft",
"numpy.arange"
] | [((1166, 1184), 'ds21grl.misc.AxRoll', 'AxRoll', (['recblk', 'ax'], {}), '(recblk, ax)\n', (1172, 1184), False, 'from ds21grl.misc import AxRoll\n'), ((1453, 1478), 'numpy.arange', 'np.arange', (['(1)', '(len_ser + 1)'], {}), '(1, len_ser + 1)\n', (1462, 1478), True, 'import numpy as np\n'), ((1491, 1520), 'numpy.arange', 'np.arange', (['(len_ser + 1)', 'nfrms'], {}), '(len_ser + 1, nfrms)\n', (1500, 1520), True, 'import numpy as np\n'), ((1585, 1611), 'numpy.fft.fft', 'np.fft.fft', (['recblk'], {'axis': '(0)'}), '(recblk, axis=0)\n', (1595, 1611), True, 'import numpy as np\n'), ((2233, 2268), 'numpy.moveaxis', 'np.moveaxis', (['fft_recblk_surr', '(0)', '(-1)'], {}), '(fft_recblk_surr, 0, -1)\n', (2244, 2268), True, 'import numpy as np\n'), ((2340, 2373), 'numpy.exp', 'np.exp', (['(2 * np.pi * 1.0j * ph_rnd)'], {}), '(2 * np.pi * 1.0j * ph_rnd)\n', (2346, 2373), True, 'import numpy as np\n'), ((3596, 3609), 'ds21grl.misc.AxRoll', 'AxRoll', (['x', 'ax'], {}), '(x, ax)\n', (3602, 3609), False, 'from ds21grl.misc import AxRoll\n'), ((3617, 3630), 'ds21grl.misc.AxRoll', 'AxRoll', (['y', 'ax'], {}), '(y, ax)\n', (3623, 3630), False, 'from ds21grl.misc import AxRoll\n'), ((3923, 3957), 'numpy.arange', 'np.arange', (['(-(x.shape[0] - 1))', '(0)', '(1)'], {}), '(-(x.shape[0] - 1), 0, 1)\n', (3932, 3957), True, 'import numpy as np\n'), ((3967, 3994), 'numpy.arange', 'np.arange', (['(0)', 'x.shape[0]', '(1)'], {}), '(0, x.shape[0], 1)\n', (3976, 3994), True, 'import numpy as np\n'), ((4006, 4044), 'numpy.concatenate', 'np.concatenate', (['(temp1, temp2)'], {'axis': '(0)'}), '((temp1, temp2), axis=0)\n', (4020, 4044), True, 'import numpy as np\n'), ((4756, 4770), 'ds21grl.misc.AxRoll', 'AxRoll', (['x1', 'ax'], {}), '(x1, ax)\n', (4762, 4770), False, 'from ds21grl.misc import AxRoll\n'), ((4779, 4793), 'ds21grl.misc.AxRoll', 'AxRoll', (['x2', 'ax'], {}), '(x2, ax)\n', (4785, 4793), False, 'from ds21grl.misc import AxRoll\n'), ((5354, 5376), 'numpy.moveaxis', 'np.moveaxis', 
(['x1', '(0)', '(-1)'], {}), '(x1, 0, -1)\n', (5365, 5376), True, 'import numpy as np\n'), ((5587, 5607), 'numpy.zeros', 'np.zeros', (['corr.shape'], {}), '(corr.shape)\n', (5595, 5607), True, 'import numpy as np\n'), ((5628, 5682), 'numpy.percentile', 'np.percentile', (['corr_bs', '((100 - sigthresh) / 2)'], {'axis': '(-1)'}), '(corr_bs, (100 - sigthresh) / 2, axis=-1)\n', (5641, 5682), True, 'import numpy as np\n'), ((5695, 5761), 'numpy.percentile', 'np.percentile', (['corr_bs', '(sigthresh + (100 - sigthresh) / 2)'], {'axis': '(-1)'}), '(corr_bs, sigthresh + (100 - sigthresh) / 2, axis=-1)\n', (5708, 5761), True, 'import numpy as np\n'), ((1709, 1724), 'numpy.ndim', 'np.ndim', (['recblk'], {}), '(recblk)\n', (1716, 1724), True, 'import numpy as np\n'), ((1757, 1787), 'numpy.random.rand', 'np.random.rand', (['len_ser', 'nsurr'], {}), '(len_ser, nsurr)\n', (1771, 1787), True, 'import numpy as np\n'), ((1813, 1853), 'numpy.tile', 'np.tile', (['fft_recblk[None, :]', '(nsurr, 1)'], {}), '(fft_recblk[None, :], (nsurr, 1))\n', (1820, 1853), True, 'import numpy as np\n'), ((2391, 2412), 'numpy.flipud', 'np.flipud', (['ph_interv1'], {}), '(ph_interv1)\n', (2400, 2412), True, 'import numpy as np\n'), ((2660, 2696), 'numpy.fft.ifft', 'np.fft.ifft', (['fft_recblk_surr'], {'axis': '(0)'}), '(fft_recblk_surr, axis=0)\n', (2671, 2696), True, 'import numpy as np\n'), ((2930, 2943), 'numpy.ndim', 'np.ndim', (['data'], {}), '(data)\n', (2937, 2943), True, 'import numpy as np\n'), ((3011, 3027), 'ds21grl.misc.AxRoll', 'AxRoll', (['data', 'ax'], {}), '(data, ax)\n', (3017, 3027), False, 'from ds21grl.misc import AxRoll\n'), ((3042, 3064), 'numpy.mean', 'np.mean', (['data'], {'axis': 'ax'}), '(data, axis=ax)\n', (3049, 3064), True, 'import numpy as np\n'), ((3165, 3194), 'ds21grl.misc.AxRoll', 'AxRoll', (['data', 'ax'], {'invert': '(True)'}), '(data, ax, invert=True)\n', (3171, 3194), False, 'from ds21grl.misc import AxRoll\n'), ((3780, 3798), 'numpy.flip', 'np.flip', (['y'], 
{'axis': '(0)'}), '(y, axis=0)\n', (3787, 3798), True, 'import numpy as np\n'), ((3865, 3882), 'numpy.std', 'np.std', (['y'], {'axis': '(0)'}), '(y, axis=0)\n', (3871, 3882), True, 'import numpy as np\n'), ((4126, 4136), 'numpy.ndim', 'np.ndim', (['x'], {}), '(x)\n', (4133, 4136), True, 'import numpy as np\n'), ((5135, 5146), 'numpy.ndim', 'np.ndim', (['x1'], {}), '(x1)\n', (5142, 5146), True, 'import numpy as np\n'), ((5166, 5202), 'numpy.tile', 'np.tile', (['x1[None, :]', '(nbs, 1, 1, 1)'], {}), '(x1[None, :], (nbs, 1, 1, 1))\n', (5173, 5202), True, 'import numpy as np\n'), ((1860, 1875), 'numpy.ndim', 'np.ndim', (['recblk'], {}), '(recblk)\n', (1867, 1875), True, 'import numpy as np\n'), ((1912, 1959), 'numpy.random.rand', 'np.random.rand', (['len_ser', 'recblk.shape[1]', 'nsurr'], {}), '(len_ser, recblk.shape[1], nsurr)\n', (1926, 1959), True, 'import numpy as np\n'), ((1984, 2027), 'numpy.tile', 'np.tile', (['fft_recblk[None, :]', '(nsurr, 1, 1)'], {}), '(fft_recblk[None, :], (nsurr, 1, 1))\n', (1991, 2027), True, 'import numpy as np\n'), ((2972, 2985), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (2979, 2985), True, 'import numpy as np\n'), ((3848, 3865), 'numpy.std', 'np.std', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (3854, 3865), True, 'import numpy as np\n'), ((5207, 5218), 'numpy.ndim', 'np.ndim', (['x1'], {}), '(x1)\n', (5214, 5218), True, 'import numpy as np\n'), ((5238, 5271), 'numpy.tile', 'np.tile', (['x1[None, :]', '(nbs, 1, 1)'], {}), '(x1[None, :], (nbs, 1, 1))\n', (5245, 5271), True, 'import numpy as np\n'), ((2033, 2048), 'numpy.ndim', 'np.ndim', (['recblk'], {}), '(recblk)\n', (2040, 2048), True, 'import numpy as np\n'), ((2081, 2145), 'numpy.random.rand', 'np.random.rand', (['len_ser', 'recblk.shape[1]', 'recblk.shape[2]', 'nsurr'], {}), '(len_ser, recblk.shape[1], recblk.shape[2], nsurr)\n', (2095, 2145), True, 'import numpy as np\n'), ((2169, 2215), 'numpy.tile', 'np.tile', (['fft_recblk[None, :]', '(nsurr, 1, 1, 1)'], {}), 
'(fft_recblk[None, :], (nsurr, 1, 1, 1))\n', (2176, 2215), True, 'import numpy as np\n'), ((5277, 5288), 'numpy.ndim', 'np.ndim', (['x1'], {}), '(x1)\n', (5284, 5288), True, 'import numpy as np\n'), ((5308, 5338), 'numpy.tile', 'np.tile', (['x1[None, :]', '(nbs, 1)'], {}), '(x1[None, :], (nbs, 1))\n', (5315, 5338), True, 'import numpy as np\n')] |
from django.conf.urls import url
from . import views

# Django matches these regexes with re.search against the remaining path, so
# every pattern must be anchored with ^ — otherwise e.g. "register$" would
# also match "/xregister". Only the home pattern was anchored originally.
urlpatterns = [
    url(r'^$', views.index, name="home"),
    url(r'^success/(?P<id>\d+)', views.success, name="success"),
    url(r'^register$', views.register, name="register"),
    url(r'^login$', views.login, name="login"),
    url(r'^users$', views.users, name="users"),
    url(r'^users/(?P<id>\d+)/delete$', views.delete_user, name="delete_user"),
]
"django.conf.urls.url"
] | [((74, 109), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.index'], {'name': '"""home"""'}), "('^$', views.index, name='home')\n", (77, 109), False, 'from django.conf.urls import url\n'), ((116, 174), 'django.conf.urls.url', 'url', (['"""success/(?P<id>\\\\d+)"""', 'views.success'], {'name': '"""success"""'}), "('success/(?P<id>\\\\d+)', views.success, name='success')\n", (119, 174), False, 'from django.conf.urls import url\n'), ((180, 229), 'django.conf.urls.url', 'url', (['"""register$"""', 'views.register'], {'name': '"""register"""'}), "('register$', views.register, name='register')\n", (183, 229), False, 'from django.conf.urls import url\n'), ((236, 276), 'django.conf.urls.url', 'url', (['"""login$"""', 'views.login'], {'name': '"""login"""'}), "('login$', views.login, name='login')\n", (239, 276), False, 'from django.conf.urls import url\n'), ((283, 323), 'django.conf.urls.url', 'url', (['"""users$"""', 'views.users'], {'name': '"""users"""'}), "('users$', views.users, name='users')\n", (286, 323), False, 'from django.conf.urls import url\n'), ((330, 402), 'django.conf.urls.url', 'url', (['"""users/(?P<id>\\\\d+)/delete$"""', 'views.delete_user'], {'name': '"""delete_user"""'}), "('users/(?P<id>\\\\d+)/delete$', views.delete_user, name='delete_user')\n", (333, 402), False, 'from django.conf.urls import url\n')] |
import argparse
import os
import mlflow
import numpy as np
import pandas as pd
import torch
import torch.optim as optim
from matplotlib import pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg
from mlflow import log_metric, log_param, get_artifact_uri
from skimage.io import imsave
from sklearn.model_selection import ParameterGrid
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from tqdm import tqdm
from dataset import TomoDetectionDataset as Dataset
from dense_yolo import DenseYOLO
from loss import objectness_module, LocalizationLoss
from sampler import TomoBatchSampler
from transform import transforms
def main(args):
    """Run a grid search over DenseYOLO hyper-parameters and objectness losses.

    Every (loss configuration, network hyper-parameter) combination is trained
    as a separate mlflow run. For each run the checkpoint and FROC curve of the
    best epoch (by TPR at 2 FPs/slice) are stored as mlflow artifacts, and the
    final TPR2/TPR1/AUC metrics are logged.
    """
    torch.backends.cudnn.benchmark = True
    device = torch.device("cpu" if not torch.cuda.is_available() else args.device)

    loader_train, loader_valid = data_loaders(args)
    loaders = {"train": loader_train, "valid": loader_valid}

    hparams_dict = {
        "block_config": [(1, 3, 2, 6, 4), (2, 6, 4, 12, 8)],
        "num_init_features": [8, 16],
        "growth_rate": [8, 16],
        "bn_size": [2, 4],
    }
    hparams = list(ParameterGrid(hparams_dict))  # 16 configs

    loss_params_dict = [
        {"loss": ["CE", "weighted-CE"], "alpha": [0.25, 0.5, 1.0]},  # 6 configs
        {"loss": ["focal"], "alpha": [0.25, 0.5, 1.0], "gamma": [0.5, 1.0, 2.0]},  # 9 configs
        {
            "loss": ["reduced-focal"],
            "alpha": [0.25, 0.5, 1.0],
            "gamma": [0.5, 1.0, 2.0],
            "reduce_th": [0.5],
        }  # 9 configs
    ]  # 24 configs
    loss_params = list(ParameterGrid(loss_params_dict))
    # Each loss configuration is evaluated twice (two repetitions).
    loss_params = loss_params * 2  # 48 configs

    try:
        mlflow.set_tracking_uri(args.mlruns_path)
        experiment_id = (
            args.experiment_id
            if args.experiment_id
            else mlflow.create_experiment(name=args.experiment_name)
        )
    except Exception as _:
        print("experiment-id must be unique")
        return

    for i, loss_param in tqdm(enumerate(loss_params)):
        for j, hparam in enumerate(hparams):
            with mlflow.start_run(experiment_id=experiment_id):
                mlflow_log_params(loss_param, hparam)
                try:
                    yolo = DenseYOLO(img_channels=1, out_channels=Dataset.out_channels, **hparam)
                    yolo.to(device)

                    objectness_loss = objectness_module(
                        name=loss_param["loss"], args=argparse.Namespace(**loss_param)
                    )
                    localization_loss = LocalizationLoss(weight=args.loc_weight)

                    optimizer = optim.Adam(yolo.parameters(), lr=args.lr)

                    # Early stopping: reset whenever validation TPR2 improves.
                    early_stop = args.patience
                    run_tpr2 = 0.0
                    run_tpr1 = 0.0
                    run_auc = 0.0

                    for _ in range(args.epochs):
                        if early_stop == 0:
                            break

                        for phase in ["train", "valid"]:
                            if phase == "train":
                                yolo.train()
                                early_stop -= 1
                            else:
                                yolo.eval()
                                df_validation_pred = pd.DataFrame()
                                valid_target_nb = 0

                            for data in loaders[phase]:
                                x, y_true = data
                                x, y_true = x.to(device), y_true.to(device)

                                optimizer.zero_grad()

                                with torch.set_grad_enabled(phase == "train"):
                                    y_pred = yolo(x)
                                    obj = objectness_loss(y_pred, y_true)
                                    loc = localization_loss(y_pred, y_true)
                                    total_loss = obj + loc

                                    if phase == "train":
                                        total_loss.backward()
                                        clip_grad_norm_(yolo.parameters(), 0.5)
                                        optimizer.step()
                                    else:
                                        y_true_np = y_true.detach().cpu().numpy()
                                        valid_target_nb += np.sum(y_true_np[:, 0])
                                        df_batch_pred = evaluate_batch(y_pred, y_true)
                                        # DataFrame.append was deprecated in pandas 1.4
                                        # and removed in 2.0 -> use pd.concat instead.
                                        df_validation_pred = pd.concat(
                                            [df_validation_pred, df_batch_pred],
                                            ignore_index=True,
                                            sort=False,
                                        )

                            if phase == "valid":
                                tpr, fps = froc(df_validation_pred, valid_target_nb)
                                # Sensitivity at 2 and 1 mean FPs per slice.
                                epoch_tpr2 = np.interp(2.0, fps, tpr)
                                epoch_tpr1 = np.interp(1.0, fps, tpr)
                                if epoch_tpr2 > run_tpr2:
                                    early_stop = args.patience
                                    run_tpr2 = epoch_tpr2
                                    run_tpr1 = epoch_tpr1
                                    run_auc = np.trapz(tpr, fps)
                                    # Persist the best checkpoint and FROC plot
                                    # into this run's artifact directory.
                                    torch.save(
                                        yolo.state_dict(),
                                        os.path.join(get_artifact_uri(), "yolo.pt"),
                                    )
                                    imsave(
                                        os.path.join(get_artifact_uri(), "froc.png"),
                                        plot_froc(fps, tpr),
                                    )

                    log_metric("TPR2", run_tpr2)
                    log_metric("TPR1", run_tpr1)
                    log_metric("AUC", run_auc)
                except Exception as e:
                    # Best effort: a failing config must not abort the grid search.
                    print(
                        "{:0>2d}/{} | {} {}".format(
                            j + 1, len(hparams), hparams[j], type(e).__name__
                        )
                    )
def mlflow_log_params(loss_param, hparam):
    """Log the loss configuration and network hyper-parameters to mlflow."""
    for name, value in loss_param.items():
        log_param(name, value)
    log_param("loss_fun", str(loss_param))
    for name, value in hparam.items():
        log_param(name, value)
    log_param("network", str(hparam))
def data_loaders(args):
    """Build the training and validation DataLoader pair."""
    dataset_train, dataset_valid = datasets(args)

    sampler_train = TomoBatchSampler(
        batch_size=args.batch_size, data_frame=dataset_train.data_frame
    )

    def worker_init(worker_id):
        # Give every data-loading worker a distinct, reproducible seed.
        np.random.seed(42 + worker_id)

    common_kwargs = {"num_workers": args.workers, "worker_init_fn": worker_init}
    loader_train = DataLoader(
        dataset_train, batch_sampler=sampler_train, **common_kwargs
    )
    loader_valid = DataLoader(
        dataset_valid,
        batch_size=args.batch_size,
        drop_last=False,
        **common_kwargs,
    )
    return loader_train, loader_valid
def datasets(args):
    """Create the train/validation TomoDetectionDataset pair."""
    # Options identical for both subsets.
    shared = dict(
        csv_views=args.data_views,
        csv_bboxes=args.data_boxes,
        root_dir=args.images,
        skip_preprocessing=True,
        downscale=args.downscale,
        max_slice_offset=args.slice_offset,
        seed=args.seed,
    )
    train = Dataset(
        subset="train",
        random=True,
        only_biopsied=args.only_biopsied,
        transform=transforms(train=True),
        **shared,
    )
    valid = Dataset(
        subset="validation",
        random=False,
        transform=transforms(train=False),
        **shared,
    )
    return train, valid
def froc(df, targets_nb):
    """Compute a FROC curve: sensitivity vs. mean false positives per slice.

    ``df`` holds one detection per row with columns PID (slice id), Score,
    TP (1 for true positives) and GTID; ``targets_nb`` is the total number of
    ground-truth targets. The curve is capped at 4 FPs/slice and extended
    horizontally to that cap if it ends early. Returns (tpr, fps) lists.
    """
    n_slices = len(df.drop_duplicates(subset=["PID"]))
    n_targets = targets_nb
    fps_cap = 4.0
    tpr = [0.0]
    fps = [0.0]
    # Sweep the score thresholds of the true-positive detections, highest first.
    for threshold in sorted(df[df["TP"] == 1]["Score"], reverse=True):
        above = df[df["Score"] >= threshold]
        # Count each ground-truth target at most once per threshold.
        unique_tp = above.drop_duplicates(subset=["PID", "TP", "GTID"])
        sensitivity = float(sum(unique_tp["TP"])) / n_targets
        fp_rate = float(len(above[above["TP"] == 0])) / n_slices
        if fp_rate > fps_cap:
            tpr.append(tpr[-1])
            fps.append(fps_cap)
            break
        tpr.append(sensitivity)
        fps.append(fp_rate)
    if np.max(fps) < fps_cap:
        tpr.append(tpr[-1])
        fps.append(fps_cap)
    return tpr, fps
def plot_froc(fps, tpr, color="darkorange", linestyle="-"):
    """Render the FROC curve and return it as an (H, W, 4) RGBA uint8 array."""
    fig = plt.figure(figsize=(10, 8))
    canvas = FigureCanvasAgg(fig)
    plt.plot(fps, tpr, color=color, linestyle=linestyle, lw=2)
    plt.xlim([0.0, 4.0])
    plt.xticks(np.arange(0.0, 4.5, 0.5))
    plt.ylim([0.0, 1.0])
    plt.yticks(np.arange(0.0, 1.1, 0.1))
    plt.tick_params(axis="both", which="major", labelsize=16)
    plt.xlabel("Mean FPs per slice", fontsize=24)
    plt.ylabel("Sensitivity", fontsize=24)
    plt.grid(color="silver", alpha=0.3, linestyle="--", linewidth=1)
    plt.tight_layout()
    canvas.draw()
    plt.close()
    s, (width, height) = canvas.print_to_buffer()
    # np.fromstring is deprecated for binary data (removed in numpy 2.x);
    # np.frombuffer reads the RGBA buffer directly without a copy.
    return np.frombuffer(s, np.uint8).reshape((height, width, 4))
def is_tp(pred_box, true_box, min_dist=50):
    """Decide whether a predicted box is a true positive for a GT box.

    Boxes are dicts with center coordinates ("X", "Y") plus "Width"/"Height"
    for the ground truth. A prediction is a hit when its center lies within a
    radius of the ground-truth center: half of the GT box diagonal, but never
    smaller than ``min_dist`` pixels.
    """
    dx = pred_box["X"] - true_box["X"]
    dy = pred_box["Y"] - true_box["Y"]
    center_dist = np.hypot(dx, dy)
    radius = np.hypot(true_box["Width"], true_box["Height"]) / 2.
    return center_dist <= max(radius, min_dist)
def evaluate_batch(y_pred, y_true):
    """Match predicted boxes against ground-truth boxes for one batch.

    Both inputs are YOLO-grid tensors; GT boxes are decoded with a threshold
    of 1.0. Returns a DataFrame with one row per predicted box (Score, X, Y,
    Width, Height) plus PID, a TP flag and the matched GTID, as consumed by
    ``froc``.
    """
    y_pred = y_pred.detach().cpu().numpy()
    y_true = y_true.detach().cpu().numpy()

    df_eval = pd.DataFrame()
    for i in range(y_pred.shape[0]):
        df_gt_boxes = pred2boxes(y_true[i], threshold=1.0)
        # Random ids keep GT boxes distinguishable across samples.
        # np.random.randint requires an int bound — modern numpy rejects the
        # original float literals (10e10 == 10**11, 10e12 == 10**13).
        df_gt_boxes["GTID"] = np.random.randint(10 ** 11) * (1 + df_gt_boxes["X"])
        df_pred_boxes = pred2boxes(y_pred[i])
        df_pred_boxes["PID"] = np.random.randint(10 ** 13)
        df_pred_boxes["TP"] = 0
        # NOTE(review): np.random.choice raises if the sample has no GT boxes;
        # presumably every evaluated sample is biopsied — confirm upstream.
        df_pred_boxes["GTID"] = np.random.choice(
            list(set(df_gt_boxes["GTID"])), df_pred_boxes.shape[0]
        )
        for index, pred_box in df_pred_boxes.iterrows():
            tp_list = [
                (j, is_tp(pred_box, x_box)) for j, x_box in df_gt_boxes.iterrows()
            ]
            if any(tp[1] for tp in tp_list):
                # Assign the first matching GT box to this prediction.
                tp_index = [tp[0] for tp in tp_list if tp[1]][0]
                df_pred_boxes.at[index, "TP"] = 1
                df_pred_boxes.at[index, "GTID"] = df_gt_boxes.at[tp_index, "GTID"]
        # DataFrame.append was removed in pandas 2.0 -> pd.concat.
        df_eval = pd.concat([df_eval, df_pred_boxes], ignore_index=True, sort=False)
    return df_eval
def pred2boxes(pred, threshold=None):
    """Decode a YOLO grid prediction into center-point boxes.

    ``pred`` is a (5, H, W) array: objectness, y/x offsets and height/width
    scales. Returns a DataFrame (Score, X, Y, Width, Height) sorted by
    descending score. When ``threshold`` is None, a cutoff relative to the
    strongest objectness response is used.
    """
    anchor = Dataset.anchor
    cell_size = Dataset.cell_size

    np.nan_to_num(pred, copy=False)
    obj_map = pred[0]
    if threshold is None:
        threshold = min(0.001, np.max(obj_map) * 0.5)
    obj_map[obj_map < threshold] = 0
    yy, xx = np.nonzero(obj_map)

    columns = {"Score": [], "X": [], "Y": [], "Width": [], "Height": []}
    half_cell = cell_size / 2
    for row, col in zip(yy, xx):
        columns["Score"].append(pred[0, row, col])
        # Box sizes are predicted as square roots relative to the anchor.
        columns["Height"].append(int(anchor[0] * pred[3, row, col] ** 2))
        columns["Width"].append(int(anchor[1] * pred[4, row, col] ** 2))
        # Offsets shift the center within its grid cell.
        y_center = row * cell_size + half_cell + half_cell * pred[1, row, col]
        columns["Y"].append(int(y_center))
        x_center = col * cell_size + half_cell + half_cell * pred[2, row, col]
        columns["X"].append(int(x_center))

    df_boxes = pd.DataFrame(columns)
    df_boxes.sort_values(by="Score", ascending=False, inplace=True)
    return df_boxes
if __name__ == "__main__":
    # Command-line entry point: parse options for the grid search and run main().
    parser = argparse.ArgumentParser(
        description="Hyper-parameters grid search for YOLO model for cancer detection in Duke DBT volumes"
    )
    # -- training options --
    parser.add_argument(
        "--batch-size",
        type=int,
        default=16,
        help="input batch size for training (default: 16)",
    )
    parser.add_argument(
        "--epochs",
        type=int,
        default=100,
        help="number of epochs to train (default: 100)",
    )
    parser.add_argument(
        "--patience",
        type=int,
        default=25,
        help="early stopping: number of epochs to wait for improvement (default: 25)",
    )
    parser.add_argument(
        "--lr", type=float, default=0.001, help="initial learning rate (default: 0.001)"
    )
    parser.add_argument(
        "--loc-weight",
        type=float,
        default=0.5,
        help="weight of localization loss (default: 0.5)",
    )
    parser.add_argument(
        "--device",
        type=str,
        default="cuda:1",
        help="device for training (default: cuda:1)",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=4,
        help="number of workers for data loading (default: 4)",
    )
    # -- data locations --
    parser.add_argument(
        "--data-views",
        type=str,
        default="/data/data_train_v2.csv",
        help="csv file listing training views together with category label",
    )
    parser.add_argument(
        "--data-boxes",
        type=str,
        default="/data/bboxes_v2.csv",
        help="csv file defining ground truth bounding boxes",
    )
    parser.add_argument(
        "--images",
        type=str,
        default="/data/TomoImagesPP/",
        help="root folder with preprocessed images",
    )
    # -- dataset options --
    parser.add_argument(
        "--seed",
        type=int,
        default=42,
        help="random seed for validation split (default: 42)",
    )
    parser.add_argument(
        "--downscale",
        type=int,
        default=2,
        help="input image downscale factor (default 2)",
    )
    # -- mlflow bookkeeping --
    parser.add_argument(
        "--experiment-name",
        type=str,
        default="0",
        help="experiment name for new mlflow (default: 0)",
    )
    parser.add_argument(
        "--experiment-id",
        type=str,
        default=None,
        help="experiment id to restore in-progress mlflow experiment (default: None)",
    )
    parser.add_argument(
        "--mlruns-path",
        type=str,
        default="/data/mlruns",
        help="path for mlflow results (default: /data/mlruns)",
    )
    parser.add_argument(
        "--slice-offset",
        type=int,
        default=0,
        help="maximum offset from central slice to consider as GT bounding box (default: 0)",
    )
    parser.add_argument(
        "--only-biopsied",
        default=True,  # set to true by default for convenience
        action="store_true",
        help="flag to use only biopsied cases",
    )
    args = parser.parse_args()
    main(args)
| [
"mlflow.create_experiment",
"matplotlib.pyplot.grid",
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"mlflow.log_param",
"torch.cuda.is_available",
"argparse.Namespace",
"numpy.arange",
"sklearn.model_selection.ParameterGrid",
"transform.transforms",
"torch.set_grad_enabled",
"argparse.ArgumentPars... | [((6499, 6585), 'sampler.TomoBatchSampler', 'TomoBatchSampler', ([], {'batch_size': 'args.batch_size', 'data_frame': 'dataset_train.data_frame'}), '(batch_size=args.batch_size, data_frame=dataset_train.\n data_frame)\n', (6515, 6585), False, 'from sampler import TomoBatchSampler\n'), ((6687, 6800), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset_train'], {'batch_sampler': 'sampler_train', 'num_workers': 'args.workers', 'worker_init_fn': 'worker_init'}), '(dataset_train, batch_sampler=sampler_train, num_workers=args.\n workers, worker_init_fn=worker_init)\n', (6697, 6800), False, 'from torch.utils.data import DataLoader\n'), ((6854, 6982), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset_valid'], {'batch_size': 'args.batch_size', 'drop_last': '(False)', 'num_workers': 'args.workers', 'worker_init_fn': 'worker_init'}), '(dataset_valid, batch_size=args.batch_size, drop_last=False,\n num_workers=args.workers, worker_init_fn=worker_init)\n', (6864, 6982), False, 'from torch.utils.data import DataLoader\n'), ((8770, 8797), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 8)'}), '(figsize=(10, 8))\n', (8780, 8797), True, 'from matplotlib import pyplot as plt\n'), ((8811, 8831), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvasAgg', (['fig'], {}), '(fig)\n', (8826, 8831), False, 'from matplotlib.backends.backend_agg import FigureCanvasAgg\n'), ((8836, 8894), 'matplotlib.pyplot.plot', 'plt.plot', (['fps', 'tpr'], {'color': 'color', 'linestyle': 'linestyle', 'lw': '(2)'}), '(fps, tpr, color=color, linestyle=linestyle, lw=2)\n', (8844, 8894), True, 'from matplotlib import pyplot as plt\n'), ((8899, 8919), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0.0, 4.0]'], {}), '([0.0, 4.0])\n', (8907, 8919), True, 'from matplotlib import pyplot as plt\n'), ((8965, 8985), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.0, 1.0]'], {}), '([0.0, 1.0])\n', (8973, 8985), True, 'from matplotlib import pyplot as plt\n'), 
((9031, 9088), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'axis': '"""both"""', 'which': '"""major"""', 'labelsize': '(16)'}), "(axis='both', which='major', labelsize=16)\n", (9046, 9088), True, 'from matplotlib import pyplot as plt\n'), ((9093, 9138), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Mean FPs per slice"""'], {'fontsize': '(24)'}), "('Mean FPs per slice', fontsize=24)\n", (9103, 9138), True, 'from matplotlib import pyplot as plt\n'), ((9143, 9181), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sensitivity"""'], {'fontsize': '(24)'}), "('Sensitivity', fontsize=24)\n", (9153, 9181), True, 'from matplotlib import pyplot as plt\n'), ((9186, 9250), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'color': '"""silver"""', 'alpha': '(0.3)', 'linestyle': '"""--"""', 'linewidth': '(1)'}), "(color='silver', alpha=0.3, linestyle='--', linewidth=1)\n", (9194, 9250), True, 'from matplotlib import pyplot as plt\n'), ((9255, 9273), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (9271, 9273), True, 'from matplotlib import pyplot as plt\n'), ((9296, 9307), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9305, 9307), True, 'from matplotlib import pyplot as plt\n'), ((9668, 9720), 'numpy.sqrt', 'np.sqrt', (['((pred_x - gt_x) ** 2 + (pred_y - gt_y) ** 2)'], {}), '((pred_x - gt_x) ** 2 + (pred_y - gt_y) ** 2)\n', (9675, 9720), True, 'import numpy as np\n'), ((10111, 10125), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (10123, 10125), True, 'import pandas as pd\n'), ((11227, 11258), 'numpy.nan_to_num', 'np.nan_to_num', (['pred'], {'copy': '(False)'}), '(pred, copy=False)\n', (11240, 11258), True, 'import numpy as np\n'), ((11407, 11425), 'numpy.nonzero', 'np.nonzero', (['obj_th'], {}), '(obj_th)\n', (11417, 11425), True, 'import numpy as np\n'), ((12115, 12136), 'pandas.DataFrame', 'pd.DataFrame', (['df_dict'], {}), '(df_dict)\n', (12127, 12136), True, 'import pandas as pd\n'), ((12267, 12400), 
'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Hyper-parameters grid search for YOLO model for cancer detection in Duke DBT volumes"""'}), "(description=\n 'Hyper-parameters grid search for YOLO model for cancer detection in Duke DBT volumes'\n )\n", (12290, 12400), False, 'import argparse\n'), ((1132, 1159), 'sklearn.model_selection.ParameterGrid', 'ParameterGrid', (['hparams_dict'], {}), '(hparams_dict)\n', (1145, 1159), False, 'from sklearn.model_selection import ParameterGrid\n'), ((1601, 1632), 'sklearn.model_selection.ParameterGrid', 'ParameterGrid', (['loss_params_dict'], {}), '(loss_params_dict)\n', (1614, 1632), False, 'from sklearn.model_selection import ParameterGrid\n'), ((1701, 1742), 'mlflow.set_tracking_uri', 'mlflow.set_tracking_uri', (['args.mlruns_path'], {}), '(args.mlruns_path)\n', (1724, 1742), False, 'import mlflow\n'), ((6231, 6262), 'mlflow.log_param', 'log_param', (['key', 'loss_param[key]'], {}), '(key, loss_param[key])\n', (6240, 6262), False, 'from mlflow import log_metric, log_param, get_artifact_uri\n'), ((6337, 6364), 'mlflow.log_param', 'log_param', (['key', 'hparam[key]'], {}), '(key, hparam[key])\n', (6346, 6364), False, 'from mlflow import log_metric, log_param, get_artifact_uri\n'), ((6636, 6666), 'numpy.random.seed', 'np.random.seed', (['(42 + worker_id)'], {}), '(42 + worker_id)\n', (6650, 6666), True, 'import numpy as np\n'), ((8599, 8610), 'numpy.max', 'np.max', (['fps'], {}), '(fps)\n', (8605, 8610), True, 'import numpy as np\n'), ((8935, 8959), 'numpy.arange', 'np.arange', (['(0.0)', '(4.5)', '(0.5)'], {}), '(0.0, 4.5, 0.5)\n', (8944, 8959), True, 'import numpy as np\n'), ((9001, 9025), 'numpy.arange', 'np.arange', (['(0.0)', '(1.1)', '(0.1)'], {}), '(0.0, 1.1, 0.1)\n', (9010, 9025), True, 'import numpy as np\n'), ((9779, 9836), 'numpy.sqrt', 'np.sqrt', (["(true_box['Width'] ** 2 + true_box['Height'] ** 2)"], {}), "(true_box['Width'] ** 2 + true_box['Height'] ** 2)\n", (9786, 9836), True, 
'import numpy as np\n'), ((10379, 10414), 'numpy.random.randint', 'np.random.randint', (['(10000000000000.0)'], {}), '(10000000000000.0)\n', (10396, 10414), True, 'import numpy as np\n'), ((1851, 1902), 'mlflow.create_experiment', 'mlflow.create_experiment', ([], {'name': 'args.experiment_name'}), '(name=args.experiment_name)\n', (1875, 1902), False, 'import mlflow\n'), ((7314, 7336), 'transform.transforms', 'transforms', ([], {'train': '(True)'}), '(train=True)\n', (7324, 7336), False, 'from transform import transforms\n'), ((7670, 7693), 'transform.transforms', 'transforms', ([], {'train': '(False)'}), '(train=False)\n', (7680, 7693), False, 'from transform import transforms\n'), ((9369, 9395), 'numpy.fromstring', 'np.fromstring', (['s', 'np.uint8'], {}), '(s, np.uint8)\n', (9382, 9395), True, 'import numpy as np\n'), ((10252, 10285), 'numpy.random.randint', 'np.random.randint', (['(100000000000.0)'], {}), '(100000000000.0)\n', (10269, 10285), True, 'import numpy as np\n'), ((769, 794), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (792, 794), False, 'import torch\n'), ((2121, 2166), 'mlflow.start_run', 'mlflow.start_run', ([], {'experiment_id': 'experiment_id'}), '(experiment_id=experiment_id)\n', (2137, 2166), False, 'import mlflow\n'), ((11337, 11351), 'numpy.max', 'np.max', (['obj_th'], {}), '(obj_th)\n', (11343, 11351), True, 'import numpy as np\n'), ((2271, 2341), 'dense_yolo.DenseYOLO', 'DenseYOLO', ([], {'img_channels': '(1)', 'out_channels': 'Dataset.out_channels'}), '(img_channels=1, out_channels=Dataset.out_channels, **hparam)\n', (2280, 2341), False, 'from dense_yolo import DenseYOLO\n'), ((2585, 2625), 'loss.LocalizationLoss', 'LocalizationLoss', ([], {'weight': 'args.loc_weight'}), '(weight=args.loc_weight)\n', (2601, 2625), False, 'from loss import objectness_module, LocalizationLoss\n'), ((5780, 5808), 'mlflow.log_metric', 'log_metric', (['"""TPR2"""', 'run_tpr2'], {}), "('TPR2', run_tpr2)\n", (5790, 5808), False, 'from 
mlflow import log_metric, log_param, get_artifact_uri\n'), ((5829, 5857), 'mlflow.log_metric', 'log_metric', (['"""TPR1"""', 'run_tpr1'], {}), "('TPR1', run_tpr1)\n", (5839, 5857), False, 'from mlflow import log_metric, log_param, get_artifact_uri\n'), ((5878, 5904), 'mlflow.log_metric', 'log_metric', (['"""AUC"""', 'run_auc'], {}), "('AUC', run_auc)\n", (5888, 5904), False, 'from mlflow import log_metric, log_param, get_artifact_uri\n'), ((2490, 2522), 'argparse.Namespace', 'argparse.Namespace', ([], {}), '(**loss_param)\n', (2508, 2522), False, 'import argparse\n'), ((3310, 3324), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3322, 3324), True, 'import pandas as pd\n'), ((4903, 4927), 'numpy.interp', 'np.interp', (['(2.0)', 'fps', 'tpr'], {}), '(2.0, fps, tpr)\n', (4912, 4927), True, 'import numpy as np\n'), ((4973, 4997), 'numpy.interp', 'np.interp', (['(1.0)', 'fps', 'tpr'], {}), '(1.0, fps, tpr)\n', (4982, 4997), True, 'import numpy as np\n'), ((3648, 3688), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (["(phase == 'train')"], {}), "(phase == 'train')\n", (3670, 3688), False, 'import torch\n'), ((5281, 5299), 'numpy.trapz', 'np.trapz', (['tpr', 'fps'], {}), '(tpr, fps)\n', (5289, 5299), True, 'import numpy as np\n'), ((4393, 4416), 'numpy.sum', 'np.sum', (['y_true_np[:, 0]'], {}), '(y_true_np[:, 0])\n', (4399, 4416), True, 'import numpy as np\n'), ((5460, 5478), 'mlflow.get_artifact_uri', 'get_artifact_uri', ([], {}), '()\n', (5476, 5478), False, 'from mlflow import log_metric, log_param, get_artifact_uri\n'), ((5627, 5645), 'mlflow.get_artifact_uri', 'get_artifact_uri', ([], {}), '()\n', (5643, 5645), False, 'from mlflow import log_metric, log_param, get_artifact_uri\n')] |
from server_commands.argument_helpers import TunableInstanceParam, get_tunable_instance
import services
import sims4.commands
ZONE_MODIFIER_CAP = 3
@sims4.commands.Command('zone_modifier.add_zone_modifier', command_type=sims4.commands.CommandType.DebugOnly)
def add_zone_modifier(zone_modifier:TunableInstanceParam(sims4.resources.Types.ZONE_MODIFIER), target_zone_id:int=None, _connection=None):
    """Add a zone modifier (lot trait) to a zone.

    Refuses when the lot already carries ZONE_MODIFIER_CAP traits or when the
    trait is already present.
    """
    if target_zone_id is None:
        target_zone_id = services.current_zone_id()
    persistence_service = services.get_persistence_service()
    # NOTE(review): the proto is always fetched for the *current* zone even
    # when target_zone_id is supplied — confirm this is intended.
    zone_data = persistence_service.get_zone_proto_buff(services.current_zone_id())
    if zone_data is None:
        return
    # >= instead of the original ==: an already over-cap lot (e.g. from stale
    # save data) must never be allowed to grow further.
    if len(zone_data.lot_traits) >= ZONE_MODIFIER_CAP:
        sims4.commands.output('There are already {} lot traits on the lot. Remove one first.'.format(ZONE_MODIFIER_CAP), _connection)
        return
    zone_modifier_id = zone_modifier.guid64
    if zone_modifier_id in zone_data.lot_traits:
        sims4.commands.output('{} is already a trait on the lot.'.format(zone_modifier), _connection)
        return
    zone_data.lot_traits.append(zone_modifier_id)
    services.get_zone_modifier_service().check_for_and_apply_new_zone_modifiers(target_zone_id)
@sims4.commands.Command('zone_modifier.remove_zone_modifier', command_type=sims4.commands.CommandType.DebugOnly)
def remove_zone_modifier(zone_modifier:TunableInstanceParam(sims4.resources.Types.ZONE_MODIFIER), target_zone_id:int=None, _connection=None):
    """Remove a zone modifier (lot trait) from a zone."""
    if target_zone_id is None:
        target_zone_id = services.current_zone_id()
    zone_data = services.get_persistence_service().get_zone_proto_buff(services.current_zone_id())
    if zone_data is None:
        return
    trait_id = zone_modifier.guid64
    if trait_id not in zone_data.lot_traits:
        sims4.commands.output('{} is not a trait on the lot.'.format(zone_modifier), _connection)
        return
    zone_data.lot_traits.remove(trait_id)
    services.get_zone_modifier_service().check_for_and_apply_new_zone_modifiers(target_zone_id)
@sims4.commands.Command('zone_modifier.remove_all_zone_modifiers', command_type=sims4.commands.CommandType.DebugOnly)
def remove_all_zone_modifiers(target_zone_id:int=None, _connection=None):
    """Strip every zone modifier (lot trait) from a zone."""
    if target_zone_id is None:
        target_zone_id = services.current_zone_id()
    zone_data = services.get_persistence_service().get_zone_proto_buff(services.current_zone_id())
    if zone_data is None:
        return
    # Iterate over a snapshot — removing from the live field while iterating
    # it would skip entries.
    for trait_id in list(zone_data.lot_traits):
        zone_data.lot_traits.remove(trait_id)
    services.get_zone_modifier_service().check_for_and_apply_new_zone_modifiers(target_zone_id)
def run_zone_modifier_entry(zone_modifier, schedule_entry_index, _connection):
    """Run one schedule entry of a zone modifier that is active on the lot."""
    zone_data = services.get_persistence_service().get_zone_proto_buff(services.current_zone_id())
    if zone_data is None:
        return
    if zone_modifier.guid64 not in zone_data.lot_traits:
        sims4.commands.output('{} is not a trait on the lot.'.format(zone_modifier), _connection)
        return
    entries = zone_modifier.schedule.schedule_entries
    index = int(schedule_entry_index)
    if not 0 <= index < len(entries):
        sims4.commands.output('{} is an invalid schedule entry index.'.format(index), _connection)
        return
    services.get_zone_modifier_service().run_zone_modifier_schedule_entry(entries[index])
@sims4.commands.Command('zone_modifier.run_schedule_entry', command_type=sims4.commands.CommandType.DebugOnly)
def run_schedule_entry(zone_modifier:TunableInstanceParam(sims4.resources.Types.ZONE_MODIFIER), schedule_entry_index, _connection=None):
    """Console-command wrapper around run_zone_modifier_entry."""
    return run_zone_modifier_entry(zone_modifier, schedule_entry_index, _connection)
@sims4.commands.Command('volcanic_eruption', command_type=sims4.commands.CommandType.Live)
def volcanic_eruption(eruption_size, _connection=None):
    """Trigger a volcanic eruption of the given size ('large' or 'small')."""
    # Maps eruption size to the schedule entry index of the volcanic lot trait.
    entry_by_size = {'large': '0', 'small': '1'}
    volcanic_trait = get_tunable_instance(sims4.resources.Types.ZONE_MODIFIER, 'zoneModifier_lotTrait_VolcanicActivity')
    if volcanic_trait is None:
        return
    entry_index = entry_by_size.get(eruption_size.lower())
    if entry_index is None:
        return
    run_zone_modifier_entry(volcanic_trait, entry_index, _connection)
| [
"services.current_zone_id",
"server_commands.argument_helpers.get_tunable_instance",
"services.get_zone_modifier_service",
"services.get_persistence_service",
"server_commands.argument_helpers.TunableInstanceParam"
] | [((507, 541), 'services.get_persistence_service', 'services.get_persistence_service', ([], {}), '()\n', (539, 541), False, 'import services\n'), ((1593, 1627), 'services.get_persistence_service', 'services.get_persistence_service', ([], {}), '()\n', (1625, 1627), False, 'import services\n'), ((2411, 2445), 'services.get_persistence_service', 'services.get_persistence_service', ([], {}), '()\n', (2443, 2445), False, 'import services\n'), ((2901, 2935), 'services.get_persistence_service', 'services.get_persistence_service', ([], {}), '()\n', (2933, 2935), False, 'import services\n'), ((3566, 3602), 'services.get_zone_modifier_service', 'services.get_zone_modifier_service', ([], {}), '()\n', (3600, 3602), False, 'import services\n'), ((4245, 4348), 'server_commands.argument_helpers.get_tunable_instance', 'get_tunable_instance', (['sims4.resources.Types.ZONE_MODIFIER', '"""zoneModifier_lotTrait_VolcanicActivity"""'], {}), "(sims4.resources.Types.ZONE_MODIFIER,\n 'zoneModifier_lotTrait_VolcanicActivity')\n", (4265, 4348), False, 'from server_commands.argument_helpers import TunableInstanceParam, get_tunable_instance\n'), ((295, 352), 'server_commands.argument_helpers.TunableInstanceParam', 'TunableInstanceParam', (['sims4.resources.Types.ZONE_MODIFIER'], {}), '(sims4.resources.Types.ZONE_MODIFIER)\n', (315, 352), False, 'from server_commands.argument_helpers import TunableInstanceParam, get_tunable_instance\n'), ((454, 480), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (478, 480), False, 'import services\n'), ((598, 624), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (622, 624), False, 'import services\n'), ((1381, 1438), 'server_commands.argument_helpers.TunableInstanceParam', 'TunableInstanceParam', (['sims4.resources.Types.ZONE_MODIFIER'], {}), '(sims4.resources.Types.ZONE_MODIFIER)\n', (1401, 1438), False, 'from server_commands.argument_helpers import TunableInstanceParam, get_tunable_instance\n'), ((1540, 
1566), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (1564, 1566), False, 'import services\n'), ((1684, 1710), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (1708, 1710), False, 'import services\n'), ((2358, 2384), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (2382, 2384), False, 'import services\n'), ((2502, 2528), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (2526, 2528), False, 'import services\n'), ((2992, 3018), 'services.current_zone_id', 'services.current_zone_id', ([], {}), '()\n', (3016, 3018), False, 'import services\n'), ((3836, 3893), 'server_commands.argument_helpers.TunableInstanceParam', 'TunableInstanceParam', (['sims4.resources.Types.ZONE_MODIFIER'], {}), '(sims4.resources.Types.ZONE_MODIFIER)\n', (3856, 3893), False, 'from server_commands.argument_helpers import TunableInstanceParam, get_tunable_instance\n'), ((1136, 1172), 'services.get_zone_modifier_service', 'services.get_zone_modifier_service', ([], {}), '()\n', (1170, 1172), False, 'import services\n'), ((2017, 2053), 'services.get_zone_modifier_service', 'services.get_zone_modifier_service', ([], {}), '()\n', (2051, 2053), False, 'import services\n'), ((2703, 2739), 'services.get_zone_modifier_service', 'services.get_zone_modifier_service', ([], {}), '()\n', (2737, 2739), False, 'import services\n')] |
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop1 7c1b0f30-cf34-459f-9a70-2612f82b870a
/dev/loop9 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop9 7c1b4444-cf34-459f-9a70-2612f82b870a
/dev/mapper/docker-253:1-1050967-pool
/dev/loop2
/dev/mapper/docker-253:1-1050967-pool
"""
LSBLK_OUTPUT_2 = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/mapper/an-example-mapper with a space in the name 84639acb-013f-4d2f-9392-526a572b4373
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
"""
LSBLK_UUIDS = {'/dev/sda1': '66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK'}
UDEVADM_UUID = 'N/A'
UDEVADM_OUTPUT = """
UDEV_LOG=3
DEVPATH=/devices/pci0000:00/0000:00:07.0/virtio2/block/vda/vda1
MAJOR=252
MINOR=1
DEVNAME=/dev/vda1
DEVTYPE=partition
SUBSYSTEM=block
MPATH_SBIN_PATH=/sbin
ID_PATH=pci-0000:00:07.0-virtio-pci-virtio2
ID_PART_TABLE_TYPE=dos
ID_FS_UUID=57b1a3e7-9019-4747-9809-7ec52bba9179
ID_FS_UUID_ENC=57b1a3e7-9019-4747-9809-7ec52bba9179
ID_FS_VERSION=1.0
ID_FS_TYPE=ext4
ID_FS_USAGE=filesystem
LVM_SBIN_PATH=/sbin
DEVLINKS=/dev/block/252:1 /dev/disk/by-path/pci-0000:00:07.0-virtio-pci-virtio2-part1 /dev/disk/by-uuid/57b1a3e7-9019-4747-9809-7ec52bba9179
"""
MTAB = """
sysfs /sys sysfs rw,seclabel,nosuid,nodev,noexec,relatime 0 0
proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
devtmpfs /dev devtmpfs rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755 0 0
securityfs /sys/kernel/security securityfs rw,nosuid,nodev,noexec,relatime 0 0
tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0
devpts /dev/pts devpts rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
tmpfs /run tmpfs rw,seclabel,nosuid,nodev,mode=755 0 0
tmpfs /sys/fs/cgroup tmpfs ro,seclabel,nosuid,nodev,noexec,mode=755 0 0
cgroup /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd 0 0
pstore /sys/fs/pstore pstore rw,seclabel,nosuid,nodev,noexec,relatime 0 0
cgroup /sys/fs/cgroup/devices cgroup rw,nosuid,nodev,noexec,relatime,devices 0 0
cgroup /sys/fs/cgroup/freezer cgroup rw,nosuid,nodev,noexec,relatime,freezer 0 0
cgroup /sys/fs/cgroup/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
cgroup /sys/fs/cgroup/pids cgroup rw,nosuid,nodev,noexec,relatime,pids 0 0
cgroup /sys/fs/cgroup/blkio cgroup rw,nosuid,nodev,noexec,relatime,blkio 0 0
cgroup /sys/fs/cgroup/cpuset cgroup rw,nosuid,nodev,noexec,relatime,cpuset 0 0
cgroup /sys/fs/cgroup/cpu,cpuacct cgroup rw,nosuid,nodev,noexec,relatime,cpu,cpuacct 0 0
cgroup /sys/fs/cgroup/hugetlb cgroup rw,nosuid,nodev,noexec,relatime,hugetlb 0 0
cgroup /sys/fs/cgroup/perf_event cgroup rw,nosuid,nodev,noexec,relatime,perf_event 0 0
cgroup /sys/fs/cgroup/net_cls,net_prio cgroup rw,nosuid,nodev,noexec,relatime,net_cls,net_prio 0 0
configfs /sys/kernel/config configfs rw,relatime 0 0
/dev/mapper/fedora_dhcp129--186-root / ext4 rw,seclabel,relatime,data=ordered 0 0
selinuxfs /sys/fs/selinux selinuxfs rw,relatime 0 0
systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
/dev/loop0 /var/lib/machines btrfs rw,seclabel,relatime,space_cache,subvolid=5,subvol=/ 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
/dev/mapper/fedora_dhcp129--186-home /home ext4 rw,seclabel,relatime,data=ordered 0 0
tmpfs /run/user/1000 tmpfs rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000 0 0
gvfsd-fuse /run/user/1000/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
fusectl /sys/fs/fuse/connections fusectl rw,relatime 0 0
grimlock.g.a: /home/adrian/sshfs-grimlock fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:test_path/path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2 fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
"""
MTAB_ENTRIES = [
[
'sysfs',
'/sys',
'sysfs',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
[
'devtmpfs',
'/dev',
'devtmpfs',
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
'0',
'0'
],
[
'securityfs',
'/sys/kernel/security',
'securityfs',
'rw,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
[
'devpts',
'/dev/pts',
'devpts',
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
'0',
'0'
],
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
[
'tmpfs',
'/sys/fs/cgroup',
'tmpfs',
'ro,seclabel,nosuid,nodev,noexec,mode=755',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/systemd',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
'0',
'0'
],
[
'pstore',
'/sys/fs/pstore',
'pstore',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/devices',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,devices',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/freezer',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,freezer',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/memory',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,memory',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/pids',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,pids',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/blkio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,blkio',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpuset',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpuset',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpu,cpuacct',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/hugetlb',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,hugetlb',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/perf_event',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,perf_event',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/net_cls,net_prio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio',
'0',
'0'
],
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-root',
'/',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
[
'systemd-1',
'/proc/sys/fs/binfmt_misc',
'autofs',
'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct',
'0',
'0'
],
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
[
'hugetlbfs',
'/dev/hugepages',
'hugetlbfs',
'rw,seclabel,relatime',
'0',
'0'
],
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
[
'/dev/loop0',
'/var/lib/machines',
'btrfs',
'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/',
'0',
'0'
],
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# A 'none' fstype
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# lets assume this is a bindmount
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-home',
'/home',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
[
'tmpfs',
'/run/user/1000',
'tmpfs',
'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000',
'0',
'0'
],
[
'gvfsd-fuse',
'/run/user/1000/gvfs',
'fuse.gvfsd-fuse',
'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000',
'0',
'0'
],
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0']]
STATVFS_INFO = {'/': {'block_available': 10192323,
'block_size': 4096,
'block_total': 12868728,
'block_used': 2676405,
'inode_available': 3061699,
'inode_total': 3276800,
'inode_used': 215101,
'size_available': 41747755008,
'size_total': 52710309888},
'/not/a/real/bind_mount': {},
'/home': {'block_available': 1001578731,
'block_size': 4096,
'block_total': 105871006,
'block_used': 5713133,
'inode_available': 26860880,
'inode_total': 26902528,
'inode_used': 41648,
'size_available': 410246647808,
'size_total': 433647640576},
'/var/lib/machines': {'block_available': 10192316,
'block_size': 4096,
'block_total': 12868728,
'block_used': 2676412,
'inode_available': 3061699,
'inode_total': 3276800,
'inode_used': 215101,
'size_available': 41747726336,
'size_total': 52710309888},
'/boot': {'block_available': 187585,
'block_size': 4096,
'block_total': 249830,
'block_used': 62245,
'inode_available': 65096,
'inode_total': 65536,
'inode_used': 440,
'size_available': 768348160,
'size_total': 1023303680}
}
# ['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
BIND_MOUNTS = ['/not/a/real/bind_mount']
CPU_INFO_TEST_SCENARIOS = [
{
'architecture': 'armv61',
'nproc_out': 1,
'sched_getaffinity': set([0]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv6-rev7-1cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': ['0', 'ARMv6-compatible processor rev 7 (v6l)'],
'processor_cores': 1,
'processor_count': 1,
'processor_nproc': 1,
'processor_threads_per_core': 1,
'processor_vcpus': 1},
},
{
'architecture': 'armv71',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev4-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 4 (v7l)',
'1', 'ARMv7 Processor rev 4 (v7l)',
'2', 'ARMv7 Processor rev 4 (v7l)',
'3', 'ARMv7 Processor rev 4 (v7l)',
],
'processor_cores': 1,
'processor_count': 4,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'aarch64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/aarch64-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'AArch64 Processor rev 4 (aarch64)',
'1', 'AArch64 Processor rev 4 (aarch64)',
'2', 'AArch64 Processor rev 4 (aarch64)',
'3', 'AArch64 Processor rev 4 (aarch64)',
],
'processor_cores': 1,
'processor_count': 4,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
'1', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
'2', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
'3', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
],
'processor_cores': 2,
'processor_count': 2,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-8cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'1', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'2', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'3', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'4', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'5', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'6', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'7', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
],
'processor_cores': 4,
'processor_count': 1,
'processor_nproc': 4,
'processor_threads_per_core': 2,
'processor_vcpus': 8},
},
{
'architecture': 'arm64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/arm64-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': ['0', '1', '2', '3'],
'processor_cores': 1,
'processor_count': 4,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'armv71',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev3-8cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 3 (v7l)',
'1', 'ARMv7 Processor rev 3 (v7l)',
'2', 'ARMv7 Processor rev 3 (v7l)',
'3', 'ARMv7 Processor rev 3 (v7l)',
'4', 'ARMv7 Processor rev 3 (v7l)',
'5', 'ARMv7 Processor rev 3 (v7l)',
'6', 'ARMv7 Processor rev 3 (v7l)',
'7', 'ARMv7 Processor rev 3 (v7l)',
],
'processor_cores': 1,
'processor_count': 8,
'processor_nproc': 8,
'processor_threads_per_core': 1,
'processor_vcpus': 8},
},
{
'architecture': 'x86_64',
'nproc_out': 2,
'sched_getaffinity': set([0, 1]),
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-2cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Xeon(R) CPU E5-2680 v2 @ 2.80GHz',
'1', 'GenuineIntel', 'Intel(R) Xeon(R) CPU E5-2680 v2 @ 2.80GHz',
],
'processor_cores': 1,
'processor_count': 2,
'processor_nproc': 2,
'processor_threads_per_core': 1,
'processor_vcpus': 2},
},
{
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64-power7-rhel7-8cpu-cpuinfo')).readlines(),
'architecture': 'ppc64',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
'expected_result': {
'processor': [
'0', 'POWER7 (architected), altivec supported',
'1', 'POWER7 (architected), altivec supported',
'2', 'POWER7 (architected), altivec supported',
'3', 'POWER7 (architected), altivec supported',
'4', 'POWER7 (architected), altivec supported',
'5', 'POWER7 (architected), altivec supported',
'6', 'POWER7 (architected), altivec supported',
'7', 'POWER7 (architected), altivec supported'
],
'processor_cores': 1,
'processor_count': 8,
'processor_nproc': 8,
'processor_threads_per_core': 1,
'processor_vcpus': 8
},
},
{
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64le-power8-24cpu-cpuinfo')).readlines(),
'architecture': 'ppc64le',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
'expected_result': {
'processor': [
'0', 'POWER8 (architected), altivec supported',
'1', 'POWER8 (architected), altivec supported',
'2', 'POWER8 (architected), altivec supported',
'3', 'POWER8 (architected), altivec supported',
'4', 'POWER8 (architected), altivec supported',
'5', 'POWER8 (architected), altivec supported',
'6', 'POWER8 (architected), altivec supported',
'7', 'POWER8 (architected), altivec supported',
'8', 'POWER8 (architected), altivec supported',
'9', 'POWER8 (architected), altivec supported',
'10', 'POWER8 (architected), altivec supported',
'11', 'POWER8 (architected), altivec supported',
'12', 'POWER8 (architected), altivec supported',
'13', 'POWER8 (architected), altivec supported',
'14', 'POWER8 (architected), altivec supported',
'15', 'POWER8 (architected), altivec supported',
'16', 'POWER8 (architected), altivec supported',
'17', 'POWER8 (architected), altivec supported',
'18', 'POWER8 (architected), altivec supported',
'19', 'POWER8 (architected), altivec supported',
'20', 'POWER8 (architected), altivec supported',
'21', 'POWER8 (architected), altivec supported',
'22', 'POWER8 (architected), altivec supported',
'23', 'POWER8 (architected), altivec supported',
],
'processor_cores': 1,
'processor_count': 24,
'processor_nproc': 24,
'processor_threads_per_core': 1,
'processor_vcpus': 24
},
},
{
'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/sparc-t5-debian-ldom-24vcpu')).readlines(),
'architecture': 'sparc64',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
'expected_result': {
'processor': [
'UltraSparc T5 (Niagara5)',
],
'processor_cores': 1,
'processor_count': 24,
'processor_nproc': 24,
'processor_threads_per_core': 1,
'processor_vcpus': 24
},
},
]
| [
"os.path.dirname"
] | [((13231, 13256), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (13246, 13256), False, 'import os\n'), ((13760, 13785), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (13775, 13785), False, 'import os\n'), ((14466, 14491), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (14481, 14491), False, 'import os\n'), ((15192, 15217), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (15207, 15217), False, 'import os\n'), ((16009, 16034), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (16024, 16034), False, 'import os\n'), ((17157, 17182), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (17172, 17182), False, 'import os\n'), ((17666, 17691), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (17681, 17691), False, 'import os\n'), ((18573, 18598), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (18588, 18598), False, 'import os\n'), ((19124, 19149), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (19139, 19149), False, 'import os\n'), ((20160, 20185), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (20175, 20185), False, 'import os\n'), ((22300, 22325), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (22315, 22325), False, 'import os\n')] |
import numpy as np
def point_to_seg(x1, x2) -> np.ndarray:
    '''
    Method:
    -------
    Transform 2 points into a parametrized segment [c_x, c_y, r, phi].
    Implicitly phi is in [-pi/2; pi/2], it is the oriented angle the
    segment makes with the horizontal line passing through its middle c.

    Parameters
    ----------
    x1, x2: array-like endpoints; the first two components are the 2-d
        coordinates (extra components, if any, still contribute to r —
        presumably intentional, TODO confirm).

    Returns
    -------
    np.ndarray of shape (4,): [c_x, c_y, r, phi].

    Raises
    ------
    ValueError: if the two points coincide (phi undefined), or if the
        computed phi falls outside [-pi/2, pi/2].
    '''
    c = (x1[:2] + x2[:2])/2
    # TODO: funny could define different topologies to explore.
    sq_len = np.sum((x2 - x1)**2)
    if sq_len == 0:
        # Previously this only printed a warning and then divided by zero
        # below (r == 0), silently producing NaN; fail fast instead.
        raise ValueError('x1 and x2 coincide: segment has zero length, phi undefined')
    r = np.sqrt(sq_len)
    # Orient the angle: positive when the segment rises left-to-right,
    # zero when the segment is horizontal or vertical (one sign is 0).
    sign = np.sign(x2[0] - x1[0]) * np.sign(x2[1] - x1[1])
    phi = sign * np.arccos(np.abs(x2[0]-x1[0])/r)
    if phi < - np.pi/2 or phi > np.pi/2:
        raise ValueError('the value of phi is not in [-pi/2, pi/2] but it {}'.format(phi))
    res = np.hstack([c, r, phi])
    return res
def seg_to_point(seg) -> (np.ndarray, np.ndarray):
    '''Inverse of point_to_seg: turn seg = (c_x, c_y, r, phi) back into
    the tuple of its two 2-d endpoints.'''
    # Unpack the parametrization: centre, length, oriented angle.
    center = seg[:2]
    length = seg[2]
    angle = seg[3]
    half_dx = np.abs(np.cos(angle) * length / 2)
    half_dy = np.abs(np.sin(angle) * length / 2)
    # The vertical half-offset carries the sign of the angle.
    half_step = np.array([half_dx, np.sign(angle) * half_dy])
    return (center - half_step, center + half_step)
| [
"numpy.abs",
"numpy.hstack",
"numpy.sum",
"numpy.cos",
"numpy.sign",
"numpy.sin"
] | [((793, 815), 'numpy.hstack', 'np.hstack', (['[c, r, phi]'], {}), '([c, r, phi])\n', (802, 815), True, 'import numpy as np\n'), ((395, 417), 'numpy.sum', 'np.sum', (['((x2 - x1) ** 2)'], {}), '((x2 - x1) ** 2)\n', (401, 417), True, 'import numpy as np\n'), ((472, 494), 'numpy.sum', 'np.sum', (['((x2 - x1) ** 2)'], {}), '((x2 - x1) ** 2)\n', (478, 494), True, 'import numpy as np\n'), ((553, 575), 'numpy.sign', 'np.sign', (['(x2[0] - x1[0])'], {}), '(x2[0] - x1[0])\n', (560, 575), True, 'import numpy as np\n'), ((578, 600), 'numpy.sign', 'np.sign', (['(x2[1] - x1[1])'], {}), '(x2[1] - x1[1])\n', (585, 600), True, 'import numpy as np\n'), ((628, 649), 'numpy.abs', 'np.abs', (['(x2[0] - x1[0])'], {}), '(x2[0] - x1[0])\n', (634, 649), True, 'import numpy as np\n'), ((1014, 1025), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (1020, 1025), True, 'import numpy as np\n'), ((1047, 1058), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (1053, 1058), True, 'import numpy as np\n'), ((1091, 1103), 'numpy.sign', 'np.sign', (['phi'], {}), '(phi)\n', (1098, 1103), True, 'import numpy as np\n'), ((1136, 1148), 'numpy.sign', 'np.sign', (['phi'], {}), '(phi)\n', (1143, 1148), True, 'import numpy as np\n')] |
#!/usr/bin/python
from __future__ import absolute_import
from __future__ import print_function
import sys, os
import syslog
try:
import time
import daemon
import pwd
from . import pid
djangopath = os.path.join(os.path.dirname(sys.argv[0]), "../../")
sys.path.append(djangopath)
os.environ['DJANGO_SETTINGS_MODULE'] = "settings"
from main.models import Project, Task, ProjectUpload
from main.types import type_list
import traceback
from django.db import transaction
except Exception as e:
syslog.syslog(syslog.LOG_ERR, "Failed importing %s" % e)
raise e
@transaction.commit_on_success
def run_upload(upload):
    """Process a single ProjectUpload inside one database transaction.

    Casts the upload's project to its concrete project type (looked up in
    ``type_list``) and, if that type defines an input handler, delegates
    the upload to it. Rolled back as a whole if the handler raises.
    """
    # Renamed from `type` to avoid shadowing the builtin.
    project_type = type_list[upload.project.type]
    project = project_type.cast(upload.project)
    if project.handle_input:
        project.handle_input(upload)
def check_uploads():
    """Pick up one incomplete upload, run it, and record the outcome.

    On failure the traceback is logged to syslog and stored on the upload;
    the upload is marked complete either way so it is not retried.
    """
    pending = ProjectUpload.objects.filter(complete=False)
    if not pending.count():
        return
    upload = pending[0]
    print("Running %s" % upload.id)
    error = None
    try:
        run_upload(upload)
    except Exception as exc:
        trace = "".join(traceback.format_tb(sys.exc_info()[2]))
        error = "Exception Type: %s, Text: %s\nTraceback:\n%s" % (
            type(exc),
            str(exc),
            trace,
        )
    upload.complete = True
    if error:
        syslog.syslog(syslog.LOG_ERR, error)
        upload.error = error
    upload.full_clean()
    upload.save()
    print("Done %s" % upload.id)
def main_loop():
    """Poll for pending uploads indefinitely, pausing between rounds."""
    POLL_SECONDS = 10
    while True:
        check_uploads()
        time.sleep(POLL_SECONDS)
if __name__ == "__main__":
try:
assert os.getuid() == 0, "Must be run as root"
# is there something better here?
apache = pwd.getpwnam('apache')
# make a pid, run as apache
context = daemon.DaemonContext(
working_directory='/usr/lib/clickwork/',
umask=0o002,
pidfile=pid.PidFile('/var/run/taskfactory/pid'),
uid=apache[2],
gid=apache[3],
)
with context:
main_loop()
except Exception as e:
syslog.syslog(syslog.LOG_ERR, "Unhandled Exception %s - Died" % e)
raise e
| [
"main.models.ProjectUpload.objects.filter",
"pwd.getpwnam",
"os.getuid",
"time.sleep",
"syslog.syslog",
"os.path.dirname",
"sys.exc_info",
"sys.path.append"
] | [((278, 305), 'sys.path.append', 'sys.path.append', (['djangopath'], {}), '(djangopath)\n', (293, 305), False, 'import sys, os\n'), ((878, 922), 'main.models.ProjectUpload.objects.filter', 'ProjectUpload.objects.filter', ([], {'complete': '(False)'}), '(complete=False)\n', (906, 922), False, 'from main.models import Project, Task, ProjectUpload\n'), ((234, 262), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (249, 262), False, 'import sys, os\n'), ((542, 598), 'syslog.syslog', 'syslog.syslog', (['syslog.LOG_ERR', "('Failed importing %s' % e)"], {}), "(syslog.LOG_ERR, 'Failed importing %s' % e)\n", (555, 598), False, 'import syslog\n'), ((1606, 1620), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (1616, 1620), False, 'import time\n'), ((1773, 1795), 'pwd.getpwnam', 'pwd.getpwnam', (['"""apache"""'], {}), "('apache')\n", (1785, 1795), False, 'import pwd\n'), ((1381, 1417), 'syslog.syslog', 'syslog.syslog', (['syslog.LOG_ERR', 'error'], {}), '(syslog.LOG_ERR, error)\n', (1394, 1417), False, 'import syslog\n'), ((1674, 1685), 'os.getuid', 'os.getuid', ([], {}), '()\n', (1683, 1685), False, 'import sys, os\n'), ((2157, 2223), 'syslog.syslog', 'syslog.syslog', (['syslog.LOG_ERR', "('Unhandled Exception %s - Died' % e)"], {}), "(syslog.LOG_ERR, 'Unhandled Exception %s - Died' % e)\n", (2170, 2223), False, 'import syslog\n'), ((1146, 1160), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1158, 1160), False, 'import sys, os\n')] |
# -*- coding: utf-8 -*-
# Copyright (C) 2010-2016 <NAME> All rights reserved
# Langstrasse 4, A--2244 Spannberg, Austria. <EMAIL>
# ****************************************************************************
# This module is part of the package GTW.__test__.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# ****************************************************************************
#
#++
# Name
# GTW.__test__.Document_Link
#
# Purpose
# Test how generic links to object without a relevant root work,
#
# Revision Dates
# 27-Oct-2010 (MG) Creation
# 29-Mar-2012 (CT) Add test for `link_map`
# 4-Jun-2012 (MG) Test for query with order_by added
# 6-Jun-2012 (CT) Add test for `Entity_created_by_Person.sort_key`
# 12-Jun-2012 (CT) Add `date` to get deterministic output
# 3-Aug-2012 (CT) Add tests for `Ref_Req_Map` and `Ref_Opt_Map`
# 3-Aug-2012 (CT) Use `Ref_Req_Map`, not `link_map`
# 12-Sep-2012 (RS) Add `Id_Entity`
# 6-Dec-2012 (CT) Add `PAP.Person_has_Account`
# 20-Jan-2013 (CT) Add `Auth.Certificate`
# 4-Mar-2013 (CT) Add `PAP.Legal_Entity`
# 28-Jul-2013 (CT) Replace `tn_pid` by `type_name` and `pid`
# 13-Jun-2014 (RS) Fix tests for `PAP.Group`
# 5-May-2016 (CT) Add `date_cleaner`, use `A_Date.now`
# 6-May-2016 (CT) Add test for `start__not_in_past`, `playback_p`
# ««revision-date»»···
#--
from _TFL.Regexp import Re_Replacer, re
# Replaces the concrete ISO 'start' date in captured test output (the
# fixture uses A_Date.now, so the value changes daily) with a stable
# placeholder, keeping the doctest deterministic.
date_cleaner = Re_Replacer \
    ( r"'start', '\d{4}-\d{2}-\d{2}'"
    , r"'start', <date instance>"
    )
test_code = r"""
>>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS
Creating new scope MOMT__...
>>> date = (("start", A_Date.as_string (A_Date.now ())), )
>>> MOM = scope.MOM
>>> PAP = scope.PAP
>>> SWP = scope.SWP
>>> per = PAP.Person ("ln", "fn")
>>> pa1 = SWP.Page ("title_1", text = "text 1", date = date, raw = True)
>>> pa2 = SWP.Page ("title_2", text = "text 2", date = date, raw = True)
>>> pa3 = SWP.Page ("title_3", text = "text 3", date = date, raw = True)
>>> scope.commit ()
>>> with expect_except (MOM.Error.Invariants) :
... pa4 = SWP.Page ("title_4", text = "text 4", date = ("2012-06-10", ), raw = True)
Invariants: Condition `start__not_in_past` : Value must be in the future, not the past (start >= now)
date.start = 2012-06-10
>>> with scope.LET (playback_p = True) :
... pa4 = SWP.Page ("title_4", text = "text 4", date = ("2012-06-10", ), raw = True)
>>> pa4
SWP.Page ('title_4')
>>> scope.rollback ()
>>> _ = MOM.Document (per, "//foo.bar/baz")
>>> _ = MOM.Document (per, "//foo.bar/qux")
>>> _ = MOM.Document (pa1, "//foo.bar/quux.jpg")
>>> scope.commit ()
>>> q = MOM.Document.query ()
>>> qs = MOM.Document.query_s ()
>>> q.order_by (Q.pid).all ()
[MOM.Document (('ln', 'fn', '', ''), '//foo.bar/baz', ''), MOM.Document (('ln', 'fn', '', ''), '//foo.bar/qux', ''), MOM.Document (('title_1', ), '//foo.bar/quux.jpg', '')]
>>> q.order_by (TFL.Sorted_By ("pid")).all ()
[MOM.Document (('ln', 'fn', '', ''), '//foo.bar/baz', ''), MOM.Document (('ln', 'fn', '', ''), '//foo.bar/qux', ''), MOM.Document (('title_1', ), '//foo.bar/quux.jpg', '')]
>>> q.order_by (MOM.Document.sorted_by).all ()
[MOM.Document (('ln', 'fn', '', ''), '//foo.bar/baz', ''), MOM.Document (('ln', 'fn', '', ''), '//foo.bar/qux', ''), MOM.Document (('title_1', ), '//foo.bar/quux.jpg', '')]
>>> qs.all ()
[MOM.Document (('ln', 'fn', '', ''), '//foo.bar/baz', ''), MOM.Document (('ln', 'fn', '', ''), '//foo.bar/qux', ''), MOM.Document (('title_1', ), '//foo.bar/quux.jpg', '')]
>>> q = scope.query_changes (type_name = "SWP.Page").order_by (Q.cid)
>>> for c in q.all () :
... print (date_cleaner (str (c)))
<Create SWP.Page ('title_1', 'SWP.Page'), new-values = {'contents' : '<p>text 1</p>\n', 'date' : (('start', <date instance>),), 'last_cid' : '2', 'text' : 'text 1'}>
<Create SWP.Page ('title_2', 'SWP.Page'), new-values = {'contents' : '<p>text 2</p>\n', 'date' : (('start', <date instance>),), 'last_cid' : '3', 'text' : 'text 2'}>
<Create SWP.Page ('title_3', 'SWP.Page'), new-values = {'contents' : '<p>text 3</p>\n', 'date' : (('start', <date instance>),), 'last_cid' : '4', 'text' : 'text 3'}>
>>> sorted (scope.MOM.Id_Entity.query ().attrs ("type_name", ))
[('MOM.Document',), ('PAP.Person',), ('SWP.Page',)]
>>> show_ref_maps (scope, "Ref_Req_Map")
MOM.Id_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM.Link
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM.Link1
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM._Link_n_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM.Link2
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM.Link3
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM.Object
('EVT.Event', ['left'])
('MOM.Document', ['left'])
MOM.Document
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Id_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Object
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth._Account_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Account
('Auth.Account_Activation', ['left'])
('Auth.Account_EMail_Verification', ['left'])
('Auth.Account_Password_Change_Required', ['left'])
('Auth.Account_Password_Reset', ['left'])
('Auth.Account_in_Group', ['left'])
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Person_has_Account', ['right'])
Auth.Certificate
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Group
('Auth.Account_in_Group', ['right'])
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Link
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth._Link_n_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Link2
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Account_in_Group
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Link1
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth._Account_Action_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Account_Activation
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Account_Password_Change_Required
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth._Account_Token_Action_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Account_EMail_Verification
('EVT.Event', ['left'])
('MOM.Document', ['left'])
Auth.Account_Password_Reset
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Id_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Object
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Calendar
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Link
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Link1
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Event
('EVT.Event', ['left'])
('EVT.Event_occurs', ['left'])
('EVT.Recurrence_Spec', ['left'])
('MOM.Document', ['left'])
EVT.Event_occurs
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT._Recurrence_Mixin_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
EVT.Recurrence_Spec
('EVT.Event', ['left'])
('EVT.Recurrence_Rule', ['left'])
('MOM.Document', ['left'])
EVT.Recurrence_Rule
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Id_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Object
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Property
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Address
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Address_Position', ['left'])
('PAP.Company_has_Address', ['right'])
('PAP.Person_has_Address', ['right'])
PAP.Subject
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Group
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Legal_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Company
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Company_has_Address', ['left'])
('PAP.Company_has_Email', ['left'])
('PAP.Company_has_Phone', ['left'])
('PAP.Company_has_Url', ['left'])
PAP.Email
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Company_has_Email', ['right'])
('PAP.Person_has_Email', ['right'])
PAP.Phone
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Company_has_Phone', ['right'])
('PAP.Person_has_Phone', ['right'])
PAP.Person
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Person_has_Account', ['left'])
('PAP.Person_has_Address', ['left'])
('PAP.Person_has_Email', ['left'])
('PAP.Person_has_Phone', ['left'])
('PAP.Person_has_Url', ['left'])
('SRM.Sailor', ['left'])
PAP.Url
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('PAP.Company_has_Url', ['right'])
('PAP.Person_has_Url', ['right'])
PAP.Link
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Link1
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Address_Position
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP._Link_n_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Link2
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Subject_has_Property
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Person_has_Account
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Id_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Object
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM._Boat_Class_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Boat_Class
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Boat', ['left'])
('SRM.Regatta_C', ['boat_class'])
SRM.Handicap
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Regatta_H', ['boat_class'])
SRM.Link
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Link1
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Boat
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Boat_in_Regatta', ['left'])
SRM.Club
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Regatta_Event
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Page', ['event'])
('SRM.Regatta_C', ['left'])
('SRM.Regatta_H', ['left'])
SWP.Id_Entity
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SWP.Object
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SWP.Object_PN
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
SWP.Page
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
SWP.Page_Y
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
SWP.Link
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SWP.Link1
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SWP.Clip_O
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SWP.Clip_X
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
SWP.Gallery
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
('SWP.Picture', ['left'])
SWP.Picture
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SWP.Referral
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
SRM.Page
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SWP.Clip_O', ['left'])
SRM.Regatta
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Boat_in_Regatta', ['right'])
SRM.Regatta_C
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Boat_in_Regatta', ['right'])
('SRM.Team', ['left'])
SRM.Regatta_H
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Boat_in_Regatta', ['right'])
SRM.Sailor
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Boat_in_Regatta', ['skipper'])
('SRM.Crew_Member', ['right'])
SRM._Link_n_
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Link2
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Boat_in_Regatta
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Crew_Member', ['left'])
('SRM.Race_Result', ['left'])
('SRM.Team_has_Boat_in_Regatta', ['right'])
SRM.Race_Result
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Team
('EVT.Event', ['left'])
('MOM.Document', ['left'])
('SRM.Team_has_Boat_in_Regatta', ['left'])
SRM.Crew_Member
('EVT.Event', ['left'])
('MOM.Document', ['left'])
SRM.Team_has_Boat_in_Regatta
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Subject_has_Address
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Subject_has_Email
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Subject_has_Phone
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Subject_has_Url
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Company_has_Url
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Person_has_Url
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Company_has_Phone
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Person_has_Phone
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Company_has_Email
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Person_has_Email
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Company_has_Address
('EVT.Event', ['left'])
('MOM.Document', ['left'])
PAP.Person_has_Address
('EVT.Event', ['left'])
('MOM.Document', ['left'])
>>> show_ref_maps (scope, "Ref_Opt_Map")
EVT.Calendar
('EVT.Event', ['calendar'])
PAP.Person
('SRM.Team', ['leader'])
SRM.Club
('SRM.Regatta_Event', ['club'])
('SRM.Sailor', ['club'])
('SRM.Team', ['club'])
"""
import _MOM.Document
from _GTW.__test__.model import *
# Register the doctest text (presumably held in 'test_code', defined above)
# with the GTW test scaffold so it runs as part of the doctest suite.
# NOTE(review): 'test_code' is defined outside this view — confirm.
__test__ = Scaffold.create_test_dict (test_code)
### __END__ GTW.__test__.Document_Link
| [
"_TFL.Regexp.Re_Replacer"
] | [((1502, 1576), '_TFL.Regexp.Re_Replacer', 'Re_Replacer', (['"""\'start\', \'\\\\d{4}-\\\\d{2}-\\\\d{2}\'"""', '"""\'start\', <date instance>"""'], {}), '("\'start\', \'\\\\d{4}-\\\\d{2}-\\\\d{2}\'", "\'start\', <date instance>")\n', (1513, 1576), False, 'from _TFL.Regexp import Re_Replacer, re\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
sfftk.unittests.test_readers
This testing module should have no side-effects because it only reads.
"""
from __future__ import division, print_function
import glob
import os
import struct
import sys
import unittest
import numpy
import random_words
import __init__ as tests
import ahds
from ..readers import amreader, mapreader, modreader, segreader, stlreader, surfreader
__author__ = "<NAME>, PhD"
__email__ = "<EMAIL>, <EMAIL>"
__date__ = "2017-05-15"
__updated__ = '2018-02-14'
rw = random_words.RandomWords()
# readers
class TestReaders_amreader(unittest.TestCase):
    """Tests for the AmiraMesh reader (``amreader``)."""
    @classmethod
    def setUpClass(cls):
        # Parse the shared AmiraMesh fixture once for the whole class.
        cls.am_file = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data.am')
        cls.header, cls.segments_by_stream = amreader.get_data(cls.am_file)
    def test_get_data(self):
        """Test the main entry point: get_data(...)"""
        self.assertIsInstance(self.header, ahds.header.AmiraHeader)
        self.assertIsInstance(self.segments_by_stream, numpy.ndarray)
        self.assertGreaterEqual(len(self.segments_by_stream), 1)
    def test_first_line_amiramesh(self):
        """test that it's declared as an AmiraMesh file"""
        self.assertEqual(self.header.designation.filetype, 'AmiraMesh')
    def test_first_line_binary_little_endian(self):
        """test that it is formatted as BINARY-LITTLE-ENDIAN"""
        self.assertEqual(self.header.designation.format, 'BINARY-LITTLE-ENDIAN')
    def test_first_line_version(self):
        """test that it is version 2.1"""
        self.assertEqual(self.header.designation.version, '2.1')
    def test_lattice_present(self):
        """test Lattice definition exists in definitions"""
        self.assertTrue('Lattice' in self.header.definitions.attrs)
    def test_materials_present(self):
        """test Materials exist in parameters"""
        # Fixed: the original assertIsNotNone('Materials' in ...) always passed,
        # because the membership test returns a bool and a bool is never None.
        # Assert actual membership instead, matching the docstring's intent.
        self.assertIn('Materials', self.header.parameters.attrs)
    def test_read_hxsurface(self):
        """Test handling of AmiraMesh hxsurface files"""
        am_hxsurface_file = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data_hxsurface.am')
        header, segments_by_stream = amreader.get_data(am_hxsurface_file)
        # hxsurface variants parse to a header only; no data streams expected.
        self.assertIsInstance(header, ahds.header.AmiraHeader)
        self.assertIsNone(segments_by_stream)
class TestReaders_mapreader(unittest.TestCase):
    """Tests for the CCP4/MRC map reader (``mapreader``)."""
    def setUp(self):
        # Path to the shared map fixture used by most tests in this class.
        self.map_file = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data.map')
    def test_get_data(self):
        """Test the main entry point: get_data(...)"""
        map_ = mapreader.get_data(self.map_file)
        self.assertIsInstance(map_, mapreader.Map)
        # Grid dimensions (columns/rows/sections) must be positive.
        self.assertGreater(map_._nc, 0)
        self.assertGreater(map_._nr, 0)
        self.assertGreater(map_._ns, 0)
        # Data mode is one of the five standard MRC modes (0-4).
        self.assertIn(map_._mode, range(5))
        self.assertIsInstance(map_._ncstart, int)
        self.assertIsInstance(map_._nrstart, int)
        self.assertIsInstance(map_._nsstart, int)
        self.assertGreater(map_._nx, 0)
        self.assertGreater(map_._ny, 0)
        self.assertGreater(map_._nz, 0)
        self.assertGreater(map_._x_length, 0)
        self.assertGreater(map_._y_length, 0)
        self.assertGreater(map_._z_length, 0)
        # Cell angles must be strictly between 0 and 180 degrees.
        self.assertTrue(0 < map_._alpha < 180)
        self.assertTrue(0 < map_._beta < 180)
        self.assertTrue(0 < map_._gamma < 180)
        # Axis-order fields each map to one of the axes 1..3.
        self.assertIn(map_._mapc, range(1, 4))
        self.assertIn(map_._mapr, range(1, 4))
        self.assertIn(map_._maps, range(1, 4))
        self.assertIsInstance(map_._amin, float)
        self.assertIsInstance(map_._amax, float)
        self.assertIsInstance(map_._amean, float)
        # Space-group number (1-230).
        self.assertIn(map_._ispg, range(1, 231))
        # Symmetry records come in 80-byte blocks.
        self.assertTrue(map_._nsymbt % 80 == 0)
        self.assertIn(map_._lskflg, range(2))
        # Skew matrix (s11..s33) and translation (t1..t3) entries are floats.
        self.assertIsInstance(map_._s11, float)
        self.assertIsInstance(map_._s12, float)
        self.assertIsInstance(map_._s13, float)
        self.assertIsInstance(map_._s21, float)
        self.assertIsInstance(map_._s22, float)
        self.assertIsInstance(map_._s23, float)
        self.assertIsInstance(map_._s31, float)
        self.assertIsInstance(map_._s32, float)
        self.assertIsInstance(map_._s33, float)
        self.assertIsInstance(map_._t1, float)
        self.assertIsInstance(map_._t2, float)
        self.assertIsInstance(map_._t3, float)
        # The format identifier word is literally 'MAP '.
        self.assertEqual(map_._map, 'MAP ')
        self.assertIsInstance(map_._machst, tuple)
        self.assertGreater(map_._rms, 0)
        self.assertGreater(map_._nlabl, 0)
    def test_write(self):
        """Test write map file"""
        map_to_write = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_write_map.map')
        # The output file must not exist before the test runs.
        written_maps = glob.glob(map_to_write)
        self.assertEqual(len(written_maps), 0)
        with open(map_to_write, 'w') as f:
            map_ = mapreader.get_data(self.map_file)
            map_.write(f)
        written_maps = glob.glob(map_to_write)
        self.assertEqual(len(written_maps), 1)
        # NOTE(review): relies on Python 2's eager map(); under Python 3 this is
        # a lazy iterator and the files would never actually be removed.
        map(os.remove, written_maps)
    def test_invert(self):
        """Test invert map intensities"""
        map_ = mapreader.get_data(self.map_file, inverted=False)
        self.assertFalse(map_._inverted)
        map_.invert()
        self.assertTrue(map_._inverted)
        map_ = mapreader.get_data(self.map_file, inverted=True)
        self.assertTrue(map_._inverted)
        # check the inversion is complete and that we add a new label
        with open('rm.map', 'w') as f:
            map_.write(f)
        map__ = mapreader.get_data('rm.map')
        self.assertEqual(map__._nlabl, 2)
        os.remove('rm.map')
    def test_fix_mask(self):
        """Test fix mask for fixable mask"""
        fixable_mask = mapreader.Map(os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_fixable_mask.map'))
        self.assertFalse(fixable_mask.is_mask)
        fixable_mask.fix_mask()
        self.assertTrue(fixable_mask.is_mask)
    def test_unfixable_mask(self):
        """Test exception for unfixable mask"""
        unfixable_mask = mapreader.Map(os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_unfixable_mask.map'))
        self.assertFalse(unfixable_mask.is_mask)
        with self.assertRaises(ValueError):
            unfixable_mask.fix_mask()
        # fix_mask() must not have flipped the flag when it failed.
        self.assertFalse(unfixable_mask.is_mask)
    def test_bad_data_fail(self):
        """Test that a corrupted file (extra data at end) raises Exception"""
        with self.assertRaises(ValueError):
            mapreader.Map(os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_bad_data1.map'))
    def test_bad_data_fail2(self):
        """Test that we can raise an exception with a malformed header"""
        with self.assertRaises(ValueError):
            mapreader.get_data(os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data_corrupt_header.map'))
    def test_bad_data_fail3(self):
        """Test that we can't have too long a header"""
        with self.assertRaises(ValueError):
            # create a map file with a header larger than 1024 to see the exception
            # NOTE(review): the local name 'map' shadows the builtin here.
            map = mapreader.get_data(os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data.map'))
            for i in range(map._nlabl):
                label = getattr(map, '_label_{}'.format(i))
            # Duplicate the last label into slots 1..10 to overflow the header.
            y = 11
            for j in range(1, y):
                setattr(map, '_label_{}'.format(j), label)
            map._nlabl = y
            with open('rm.map', 'w') as f:
                # NOTE(review): 'rm.map' is created but never removed when the
                # expected ValueError fires — confirm whether cleanup is needed
                # (the module docstring claims this suite has no side effects).
                map.write(f)
class TestReaders_modreader(unittest.TestCase):
    """Tests for the IMOD model reader (``modreader``).

    Uses Python 2 dict ``itervalues()`` throughout — this module is Python 2
    code despite the python3 shebang elsewhere in the project.
    """
    # NOTE(review): setUp is declared as a classmethod; unittest still calls
    # it before every test (re-reading the file each time) — this was likely
    # intended to be setUpClass.  Confirm before changing.
    @classmethod
    def setUp(cls):
        cls.mod_file = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data.mod')
        cls.mod = modreader.get_data(cls.mod_file)
    def test_get_data(self):
        """Test the main entry point: get_data(...)"""
        self.assertTrue(self.mod.isset)
        self.assertGreater(len(self.mod.objts), 0)
        self.assertGreater(self.mod.objt_count, 0)
        self.assertEqual(self.mod.version, 'V1.2')
        self.assertEqual(self.mod.name, 'IMOD-NewModel')
        self.assertGreater(self.mod.xmax, 0)
        self.assertGreater(self.mod.ymax, 0)
        self.assertGreater(self.mod.zmax, 0)
        self.assertGreaterEqual(self.mod.objsize, 1)
        self.assertIn(self.mod.drawmode, [-1, 1])
        self.assertIn(self.mod.mousemode, range(3))  # unclear what 2 is equal to INVALID VALUE
        # Black/white display levels are 8-bit values.
        self.assertIn(self.mod.blacklevel, range(256))
        self.assertIn(self.mod.whitelevel, range(256))
        self.assertEqual(self.mod.xoffset, 0)
        self.assertEqual(self.mod.yoffset, 0)
        self.assertEqual(self.mod.zoffset, 0)
        self.assertGreater(self.mod.xscale, 0)
        self.assertGreater(self.mod.yscale, 0)
        self.assertGreater(self.mod.zscale, 0)
        self.assertGreaterEqual(self.mod.object, 0)
        self.assertGreaterEqual(self.mod.contour, -1)
        self.assertGreaterEqual(self.mod.point, -1)
        self.assertGreaterEqual(self.mod.res, 0)
        self.assertIn(self.mod.thresh, range(256))
        self.assertGreater(self.mod.pixsize, 0)
        self.assertIn(self.mod.units, ['pm', 'Angstroms', 'nm', 'microns', 'mm', 'cm', 'm', 'pixels', 'km'])
        self.assertIsInstance(self.mod.csum, int)
        self.assertEqual(self.mod.alpha, 0)
        self.assertEqual(self.mod.beta, 0)
        self.assertEqual(self.mod.gamma, 0)
    def test_read_fail1(self):
        """Test that file missing 'IMOD' at beginning fails"""
        mod_fn = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_bad_data1.mod')
        with self.assertRaises(ValueError):
            modreader.get_data(mod_fn)  # missing 'IMOD' start
    def test_read_fail2(self):
        """Test that file missing 'IEOF' at end fails"""
        mod_fn = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_bad_data2.mod')
        with self.assertRaises(ValueError):
            modreader.get_data(mod_fn)  # missing 'IEOF' end
    def test_IMOD_pass(self):
        """Test that IMOD chunk read"""
        self.assertTrue(self.mod.isset)
    def test_OBJT_pass(self):
        """Test that OBJT chunk read"""
        for O in self.mod.objts.itervalues():
            self.assertTrue(O.isset)
    def test_CONT_pass(self):
        """Test that CONT chunk read"""
        for O in self.mod.objts.itervalues():
            for C in O.conts.itervalues():
                self.assertTrue(C.isset)
    def test_MESH_pass(self):
        """Test that MESH chunk read"""
        for O in self.mod.objts.itervalues():
            for M in O.meshes.itervalues():
                self.assertTrue(M.isset)
    def test_IMAT_pass(self):
        """Test that IMAT chunk read"""
        for O in self.mod.objts.itervalues():
            self.assertTrue(O.imat.isset)
    def test_VIEW_pass(self):
        """Test that VIEW chunk read"""
        for V in self.mod.views.itervalues():
            self.assertTrue(V.isset)
    def test_MINX_pass(self):
        """Test that MINX chunk read"""
        self.assertTrue(self.mod.minx.isset)
    def test_MEPA_pass(self):
        """Test that MEPA chunk read"""
        # MEPA is optional per object: either present and set, or None.
        for O in self.mod.objts.itervalues():
            try:
                self.assertTrue(O.mepa.isset)
            except AttributeError:
                self.assertEqual(O.mepa, None)
    def test_CLIP_pass(self):
        """Test that CLIP chunk read"""
        # CLIP is optional per object: either present and set, or None.
        for O in self.mod.objts.itervalues():
            try:
                self.assertTrue(O.clip.isset)
            except AttributeError:
                self.assertEqual(O.clip, None)
    def test_number_of_OBJT_chunks(self):
        """Test that compares declared and found OBJT chunks"""
        self.assertEqual(self.mod.objsize, len(self.mod.objts))
    def test_number_of_CONT_chunks(self):
        """Test that compares declared and found CONT chunks"""
        for O in self.mod.objts.itervalues():
            self.assertEqual(O.contsize, len(O.conts))
    def test_number_of_MESH_chunks(self):
        """Test that compares declared and found MESH chunks"""
        for O in self.mod.objts.itervalues():
            self.assertEqual(O.meshsize, len(O.meshes))
    def test_number_of_surface_objects(self):
        """Test that compares declared and found surface objects"""
        # A contour with surf != 0 belongs to a surface.
        for O in self.mod.objts.itervalues():
            no_of_surfaces = 0
            for C in O.conts.itervalues():
                if C.surf != 0:
                    no_of_surfaces += 1
            self.assertEqual(O.surfsize, no_of_surfaces)
    def test_number_of_points_in_CONT_chunk(self):
        """Test that compares declared an found points in CONT chunks"""
        for O in self.mod.objts.itervalues():
            for C in O.conts.itervalues():
                self.assertEqual(C.psize, len(C.pt))
    def test_number_of_vertex_elements_in_MESH_chunk(self):
        """Test that compares declared an found vertices in MESH chunks"""
        for O in self.mod.objts.itervalues():
            for M in O.meshes.itervalues():
                self.assertEqual(M.vsize, len(M.vert))
    def test_number_of_list_elements_in_MESH_chunk(self):
        """Test that compares declared an found indices in MESH chunks"""
        for O in self.mod.objts.itervalues():
            for M in O.meshes.itervalues():
                self.assertEqual(M.lsize, len(M.list))
class TestReaders_segreader(unittest.TestCase):
    """Tests for the Segger (.seg) segmentation reader."""

    def setUp(self):
        # Path to the Segger fixture file.
        self.seg_file = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data.seg')

    def test_get_data(self):
        """Test the main entry point: get_data(...)"""
        segmentation = segreader.get_data(self.seg_file)
        print(segmentation, file=sys.stderr)
        self.assertIsInstance(segmentation, segreader.SeggerSegmentation)
        self.assertEqual(0.852, segmentation.map_level)
        self.assertEqual(2, segmentation.format_version)
        self.assertItemsEqual(segmentation.map_size, [26, 27, 30])
        self.assertEqual('segger', segmentation.format)
        self.assertEqual((30, 27, 26), segmentation.mask.shape)
class TestReaders_stlreader(unittest.TestCase):
    """Tests for the STL reader: ASCII, binary and multi-solid files."""

    def setUp(self):
        # Fixture paths for the three STL flavours under test.
        seg_dir = os.path.join(tests.TEST_DATA_PATH, 'segmentations')
        self.stl_file = os.path.join(seg_dir, 'test_data.stl')
        self.stl_bin_file = os.path.join(seg_dir, 'test_data_binary.stl')
        self.stl_multi_file = os.path.join(seg_dir, 'test_data_multiple.stl')

    def test_get_data(self):
        """Test the main entry point: get_data(...)"""
        meshes = stlreader.get_data(self.stl_file)  # only one mesh here
        name, vertices, polygons = meshes[0]
        # Collect every vertex id referenced by any triangle.
        vertex_ids = set()
        for triangle in polygons.values():
            vertex_ids.update(triangle)
        expected_name = "{}#{}".format(os.path.basename(self.stl_file), 0)
        self.assertEqual(name, expected_name)
        self.assertGreaterEqual(len(vertices), 1)
        key_set = set(vertices.keys())
        # Triangles must reference exactly the vertex ids that were read.
        self.assertEqual(min(vertex_ids), min(key_set))
        self.assertEqual(max(vertex_ids), max(key_set))
        self.assertEqual(sum(vertex_ids), sum(vertices.keys()))
        self.assertEqual(vertex_ids, key_set)

    def test_read_binary(self):
        """Test that we can read a binary STL file"""
        meshes = stlreader.get_data(self.stl_bin_file)
        print(meshes[0][0], file=sys.stderr)
        name, vertices, polygons = meshes[0]
        expected_name = "{}#{}".format(os.path.basename(self.stl_bin_file), 0)
        self.assertEqual(name, expected_name)
        self.assertTrue(len(vertices) > 0)
        self.assertTrue(len(polygons) > 0)
        polygon_ids = []
        for triangle in polygons.itervalues():
            polygon_ids.extend(triangle)
        self.assertItemsEqual(set(vertices.keys()), set(polygon_ids))

    def test_read_multiple(self):
        """Test that we can read a multi-solid STL file
        Only works for ASCII by concatenation"""
        meshes = stlreader.get_data(self.stl_multi_file)
        expected_name = "{}#{}".format(os.path.basename(self.stl_multi_file), 0)
        for name, vertices, polygons in meshes:
            self.assertEqual(name, expected_name)
            self.assertTrue(len(vertices) > 0)
            self.assertTrue(len(polygons) > 0)
            polygon_ids = []
            for triangle in polygons.itervalues():
                polygon_ids.extend(triangle)
            self.assertItemsEqual(set(vertices.keys()), set(polygon_ids))
class TestReaders_surfreader(unittest.TestCase):
    """Tests for the Amira HyperSurface reader (``surfreader``)."""

    @classmethod
    def setUpClass(cls):
        # Parse the shared HyperSurface fixture once for the whole class.
        cls.surf_file = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_data.surf')
        cls.header, cls.segments = surfreader.get_data(cls.surf_file)  # only one mesh here

    def test_get_data(self):
        """Test the main entry point: get_data(...)"""
        segment = self.segments[2]
        vertices = segment.vertices
        # Collect every vertex id referenced by any triangle of segment 2.
        vertex_ids = set()
        for triangle in segment.triangles:
            vertex_ids.update(triangle)
        self.assertIsInstance(self.header, ahds.header.AmiraHeader)
        self.assertIsInstance(self.segments, dict)
        self.assertEqual(segment.name, 'medulla_r')
        self.assertGreaterEqual(len(vertices), 1)
        self.assertGreaterEqual(len(self.segments), 1)
        key_set = set(vertices.keys())
        # Triangles must reference exactly the vertex ids that were read.
        self.assertEqual(min(vertex_ids), min(key_set))
        self.assertEqual(max(vertex_ids), max(key_set))
        self.assertEqual(sum(vertex_ids), sum(vertices.keys()))
        self.assertEqual(vertex_ids, key_set)
if __name__ == "__main__":
    # Allow running this test module directly (python test_readers.py).
    unittest.main()
| [
"random_words.RandomWords",
"os.path.join",
"os.path.basename",
"unittest.main",
"glob.glob",
"os.remove"
] | [((544, 570), 'random_words.RandomWords', 'random_words.RandomWords', ([], {}), '()\n', (568, 570), False, 'import random_words\n'), ((17550, 17565), 'unittest.main', 'unittest.main', ([], {}), '()\n', (17563, 17565), False, 'import unittest\n'), ((693, 760), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.am"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.am')\n", (705, 760), False, 'import os\n'), ((2091, 2168), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data_hxsurface.am"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data_hxsurface.am')\n", (2103, 2168), False, 'import os\n'), ((2447, 2515), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.map')\n", (2459, 2515), False, 'import os\n'), ((4681, 4754), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_write_map.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_write_map.map')\n", (4693, 4754), False, 'import os\n'), ((4778, 4801), 'glob.glob', 'glob.glob', (['map_to_write'], {}), '(map_to_write)\n', (4787, 4801), False, 'import glob\n'), ((4994, 5017), 'glob.glob', 'glob.glob', (['map_to_write'], {}), '(map_to_write)\n', (5003, 5017), False, 'import glob\n'), ((5674, 5693), 'os.remove', 'os.remove', (['"""rm.map"""'], {}), "('rm.map')\n", (5683, 5693), False, 'import os\n'), ((7668, 7736), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.mod"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.mod')\n", (7680, 7736), False, 'import os\n'), ((9554, 9627), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_bad_data1.mod"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_bad_data1.mod')\n", (9566, 9627), False, 'import os\n'), 
((9841, 9914), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_bad_data2.mod"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_bad_data2.mod')\n", (9853, 9914), False, 'import os\n'), ((13514, 13582), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.seg"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.seg')\n", (13526, 13582), False, 'import os\n'), ((14167, 14235), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.stl"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.stl')\n", (14179, 14235), False, 'import os\n'), ((14264, 14339), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data_binary.stl"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data_binary.stl')\n", (14276, 14339), False, 'import os\n'), ((14370, 14447), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data_multiple.stl"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data_multiple.stl')\n", (14382, 14447), False, 'import os\n'), ((16505, 16574), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.surf"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.surf')\n", (16517, 16574), False, 'import os\n'), ((5806, 5882), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_fixable_mask.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_fixable_mask.map')\n", (5818, 5882), False, 'import os\n'), ((6132, 6210), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_unfixable_mask.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_unfixable_mask.map')\n", (6144, 6210), False, 'import os\n'), ((6575, 6648), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', 
'"""test_bad_data1.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_bad_data1.map')\n", (6587, 6648), False, 'import os\n'), ((6835, 6922), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data_corrupt_header.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations',\n 'test_data_corrupt_header.map')\n", (6847, 6922), False, 'import os\n'), ((7177, 7245), 'os.path.join', 'os.path.join', (['tests.TEST_DATA_PATH', '"""segmentations"""', '"""test_data.map"""'], {}), "(tests.TEST_DATA_PATH, 'segmentations', 'test_data.map')\n", (7189, 7245), False, 'import os\n'), ((14813, 14844), 'os.path.basename', 'os.path.basename', (['self.stl_file'], {}), '(self.stl_file)\n', (14829, 14844), False, 'import os\n'), ((15438, 15473), 'os.path.basename', 'os.path.basename', (['self.stl_bin_file'], {}), '(self.stl_bin_file)\n', (15454, 15473), False, 'import os\n'), ((16053, 16090), 'os.path.basename', 'os.path.basename', (['self.stl_multi_file'], {}), '(self.stl_multi_file)\n', (16069, 16090), False, 'import os\n')] |
#!/usr/bin/env python3
# _*_ coding: utf-8 _*_
###
# Project : SubLime
# FileName : util.py
# -----------------------------------------------------------------------------
# Author : sham
# E-Mail : <EMAIL>
# -----------------------------------------------------------------------------
# Creation date : 29/08/2013
##
import os
import csv
import logging
from sublime import util
# Logger
# Module-level logger for this module.
# NOTE(review): LOG is not referenced anywhere in this file — presumably kept
# for importers or future use; confirm before removing.
LOG = logging.getLogger(__name__)
# -----------------------------------------------------------------------------
#
# Signature class
#
# -----------------------------------------------------------------------------
class Signature:
    """Holds information about a file signature (magic number).

    A signature maps a magic number (the identifying byte sequence at the
    start of a file) to a human-readable description and the set of file
    extensions known to carry it.
    """

    def __init__(self, magic_number, description):
        """ Initializes instance. """
        self.magic_number = magic_number
        self.description = description
        # Extensions are accumulated by the caller (see FileMagic's CSV load).
        self.extensions = set()

    def __eq__(self, other):
        # Equality is defined by the magic number alone.  Return
        # NotImplemented for unrelated types (instead of raising
        # AttributeError) so Python can fall back to its default handling.
        if not isinstance(other, Signature):
            return NotImplemented
        return self.magic_number == other.magic_number

    def __hash__(self):
        # Required: defining __eq__ alone sets __hash__ to None in Python 3,
        # which would make Signature instances unhashable.  Hash consistently
        # with equality (magic_number is a tuple, hence hashable).
        return hash(self.magic_number)

    def __repr__(self):
        return "<Signature('{}', '{}', '{}')>".format(
            self.magic_number, self.description, self.extensions)
# -----------------------------------------------------------------------------
#
# FileMagic class
#
# -----------------------------------------------------------------------------
class FileMagic:
    """ FileMagic will try to determine the file's type by using
    file signatures (magic numbers in the file's header). """

    # Singleton pattern
    _instance = None

    def __new__(cls, *args, **kwargs):
        """ If there is already a FileMagic instance
        returns this one.
        Ensures that there is only one instance of FileMagic
        is running in SubLime."""
        # NOTE(review): __new__ returns the inner __FileMagic object directly,
        # so FileMagic(...) yields a __FileMagic instance and the delegating
        # __getattr__/__setattr__ below are bypassed for it — confirm intent.
        if not FileMagic._instance:
            FileMagic._instance = FileMagic.__FileMagic(*args, **kwargs)
        return FileMagic._instance

    def __getattr__(self, attr):
        # Delegate attribute reads to the single inner instance.
        return getattr(self._instance, attr)

    def __setattr__(self, attr, val):
        # Delegate attribute writes to the single inner instance.
        return setattr(self._instance, attr, val)

    class __FileMagic():
        """ Inner class for Singleton purpose. """

        def __init__(self, video_extensions):
            """ Initializes instance.

            Builds the magic-number table from the CSV configuration file,
            keeping only entries whose extension is in video_extensions.
            """
            self._video_extensions = video_extensions
            # Maps magic-number tuple -> Signature (one Signature may carry
            # several extensions).
            self._magic_numbers = {}
            self._max_nb_bytes = 0
            # Loads CSV config file containing all magic numbers
            signatures_filepath = os.path.join(
                util.get_exe_dir(), "Config", "file_signatures.csv")
            with open(signatures_filepath, "r", encoding='utf-8') as sign_file:
                reader = csv.reader(
                    sign_file, delimiter=',', quoting=csv.QUOTE_ALL)
                for line in reader:
                    # CSV columns: extension, magic number (hex), description.
                    extension = line[0].strip()
                    magic_number = line[1].strip()
                    description = line[2].strip()
                    if extension in self._video_extensions:
                        # Parse hex strings like "1A 45 DF A3" into a tuple
                        # of byte values.
                        magic_number = tuple(
                            int(figure, 16) for figure in magic_number.split()
                        )
                        cur_signature = Signature(magic_number, description)
                        signature = self._magic_numbers.setdefault(
                            magic_number, cur_signature)
                        signature.extensions.add(extension)
            # The longest known signature determines how many header bytes
            # must be read from a file to test all signatures.
            self._max_nb_bytes = max(
                [len(magic) for magic in self._magic_numbers.keys()])
            self._mkv_magic_number = tuple(
                int(figure, 16) for figure in "1A 45 DF A3 93 42 82 88".split()
            )

        def get_video_signature(self, filepath):
            """ Gets video file signature
            if a file given by its filepath is a video.

            Raises FileExtensionMismatchError when a known signature is found
            but does not match the file's extension, and FileUnknownError
            when no signature matches at all.  Returns None for files whose
            extension is not a known video extension.
            """
            recognized = False
            file_signature = None
            _, ext = os.path.splitext(filepath)
            # Only attempt signature detection for known video extensions.
            if ext in self._video_extensions:
                all_magic_numbers = self._magic_numbers.keys()
                with open(filepath, 'rb') as file_handler:
                    # Read just enough header bytes to test every signature.
                    header = tuple(
                        int(o) for o in file_handler.read(self._max_nb_bytes)
                    )
                    for magic in all_magic_numbers:
                        # A prefix match identifies the signature.
                        if header[:len(magic)] == magic:
                            file_signature = self._magic_numbers[magic]
                            if ext in file_signature.extensions:
                                recognized = True
                            break
                if not recognized:
                    if file_signature:
                        # Signature found but extension disagrees: the file
                        # was probably renamed or mislabelled.
                        raise FileExtensionMismatchError(
                            filepath, file_signature)
                    else:
                        raise FileUnknownError(filepath)
            return file_signature

        def is_mkv(self, file_signature):
            """ Determines if a file signature is a MKV. """
            return file_signature.magic_number == self._mkv_magic_number
# -----------------------------------------------------------------------------
#
# Exceptions
#
# -----------------------------------------------------------------------------
class FileMagicError(Exception):
    """Base class for every error raised by the FileMagic machinery."""
class FileExtensionMismatchError(FileMagicError):
    """Raised when a file's extension and its detected signature disagree.

    Attributes:
        filepath -- path of file
        file_signature -- File signature detected by FileMagic. """

    def __init__(self, filepath, file_signature):
        self.filepath = filepath
        self.file_signature = file_signature

    def __str__(self):
        # Assemble the user-facing message from its parts.
        expected = " or ".join(self.file_signature.extensions)
        template = (
            "The video file called {} is supposed to be a video but "
            "its signature doesn't: {}."
            "\nExpected extension: {}"
        )
        return template.format(
            self.filepath, self.file_signature.description, expected)
class FileUnknownError(FileMagicError):
    """Raised when FileMagic cannot recognize a file at all.

    Attributes:
        filepath -- path of file """

    def __init__(self, filepath):
        self.filepath = filepath

    def __str__(self):
        message = "The file called {} was not recognized by Sublime."
        return message.format(self.filepath)
# EOF
| [
"logging.getLogger",
"os.path.splitext",
"sublime.util.get_exe_dir",
"csv.reader"
] | [((439, 466), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (456, 466), False, 'import logging\n'), ((3886, 3912), 'os.path.splitext', 'os.path.splitext', (['filepath'], {}), '(filepath)\n', (3902, 3912), False, 'import os\n'), ((2500, 2518), 'sublime.util.get_exe_dir', 'util.get_exe_dir', ([], {}), '()\n', (2516, 2518), False, 'from sublime import util\n'), ((2658, 2717), 'csv.reader', 'csv.reader', (['sign_file'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_ALL'}), "(sign_file, delimiter=',', quoting=csv.QUOTE_ALL)\n", (2668, 2717), False, 'import csv\n')] |
# -*- coding: utf-8 -*-
"""
@author:XuMing(<EMAIL>), <NAME>(<EMAIL>)
@description: Graph classify
"""
import numpy
from sklearn.metrics import f1_score, accuracy_score
from sklearn.multiclass import OneVsRestClassifier
from sklearn.preprocessing import MultiLabelBinarizer
class TopKRanker(OneVsRestClassifier):
    """One-vs-rest classifier whose predict() keeps, per sample, only the
    top-k most probable labels (k supplied per sample)."""

    def predict(self, X, top_k_list):
        """Return a binary indicator matrix: for sample i, ones at the
        positions of its top_k_list[i] most probable labels, zeros elsewhere.
        """
        probabilities = numpy.asarray(super(TopKRanker, self).predict_proba(X))
        indicator_rows = []
        for i, k in enumerate(top_k_list):
            row = probabilities[i, :]
            # Indices of the k most probable labels for this sample.
            top_labels = self.classes_[row.argsort()[-k:]].tolist()
            indicator = numpy.zeros_like(row)
            indicator[top_labels] = 1
            indicator_rows.append(indicator)
        return numpy.asarray(indicator_rows)
class Classifier(object):
    """Node-classification evaluator built on top of learned embeddings."""

    def __init__(self, embeddings, clf):
        # embeddings: mapping node-id -> feature vector.
        self.embeddings = embeddings
        self.clf = TopKRanker(clf)
        self.binarizer = MultiLabelBinarizer(sparse_output=True)

    def train(self, X, Y, Y_all):
        """Fit the wrapped classifier on the embeddings of the nodes in X."""
        # Fit the binarizer on the full label set so transform() is stable.
        self.binarizer.fit(Y_all)
        features = [self.embeddings[node] for node in X]
        targets = self.binarizer.transform(Y)
        self.clf.fit(features, targets)

    def evaluate(self, X, Y):
        """Score predictions on X against gold labels Y.

        Prints and returns a dict with weighted F1 and accuracy."""
        top_k_list = [len(labels) for labels in Y]
        predictions = self.predict(X, top_k_list)
        gold = self.binarizer.transform(Y)
        results = {
            'f1': f1_score(gold, predictions, average="weighted"),
            'acc': accuracy_score(gold, predictions),
        }
        print(results)
        return results

    def predict(self, X, top_k_list):
        """Predict top-k label sets for the nodes in X."""
        features = numpy.asarray([self.embeddings[node] for node in X])
        return self.clf.predict(features, top_k_list=top_k_list)

    def split_train_evaluate(self, X, Y, train_precent, seed=0):
        """Shuffle, split by train_precent, then train and evaluate."""
        training_size = int(train_precent * len(X))
        # Seed for a reproducible shuffle.
        numpy.random.seed(seed)
        shuffle_indices = numpy.random.permutation(numpy.arange(len(X)))
        train_idx = shuffle_indices[:training_size]
        test_idx = shuffle_indices[training_size:]
        X_train = [X[i] for i in train_idx]
        Y_train = [Y[i] for i in train_idx]
        X_test = [X[i] for i in test_idx]
        Y_test = [Y[i] for i in test_idx]
        self.train(X_train, Y_train, Y)
        return self.evaluate(X_test, Y_test)
def read_node_label(filename, skip_head=False):
    """Read node labels: each line is "<node> <label> [<label> ...]".

    Lines with fewer than two space-separated fields are skipped, as is
    the first line when skip_head is True.

    Returns:
        (nodes, labels): list of node ids and list of label lists.
    """
    X, Y = [], []
    with open(filename, 'r', encoding='utf-8') as f:
        for lineno, raw in enumerate(f, start=1):
            if skip_head and lineno == 1:
                continue
            fields = raw.strip().split(' ')
            if len(fields) > 1:
                X.append(fields[0])
                Y.append(fields[1:])
    return X, Y
| [
"sklearn.metrics.f1_score",
"numpy.asarray",
"numpy.random.seed",
"sklearn.preprocessing.MultiLabelBinarizer",
"sklearn.metrics.accuracy_score"
] | [((702, 727), 'numpy.asarray', 'numpy.asarray', (['all_labels'], {}), '(all_labels)\n', (715, 727), False, 'import numpy\n'), ((894, 933), 'sklearn.preprocessing.MultiLabelBinarizer', 'MultiLabelBinarizer', ([], {'sparse_output': '(True)'}), '(sparse_output=True)\n', (913, 933), False, 'from sklearn.preprocessing import MultiLabelBinarizer\n'), ((1324, 1359), 'sklearn.metrics.f1_score', 'f1_score', (['Y', 'Y_'], {'average': '"""weighted"""'}), "(Y, Y_, average='weighted')\n", (1332, 1359), False, 'from sklearn.metrics import f1_score, accuracy_score\n'), ((1385, 1406), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['Y', 'Y_'], {}), '(Y, Y_)\n', (1399, 1406), False, 'from sklearn.metrics import f1_score, accuracy_score\n'), ((1505, 1551), 'numpy.asarray', 'numpy.asarray', (['[self.embeddings[x] for x in X]'], {}), '([self.embeddings[x] for x in X])\n', (1518, 1551), False, 'import numpy\n'), ((1752, 1775), 'numpy.random.seed', 'numpy.random.seed', (['seed'], {}), '(seed)\n', (1769, 1775), False, 'import numpy\n')] |
###############################################################################
# Module: ddl_statement
# Purpose: Parent class for DDL (Data Definition Language) statements
#
# Notes:
#
###############################################################################
import data_pipeline.constants.const as const
from abc import ABCMeta, abstractmethod
from .base_statement import BaseStatement
class DdlStatement(BaseStatement):
    """Contains data necessary for producing a valid DDL statement"""

    __metaclass__ = ABCMeta

    def __init__(self, table_name):
        super(DdlStatement, self).__init__(table_name)
        self._entries = []

    @property
    def entries(self):
        # Accumulated DDL entries, in insertion order.
        return self._entries

    @abstractmethod
    def add_entry(self, **kwargs):
        pass

    def _build_field_params(self, params):
        # Render "(a, b, c)" for a non-empty sequence, else the empty string.
        if not params:
            return const.EMPTY_STRING
        return "({})".format(const.COMMASPACE.join(params))

    def _build_field_string(self, value):
        # Leading space keeps concatenated fields separated.
        return " {}".format(value if value else const.EMPTY_STRING)
| [
"data_pipeline.constants.const.COMMASPACE.join"
] | [((889, 918), 'data_pipeline.constants.const.COMMASPACE.join', 'const.COMMASPACE.join', (['params'], {}), '(params)\n', (910, 918), True, 'import data_pipeline.constants.const as const\n')] |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common logging helpers."""
from google.cloud.logging.entries import ProtobufEntry
from google.cloud.logging.entries import StructEntry
from google.cloud.logging.entries import TextEntry
def entry_from_resource(resource, client, loggers):
    """Detect correct entry type from resource and instantiate.

    :type resource: dict
    :param resource: One entry resource from API response.

    :type client: :class:`~google.cloud.logging.client.Client`
    :param client: Client that owns the log entry.

    :type loggers: dict
    :param loggers:
        A mapping of logger fullnames -> loggers.  If the logger
        that owns the entry is not in ``loggers``, the entry
        will have a newly-created logger.

    :rtype: :class:`~google.cloud.logging.entries._BaseEntry`
    :returns: The entry instance, constructed via the resource
    """
    # Payload key order matters only for readability; exactly one key is expected.
    dispatch = (
        ('textPayload', TextEntry),
        ('jsonPayload', StructEntry),
        ('protoPayload', ProtobufEntry),
    )
    for key, entry_class in dispatch:
        if key in resource:
            return entry_class.from_api_repr(resource, client, loggers)
    raise ValueError('Cannot parse log entry resource.')
| [
"google.cloud.logging.entries.ProtobufEntry.from_api_repr",
"google.cloud.logging.entries.StructEntry.from_api_repr",
"google.cloud.logging.entries.TextEntry.from_api_repr"
] | [((1481, 1531), 'google.cloud.logging.entries.TextEntry.from_api_repr', 'TextEntry.from_api_repr', (['resource', 'client', 'loggers'], {}), '(resource, client, loggers)\n', (1504, 1531), False, 'from google.cloud.logging.entries import TextEntry\n'), ((1583, 1635), 'google.cloud.logging.entries.StructEntry.from_api_repr', 'StructEntry.from_api_repr', (['resource', 'client', 'loggers'], {}), '(resource, client, loggers)\n', (1608, 1635), False, 'from google.cloud.logging.entries import StructEntry\n'), ((1688, 1742), 'google.cloud.logging.entries.ProtobufEntry.from_api_repr', 'ProtobufEntry.from_api_repr', (['resource', 'client', 'loggers'], {}), '(resource, client, loggers)\n', (1715, 1742), False, 'from google.cloud.logging.entries import ProtobufEntry\n')] |
import os
import pandas as pd
import numpy as np
def inp_mortality_tot():
    """Add per-row mortality and recovery rates to the country-level totals.

    Reads total_country.csv, derives Deaths/Confirmed and Cured/Confirmed
    for every row, and writes the result to total_country_mod.csv.
    """
    df = pd.read_csv('total_country.csv')
    # Vectorized column arithmetic replaces the original row-by-row
    # `while`/`iloc` loop (which paid a full positional lookup per row).
    df['mortal_rate'] = df['Deaths'] / df['Confirmed']
    df['cured_rate'] = df['Cured'] / df['Confirmed']
    df.to_csv('total_country_mod.csv', index=False)
def state_wise_process():
    """Add mortality and recovery rate columns to every per-state CSV.

    Rewrites each file under details_state/ in place with two extra
    columns: Deaths/Confirmed and Cured/Confirmed.
    """
    for filename in os.listdir('details_state'):
        path = 'details_state/' + filename
        df = pd.read_csv(path)
        # Vectorized replacement for the original per-row `iloc` loop.
        df['mortal_rate'] = df['Deaths'] / df['Confirmed']
        df['cured_rate'] = df['Cured'] / df['Confirmed']
        df.to_csv(path, index=False)
def mark_rate_tot():
    """Append day-over-day confirmation and cure growth rates to total_country_mod.csv.

    The rate for row i is (value[i] - value[i-1]) / value[i]; the first row
    and any zero-denominator failure are recorded as 0.
    """
    df = pd.read_csv('total_country_mod.csv')
    confirm_rate = []
    rec_rate = []
    for i in range(len(df)):  # range() replaces the manual while/i += 1 loop
        if i == 0:
            res = 0
            res2 = 0
        else:
            res = (df.iloc[i]['Confirmed'] - df.iloc[i - 1]['Confirmed']) / (df.iloc[i]['Confirmed'])
            try:
                res2 = (df.iloc[i]['Cured'] - df.iloc[i - 1]['Cured']) / (df.iloc[i]['Cured'])
            except ZeroDivisionError:
                # Was a bare `except:` that swallowed every error (including
                # KeyError/KeyboardInterrupt); only a zero denominator is an
                # expected failure here.
                res2 = 0
        confirm_rate.append(res)
        rec_rate.append(res2)
    df['confirmation_increase_rate'] = confirm_rate
    df['cured_increase_rate'] = rec_rate
    df.to_csv('total_country_mod.csv', index=False)
mark_rate_tot()
| [
"os.listdir",
"pandas.read_csv"
] | [((80, 112), 'pandas.read_csv', 'pd.read_csv', (['"""total_country.csv"""'], {}), "('total_country.csv')\n", (91, 112), True, 'import pandas as pd\n'), ((482, 509), 'os.listdir', 'os.listdir', (['"""details_state"""'], {}), "('details_state')\n", (492, 509), False, 'import os\n'), ((966, 1002), 'pandas.read_csv', 'pd.read_csv', (['"""total_country_mod.csv"""'], {}), "('total_country_mod.csv')\n", (977, 1002), True, 'import pandas as pd\n'), ((565, 582), 'pandas.read_csv', 'pd.read_csv', (['name'], {}), '(name)\n', (576, 582), True, 'import pandas as pd\n')] |
# encoding: utf-8
from collections import OrderedDict
import string
from pydatacube.pydatacube import _DataCube
import px_reader
# A bit scandinavian specific
default_translate = dict(zip(
u"äöä -",
u"aoa__"
))
class Sluger(object):
def __init__(self, translate=default_translate):
self.given_out = {}
self.translate = translate
def __call__(self, value):
slug = value.lower()
chars = []
for c in slug:
c = self.translate.get(c, c)
if c == '_':
chars.append(c)
if c.isalnum():
chars.append(c)
slug = ''.join(chars)
slug = slug.encode('ascii', errors='ignore')
realslug = slug
# Hopefully won't happen
while realslug in self.given_out:
realslug = realslug + '_'
return realslug
PxSyntaxError = px_reader.PxSyntaxError
def to_cube(pcaxis_data, origin_url=None, Sluger=Sluger):
    """Parse raw PC-Axis content and build a _DataCube from it.

    pcaxis_data -- raw PC-Axis file content, handed to px_reader.Px
    origin_url  -- if given, recorded under metadata['origin_url']
    Sluger      -- slug-generator factory (injectable, e.g. for testing)

    Raises PxSyntaxError (via px_reader.Px) on malformed input.
    """
    px = px_reader.Px(pcaxis_data)
    cube = OrderedDict()
    metadata = OrderedDict()
    def setmeta(target, src):
        # Copy px.<src> into metadata[target] only when the attribute exists.
        if not hasattr(px, src):
            return
        metadata[target] = getattr(px, src)
    setmeta('title', 'title')
    setmeta('source', 'source')
    if origin_url:
        metadata['origin_url'] = origin_url
    # NOTE(review): the guard checks the hyphenated 'last-updated' attribute
    # but reads 'updated_dt' — presumably Px exposes both for that keyword;
    # confirm against px_reader.
    if hasattr(px, 'last-updated'):
        metadata['updated'] = getattr(px, 'updated_dt').isoformat()
    setmeta('note', 'note')
    cube['metadata'] = metadata
    if hasattr(px, 'codes'):
        codes = px.codes
    else:
        codes = {}
    dimensions = []
    # One sluger for dimension ids (shared), a fresh one per dimension for
    # category ids so category slugs only need to be unique within a dimension.
    dim_sluger = Sluger()
    for label, px_categories in px.values.iteritems():
        if label in px.codes:
            # Explicit CODES from the PX file take precedence over slugs.
            cat_ids = codes[label]
        else:
            cat_sluger = Sluger()
            cat_ids = [cat_sluger(c) for c in px_categories]
        categories = []
        for cat_id, cat_label in zip(cat_ids, px_categories):
            cat = dict(id=cat_id, label=cat_label)
            categories.append(cat)
        dimension = dict(
            id=dim_sluger(label),
            label=label,
            categories=categories
        )
        dimensions.append(dimension)
    cube['dimensions'] = dimensions
    # TODO: Casting?
    # TODO: Add a public method to get raw
    # data from a Px-object
    values = px._data.split()
    cube['value_dimensions'] = [
        dict(id=dim_sluger('value'), values=values)
    ]
    return _DataCube(cube)
| [
"pydatacube.pydatacube._DataCube",
"px_reader.Px",
"collections.OrderedDict"
] | [((834, 859), 'px_reader.Px', 'px_reader.Px', (['pcaxis_data'], {}), '(pcaxis_data)\n', (846, 859), False, 'import px_reader\n'), ((868, 881), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (879, 881), False, 'from collections import OrderedDict\n'), ((894, 907), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (905, 907), False, 'from collections import OrderedDict\n'), ((2073, 2088), 'pydatacube.pydatacube._DataCube', '_DataCube', (['cube'], {}), '(cube)\n', (2082, 2088), False, 'from pydatacube.pydatacube import _DataCube\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=wrong-import-position,invalid-name
"""
Test the cascader in the compilation flow.
"""
import pytest
pytest.importorskip("ethosu.vela")
import numpy as np
import tvm
from tvm import relay
from tvm.relay.backend.contrib.ethosu.codegen import _create_cascader
from tvm.relay.backend.contrib.ethosu.tir.compiler import _lower_to_tir
from tvm.contrib.ethosu.cascader import MemoryRegion, EthosuDeviceConfig
from .. import infra as test_infra
from . import infra as cascader_test_infra
def _ethos_u55_cascader():
    """Build a cascader callback configured for an Ethos-U55-256 with SRAM and FLASH."""
    working_mem = MemoryRegion(
        name="SRAM",
        size=10**6,
        read_bandwidth=16,
        write_bandwidth=16,
        read_latency=0,
        write_latency=0,
        burst_length=1,
    )
    constant_mem = MemoryRegion(name="FLASH", size=10**7, read_bandwidth=4, write_bandwidth=4)
    options = cascader_test_infra.make_options(
        cascade_region=working_mem,
        max_proposals=64,
        stripe_factors=4,
        max_plan_size=10,
        max_open_plans=8,
        max_closed_plans=32,
        always_copy_size=1024,
        disable_pareto_plans=False,
        disable_pareto_proposals=False,
        enable_striping=False,
    )
    return _create_cascader(
        options=options,
        io_region=working_mem,
        constant_region=constant_mem,
        working_regions=[working_mem],
        device_config=EthosuDeviceConfig("ethos-u55-256"),
    )
def _compile_model(relay_function):
    """Wrap a Relay function in a module, infer types, and lower to TIR via the cascader."""
    mod = tvm.IRModule()
    mod["main"] = relay_function
    mod = relay.transform.InferType()(mod)
    lowered = _lower_to_tir(mod["main"], _ethos_u55_cascader())[0]
    return lowered["main"]
def _create_single_conv2d():
    """Relay graph with a single ethosu conv2d over an int8 NHWC input."""
    inp = relay.var("x", shape=(1, 8, 8, 4), dtype="int8")
    conv = test_infra.make_ethosu_conv2d(inp, 4, 4, (3, 3), (1, 1), (1, 1), (1, 1))
    return relay.Function(relay.analysis.free_vars(conv), conv)
def _create_double_conv2d():
    """Relay graph chaining two ethosu conv2d ops over an int8 NHWC input."""
    inp = relay.var("x", shape=(1, 8, 8, 4), dtype="int8")
    first = test_infra.make_ethosu_conv2d(inp, 4, 4, (3, 3), (1, 1), (1, 1), (1, 1))
    second = test_infra.make_ethosu_conv2d(first, 4, 4, (1, 3), (1, 1), (1, 1), (1, 1))
    return relay.Function(relay.analysis.free_vars(second), second)
def _create_scalar_add():
    """Relay graph adding a 1x1x1x1 constant to an int8 tensor via ethosu binary elementwise."""
    inp = relay.var("x", shape=(1, 5, 4, 3), dtype="int8")
    scalar = relay.const(np.ones((1, 1, 1, 1)), dtype="int8")
    add = test_infra.make_ethosu_binary_elementwise(
        inp, scalar, ifm_channels=3, ifm2_channels=1, operator_type="ADD", ofm_dtype="int8"
    )
    return relay.Function(relay.analysis.free_vars(add), add)
def test_single_conv_compute_cycles_hint():
    """
    Check the "compute_cycles_hint" annotation remains in the lowering flow
    for single convolution.
    """
    primfunc = _compile_model(_create_single_conv2d())
    expected_hints = [2304, 640, 320]
    for op, expected in zip(primfunc.body.body.body.seq, expected_hints):
        assert op.attr_key == "pragma_compute_cycles_hint"
        assert op.value == expected
def test_double_conv_compute_cycles_hint():
    """
    Check the "compute_cycles_hint" annotation remains in the lowering flow
    for double convolution.
    """
    primfunc = _compile_model(_create_double_conv2d())
    expected_hints = [2304, 640, 768, 640, 320, 240]
    for op, expected in zip(primfunc.body.body.body.body.body.body.seq, expected_hints):
        assert op.attr_key == "pragma_compute_cycles_hint"
        assert op.value == expected
def test_scalar_add_compute_cycles_hint():
    """
    Check the "compute_cycles_hint" annotation remains in the lowering flow
    for add with scalar values.
    """
    primfunc = _compile_model(_create_scalar_add())
    expected_hints = [16, 24]
    for op, expected in zip(primfunc.body.body.seq, expected_hints):
        assert op.attr_key == "pragma_compute_cycles_hint"
        assert op.value == expected
| [
"numpy.ones",
"tvm.relay.backend.contrib.ethosu.codegen._create_cascader",
"tvm.contrib.ethosu.cascader.EthosuDeviceConfig",
"pytest.importorskip",
"tvm.contrib.ethosu.cascader.MemoryRegion",
"tvm.IRModule",
"tvm.relay.transform.InferType",
"tvm.relay.analysis.free_vars",
"tvm.relay.var"
] | [((906, 940), 'pytest.importorskip', 'pytest.importorskip', (['"""ethosu.vela"""'], {}), "('ethosu.vela')\n", (925, 940), False, 'import pytest\n'), ((1329, 1461), 'tvm.contrib.ethosu.cascader.MemoryRegion', 'MemoryRegion', ([], {'name': '"""SRAM"""', 'size': '(10 ** 6)', 'read_bandwidth': '(16)', 'write_bandwidth': '(16)', 'read_latency': '(0)', 'write_latency': '(0)', 'burst_length': '(1)'}), "(name='SRAM', size=10 ** 6, read_bandwidth=16, write_bandwidth=\n 16, read_latency=0, write_latency=0, burst_length=1)\n", (1341, 1461), False, 'from tvm.contrib.ethosu.cascader import MemoryRegion, EthosuDeviceConfig\n'), ((1530, 1607), 'tvm.contrib.ethosu.cascader.MemoryRegion', 'MemoryRegion', ([], {'name': '"""FLASH"""', 'size': '(10 ** 7)', 'read_bandwidth': '(4)', 'write_bandwidth': '(4)'}), "(name='FLASH', size=10 ** 7, read_bandwidth=4, write_bandwidth=4)\n", (1542, 1607), False, 'from tvm.contrib.ethosu.cascader import MemoryRegion, EthosuDeviceConfig\n'), ((1627, 1662), 'tvm.contrib.ethosu.cascader.EthosuDeviceConfig', 'EthosuDeviceConfig', (['"""ethos-u55-256"""'], {}), "('ethos-u55-256')\n", (1645, 1662), False, 'from tvm.contrib.ethosu.cascader import MemoryRegion, EthosuDeviceConfig\n'), ((2037, 2176), 'tvm.relay.backend.contrib.ethosu.codegen._create_cascader', '_create_cascader', ([], {'options': 'cascader_options', 'io_region': 'sram', 'constant_region': 'flash', 'working_regions': '[sram]', 'device_config': 'device_config'}), '(options=cascader_options, io_region=sram, constant_region=\n flash, working_regions=[sram], device_config=device_config)\n', (2053, 2176), False, 'from tvm.relay.backend.contrib.ethosu.codegen import _create_cascader\n'), ((2267, 2281), 'tvm.IRModule', 'tvm.IRModule', ([], {}), '()\n', (2279, 2281), False, 'import tvm\n'), ((2493, 2541), 'tvm.relay.var', 'relay.var', (['"""x"""'], {'shape': '(1, 8, 8, 4)', 'dtype': '"""int8"""'}), "('x', shape=(1, 8, 8, 4), dtype='int8')\n", (2502, 2541), False, 'from tvm import relay\n'), 
((2750, 2798), 'tvm.relay.var', 'relay.var', (['"""x"""'], {'shape': '(1, 8, 8, 4)', 'dtype': '"""int8"""'}), "('x', shape=(1, 8, 8, 4), dtype='int8')\n", (2759, 2798), False, 'from tvm import relay\n'), ((3091, 3139), 'tvm.relay.var', 'relay.var', (['"""x"""'], {'shape': '(1, 5, 4, 3)', 'dtype': '"""int8"""'}), "('x', shape=(1, 5, 4, 3), dtype='int8')\n", (3100, 3139), False, 'from tvm import relay\n'), ((2325, 2352), 'tvm.relay.transform.InferType', 'relay.transform.InferType', ([], {}), '()\n', (2350, 2352), False, 'from tvm import relay\n'), ((2653, 2684), 'tvm.relay.analysis.free_vars', 'relay.analysis.free_vars', (['conv1'], {}), '(conv1)\n', (2677, 2684), False, 'from tvm import relay\n'), ((2997, 3028), 'tvm.relay.analysis.free_vars', 'relay.analysis.free_vars', (['conv2'], {}), '(conv2)\n', (3021, 3028), False, 'from tvm import relay\n'), ((3163, 3184), 'numpy.ones', 'np.ones', (['(1, 1, 1, 1)'], {}), '((1, 1, 1, 1))\n', (3170, 3184), True, 'import numpy as np\n'), ((3375, 3404), 'tvm.relay.analysis.free_vars', 'relay.analysis.free_vars', (['add'], {}), '(add)\n', (3399, 3404), False, 'from tvm import relay\n')] |
# encoding: utf-8
'''
Created on Dec 18, 2018
@author: <NAME>
'''
import time
from array import *
from ctypes import *
from sys import exit
from multiprocessing import Process
from multiprocessing import Queue
import numpy as np
class EmotivDeviceReader(object):
    """Reads EEG band-power data from an Emotiv headset in a background process.

    Attributes:
        queue: multiprocessing queue through which the reader sub-process
            publishes one numpy array of band powers per successful poll
        num_start: number of reader sub-processes started via start()
    """

    def __init__(self):
        self.queue = Queue(maxsize=-1)
        self.num_start = 0  # how many reader sub-processes have been started

    def test(self):
        print("real_time_detection.GUI.EmotivDeviceReader.py now test.")
        print("test test test test test")

    def check_status(self):
        """Exit the process if the Emotiv engine cannot be reached."""
        print("EmotivDeviceReader.py.check_status(self).start...")
        if self.libEDK.IEE_EngineConnect(create_string_buffer(b"Emotiv Systems-5")) != 0:
            print("Failed to start up Emotiv Engine.")
            exit()
        else:
            print("Successfully start up Emotiv Engine.")
        print("EmotivDeviceReader.py.check_status(self).end...")

    def _bind_edk(self):
        """Load edk.dll and declare argument/return types for every EDK call used.

        Extracted from loop(); the original inline setup declared several
        bindings twice — each appears exactly once here.
        """
        try:
            self.libEDK = cdll.LoadLibrary("win64/edk.dll")
        except Exception as e:
            print('Error: cannot load EDK lib:', e)
            exit()
        print("EmotivDeviceReader.py...successfully connect")
        self.IEE_EmoEngineEventCreate = self.libEDK.IEE_EmoEngineEventCreate
        self.IEE_EmoEngineEventCreate.restype = c_void_p
        self.IEE_EmoEngineEventGetEmoState = self.libEDK.IEE_EmoEngineEventGetEmoState
        self.IEE_EmoEngineEventGetEmoState.argtypes = [c_void_p, c_void_p]
        self.IEE_EmoEngineEventGetEmoState.restype = c_int
        self.IEE_EmoStateCreate = self.libEDK.IEE_EmoStateCreate
        self.IEE_EmoStateCreate.argtype = c_void_p
        self.IEE_EmoStateCreate.restype = c_void_p
        self.IEE_EngineGetNextEvent = self.libEDK.IEE_EngineGetNextEvent
        self.IEE_EngineGetNextEvent.restype = c_int
        self.IEE_EngineGetNextEvent.argtypes = [c_void_p]
        self.IEE_EmoEngineEventGetUserId = self.libEDK.IEE_EmoEngineEventGetUserId
        self.IEE_EmoEngineEventGetUserId.restype = c_int
        self.IEE_EmoEngineEventGetUserId.argtypes = [c_void_p, c_void_p]
        self.IEE_EmoEngineEventGetType = self.libEDK.IEE_EmoEngineEventGetType
        self.IEE_EmoEngineEventGetType.restype = c_int
        self.IEE_EmoEngineEventGetType.argtypes = [c_void_p]
        self.IEE_FFTSetWindowingType = self.libEDK.IEE_FFTSetWindowingType
        self.IEE_FFTSetWindowingType.restype = c_int
        self.IEE_FFTSetWindowingType.argtypes = [c_uint, c_void_p]
        self.IEE_GetAverageBandPowers = self.libEDK.IEE_GetAverageBandPowers
        self.IEE_GetAverageBandPowers.restype = c_int
        self.IEE_GetAverageBandPowers.argtypes = [c_uint, c_int, c_void_p, c_void_p, c_void_p, c_void_p, c_void_p]
        self.IEE_EngineDisconnect = self.libEDK.IEE_EngineDisconnect
        self.IEE_EngineDisconnect.restype = c_int
        self.IEE_EngineDisconnect.argtype = c_void_p
        self.IEE_EmoStateFree = self.libEDK.IEE_EmoStateFree
        self.IEE_EmoStateFree.restype = c_int
        self.IEE_EmoStateFree.argtypes = [c_void_p]
        self.IEE_EmoEngineEventFree = self.libEDK.IEE_EmoEngineEventFree
        self.IEE_EmoEngineEventFree.restype = c_int
        self.IEE_EmoEngineEventFree.argtypes = [c_void_p]

    def loop(self):
        """Poll the Emotiv engine forever, pushing band powers onto self.queue.

        For each of the five monitored channels the engine reports theta,
        alpha, low-beta, high-beta and gamma power; each successful poll
        queues one flat numpy array covering all channels.
        """
        print("EmotivDeviceReader.py..loop(self).start...")
        self._bind_edk()
        self.eEvent = self.IEE_EmoEngineEventCreate()
        self.eState = self.IEE_EmoStateCreate()
        self.check_status()
        print("EmotivDeviceReader.py...self.check_status()...")
        userID = c_uint(0)
        user = pointer(userID)
        ready = 0
        thetaValue = c_double(0)
        alphaValue = c_double(0)
        low_betaValue = c_double(0)
        high_betaValue = c_double(0)
        gammaValue = c_double(0)
        theta = pointer(thetaValue)
        alpha = pointer(alphaValue)
        low_beta = pointer(low_betaValue)
        high_beta = pointer(high_betaValue)
        gamma = pointer(gammaValue)
        channelList = array('I', [3, 7, 9, 12, 16])  # IED_AF3, IED_AF4, IED_T7, IED_T8, IED_Pz
        while 1:
            state = self.IEE_EngineGetNextEvent(self.eEvent)
            data = []
            if state == 0:
                eventType = self.IEE_EmoEngineEventGetType(self.eEvent)
                self.IEE_EmoEngineEventGetUserId(self.eEvent, user)
                if eventType == 16:  # libEDK.IEE_Event_enum.IEE_UserAdded
                    ready = 1
                    self.IEE_FFTSetWindowingType(userID, 1)  # 1: IEE_HAMMING
                    print("User added")
                if ready == 1:
                    for channel in channelList:
                        result = self.IEE_GetAverageBandPowers(
                            userID, channel, theta, alpha, low_beta, high_beta, gamma)
                        if result == 0:  # EDK_OK
                            print("theta: %.6f, alpha: %.6f, low beta: %.6f, high beta: %.6f, gamma: %.6f \n" %
                                  (thetaValue.value, alphaValue.value, low_betaValue.value,
                                   high_betaValue.value, gammaValue.value))
                            data += [thetaValue.value, alphaValue.value,
                                     low_betaValue.value, high_betaValue.value, gammaValue.value]
            elif state != 0x0600:
                print("Internal error in Emotiv Engine ! ")
            if len(data) > 0:
                self.queue.put(np.array(data))
        return 0

    def start(self):
        """Start loop() in a sub-process so polling never blocks the caller."""
        print("sub_process")
        self.num_start += 1
        print("num_start: %d " % self.num_start)
        sub_process = Process(target=self.loop)
        print("sub_process.start().start")
        sub_process.start()
        print("sub_process.start().end")

    def get_data(self):
        """Drain the queue and return all pending band-power readings.

        Returns:
            list of numpy arrays; each holds theta, alpha, low_beta,
            high_beta, gamma per channel, channels ordered
            IED_AF3, IED_AF4, IED_T7, IED_T8, IED_Pz.
        """
        print("EmotivDeviceReader.get_data().start...")
        data_list = []
        while self.queue.qsize() > 0:
            data_list.append(self.queue.get())
        # Bug fix: the old code printed data_list[0] and data_list[1]
        # unconditionally, raising IndexError whenever fewer than two
        # readings were pending.
        for i, item in enumerate(data_list[:2]):
            print("data_list[%d]" % i)
            print(item)
        print("EmotivDeviceReader.get_data().end...")
        return data_list
# __main__
if __name__ == '__main__':
    # Manual smoke test: start the reader sub-process, give the engine five
    # seconds to warm up, then poll once a second for five seconds.
    print("EmotivDeviceReader.py..__main__.start...")
    device_reader = EmotivDeviceReader()
    print("device_reader.start()")
    device_reader.start()
    print("device_reader.start()")
    time.sleep(5)
    print("for 5 loop: data")
    for i in range(5):
        print("i:%d" % i)
        data = device_reader.get_data()
        data = np.array(data)
        print(data)
        time.sleep(1)
    print("EmotivDeviceReader.py..__main__.end...")
# __main__
| [
"multiprocessing.Process",
"time.sleep",
"numpy.array",
"sys.exit",
"multiprocessing.Queue"
] | [((9695, 9708), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (9705, 9708), False, 'import time\n'), ((494, 511), 'multiprocessing.Queue', 'Queue', ([], {'maxsize': '(-1)'}), '(maxsize=-1)\n', (499, 511), False, 'from multiprocessing import Queue\n'), ((8523, 8548), 'multiprocessing.Process', 'Process', ([], {'target': 'self.loop'}), '(target=self.loop)\n', (8530, 8548), False, 'from multiprocessing import Process\n'), ((9843, 9857), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (9851, 9857), True, 'import numpy as np\n'), ((9886, 9899), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9896, 9899), False, 'import time\n'), ((1124, 1130), 'sys.exit', 'exit', ([], {}), '()\n', (1128, 1130), False, 'from sys import exit\n'), ((1646, 1652), 'sys.exit', 'exit', ([], {}), '()\n', (1650, 1652), False, 'from sys import exit\n'), ((7922, 7936), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (7930, 7936), True, 'import numpy as np\n')] |
import scipy
import matplotlib.pyplot as plt
import numpy as np
# Measured samples: x covers [0, 1]; y is the matching noisy response that
# the polynomial fits below try to approximate.
x = [
    0.001, 0.019, 0.039, 0.058, 0.080, 0.098, 0.119, 0.139,
    0.159, 0.180, 0.198, 0.249, 0.298, 0.349, 0.398, 0.419,
    0.439, 0.460, 0.479, 0.499, 0.519, 0.540, 0.558, 0.578,
    0.598, 0.649, 0.698, 0.749, 0.798, 0.819, 0.839, 0.859,
    0.879, 0.900, 0.920, 0.939, 0.958, 0.980, 0.998
]
y = [
    0.056, 0.077, 0.076, 0.078, 0.088, 0.078, 0.105, 0.101,
    0.107, 0.111, 0.119, 0.120, 0.155, 0.195, 0.223, 0.276,
    0.293, 0.304, 0.325, 0.349, 0.370, 0.387, 0.390, 0.386,
    0.408, 0.458, 0.449, 0.467, 0.456, 0.447, 0.436, 0.443,
    0.444, 0.423, 0.429, 0.428, 0.445, 0.416, 0.400
]
# Dense grid for drawing the fitted curves over the sample range.
x_axis = np.arange(min(x), max(x) + 0.1, 0.1)
fig, ax = plt.subplots()
ax.scatter(x, y)
# Fit polynomials of degree 1..4 and overlay each, labelled with its residual.
for degree in range(1, 5):
    coeffs, residual, _, _, _ = np.polyfit(x, y, degree, full=True)
    fitted = np.poly1d(coeffs)
    ax.plot(x_axis, fitted(x_axis), label=f'deg: {degree}, res: {residual}')
    print(residual)
ax.grid(ls='-')
plt.show()
| [
"numpy.polyfit",
"numpy.poly1d",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((724, 738), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (736, 738), True, 'import matplotlib.pyplot as plt\n'), ((1037, 1047), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1045, 1047), True, 'import matplotlib.pyplot as plt\n'), ((827, 862), 'numpy.polyfit', 'np.polyfit', (['x', 'y', 'degree'], {'full': '(True)'}), '(x, y, degree, full=True)\n', (837, 862), True, 'import numpy as np\n'), ((884, 911), 'numpy.poly1d', 'np.poly1d', (['poly_coefficient'], {}), '(poly_coefficient)\n', (893, 911), True, 'import numpy as np\n')] |
#!/usr/bin/env python
from setuptools import setup
import backtracepython
# Package metadata; the release version is sourced from
# backtracepython.version_string so it is defined in exactly one place.
setup(
    name='backtracepython',
    version=backtracepython.version_string,
    description='Backtrace error reporting tool for Python',
    author='<NAME>',
    author_email='<EMAIL>',
    packages=['backtracepython'],
    test_suite="tests",
    url='https://github.com/backtrace-labs/backtrace-python',
)
| [
"setuptools.setup"
] | [((77, 370), 'setuptools.setup', 'setup', ([], {'name': '"""backtracepython"""', 'version': 'backtracepython.version_string', 'description': '"""Backtrace error reporting tool for Python"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'packages': "['backtracepython']", 'test_suite': '"""tests"""', 'url': '"""https://github.com/backtrace-labs/backtrace-python"""'}), "(name='backtracepython', version=backtracepython.version_string,\n description='Backtrace error reporting tool for Python', author=\n '<NAME>', author_email='<EMAIL>', packages=['backtracepython'],\n test_suite='tests', url=\n 'https://github.com/backtrace-labs/backtrace-python')\n", (82, 370), False, 'from setuptools import setup\n')] |
#
# MIT License
#
# Copyright (c) 2020 Airbyte
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import json
import time
from typing import Iterator, List, Mapping
import boto3
from airbyte_cdk.logger import AirbyteLogger
from botocore.errorfactory import ClientError
from source_s3.stream import IncrementalFileStreamS3
from .integration_test_abstract import HERE, SAMPLE_DIR, AbstractTestIncrementalFileStream
LOGGER = AirbyteLogger()
class TestIncrementalFileStreamS3(AbstractTestIncrementalFileStream):
    @property
    def stream_class(self) -> type:
        # Concrete stream implementation exercised by the abstract test suite.
        return IncrementalFileStreamS3
    @property
    def credentials(self) -> Mapping:
        # Load the AWS key id/secret from the repo-local secrets file
        # (kept out of version control).
        filename = HERE.parent / "secrets/config.json"
        with open(filename) as json_file:
            config = json.load(json_file)
        return {
            "aws_access_key_id": config["provider"]["aws_access_key_id"],
            "aws_secret_access_key": config["provider"]["aws_secret_access_key"],
        }
def provider(self, bucket_name: str) -> Mapping:
return {"storage": "S3", "bucket": bucket_name}
def _s3_connect(self, credentials: Mapping):
region = "eu-west-3"
self.s3_client = boto3.client(
"s3",
aws_access_key_id=credentials["aws_access_key_id"],
aws_secret_access_key=credentials["aws_secret_access_key"],
region_name=region,
)
self.s3_resource = boto3.resource(
"s3", aws_access_key_id=credentials["aws_access_key_id"], aws_secret_access_key=credentials["aws_secret_access_key"]
)
def cloud_files(self, cloud_bucket_name: str, credentials: Mapping, files_to_upload: List, private: bool = True) -> Iterator[str]:
self._s3_connect(credentials)
region = "eu-west-3"
location = {"LocationConstraint": region}
bucket_name = cloud_bucket_name
print("\n")
LOGGER.info(f"Uploading {len(files_to_upload)} file(s) to {'private' if private else 'public'} aws bucket '{bucket_name}'")
try:
self.s3_client.head_bucket(Bucket=bucket_name)
except ClientError:
acl = "private" if private else "public-read"
self.s3_client.create_bucket(ACL=acl, Bucket=bucket_name, CreateBucketConfiguration=location)
# wait here until the bucket is ready
ready = False
attempts, max_attempts = 0, 30
while not ready:
time.sleep(1)
try:
self.s3_client.head_bucket(Bucket=bucket_name)
except ClientError:
attempts += 1
if attempts >= max_attempts:
raise RuntimeError(f"Couldn't get a successful ping on bucket after ~{max_attempts} seconds")
else:
ready = True
LOGGER.info(f"bucket {bucket_name} initialised")
extra_args = {}
if not private:
extra_args = {"ACL": "public-read"}
for filepath in files_to_upload:
upload_path = str(filepath).replace(str(SAMPLE_DIR), "")
upload_path = upload_path[1:] if upload_path[0] == "/" else upload_path
self.s3_client.upload_file(str(filepath), bucket_name, upload_path, ExtraArgs=extra_args)
yield f"{bucket_name}/{upload_path}"
def teardown_infra(self, cloud_bucket_name: str, credentials: Mapping):
self._s3_connect(credentials)
bucket = self.s3_resource.Bucket(cloud_bucket_name)
bucket.objects.all().delete()
bucket.delete()
LOGGER.info(f"S3 Bucket {cloud_bucket_name} is now deleted")
| [
"boto3.client",
"airbyte_cdk.logger.AirbyteLogger",
"time.sleep",
"boto3.resource",
"json.load"
] | [((1435, 1450), 'airbyte_cdk.logger.AirbyteLogger', 'AirbyteLogger', ([], {}), '()\n', (1448, 1450), False, 'from airbyte_cdk.logger import AirbyteLogger\n'), ((2201, 2360), 'boto3.client', 'boto3.client', (['"""s3"""'], {'aws_access_key_id': "credentials['aws_access_key_id']", 'aws_secret_access_key': "credentials['aws_secret_access_key']", 'region_name': 'region'}), "('s3', aws_access_key_id=credentials['aws_access_key_id'],\n aws_secret_access_key=credentials['aws_secret_access_key'], region_name\n =region)\n", (2213, 2360), False, 'import boto3\n'), ((2438, 2574), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {'aws_access_key_id': "credentials['aws_access_key_id']", 'aws_secret_access_key': "credentials['aws_secret_access_key']"}), "('s3', aws_access_key_id=credentials['aws_access_key_id'],\n aws_secret_access_key=credentials['aws_secret_access_key'])\n", (2452, 2574), False, 'import boto3\n'), ((1783, 1803), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (1792, 1803), False, 'import json\n'), ((3448, 3461), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3458, 3461), False, 'import time\n')] |
from serverless_crud.model import BaseModel
# troposphere is an optional dependency: when it is absent, fall back to a
# minimal stand-in so policy statements still carry the substitution string.
try:
    from troposphere import Sub
except ImportError:
    class Sub:
        """Minimal stand-in for troposphere.Sub: stores and echoes the template string."""
        def __init__(self, name):
            self.name = name
        def __str__(self):
            return self.name
class PolicyBuilder:
    """Collects IAM policy statements, keyed by their "Sid" value."""

    def __init__(self, statements=None):
        """Accept a single statement dict, a list of them, or nothing."""
        raw = statements or []
        if isinstance(raw, dict):
            raw = [raw]
        self.statements = {item.get("Sid"): item for item in raw}

    def get_statement(self, sid):
        """Return the statement registered under *sid*, or None if absent."""
        return self.statements.get(sid)

    def add_statement(self, statement: dict):
        """Insert *statement*, replacing any existing one with the same Sid."""
        sid = statement.get("Sid")
        self.statements[sid] = statement

    def registry(self, model: BaseModel):
        """Hook for subclasses: register resources for *model*. No-op here."""
        pass

    def all(self):
        """Return only the statements that carry both an Action and a Resource."""
        complete = []
        for statement in self.statements.values():
            if statement.get("Action") and statement.get("Resource"):
                complete.append(statement)
        return complete
class DynamoDBPolicyBuilder(PolicyBuilder):
    """PolicyBuilder pre-seeded with one statement granting DynamoDB CRUD actions."""
    def __init__(self):
        # Single statement; table ARNs are appended to "Resource" as models register.
        statements = {
            "Sid": "DynamodbTables",
            "Effect": "Allow",
            "Action": [
                "dynamodb:BatchGet*",
                "dynamodb:Get*",
                "dynamodb:Query",
                "dynamodb:Scan",
                "dynamodb:BatchWrite*",
                "dynamodb:Delete*",
                "dynamodb:Update*",
                "dynamodb:PutItem",
            ],
            "Resource": [],
        }
        super().__init__(statements)
    def registry(self, model: BaseModel):
        """Grant access to *model*'s DynamoDB table and to all of its indexes."""
        super().registry(model)
        self.get_statement("DynamodbTables").get("Resource").append(
            Sub(f"arn:aws:dynamodb:${{AWS::Region}}:${{AWS::AccountId}}:table/{model._meta.table_name}")
        )
        # Second ARN covers every secondary index of the same table.
        self.get_statement("DynamodbTables").get("Resource").append(
            Sub(f"arn:aws:dynamodb:${{AWS::Region}}:${{AWS::AccountId}}:table/{model._meta.table_name}/index/*")
        )
| [
"troposphere.Sub"
] | [((1630, 1727), 'troposphere.Sub', 'Sub', (['f"""arn:aws:dynamodb:${{AWS::Region}}:${{AWS::AccountId}}:table/{model._meta.table_name}"""'], {}), "(f'arn:aws:dynamodb:${{AWS::Region}}:${{AWS::AccountId}}:table/{model._meta.table_name}'\n )\n", (1633, 1727), False, 'from troposphere import Sub\n'), ((1814, 1919), 'troposphere.Sub', 'Sub', (['f"""arn:aws:dynamodb:${{AWS::Region}}:${{AWS::AccountId}}:table/{model._meta.table_name}/index/*"""'], {}), "(f'arn:aws:dynamodb:${{AWS::Region}}:${{AWS::AccountId}}:table/{model._meta.table_name}/index/*'\n )\n", (1817, 1919), False, 'from troposphere import Sub\n')] |
# Installation script
from distutils.core import setup, Extension
# Package metadata and layout. The commented ext_modules entries look like
# optional C extensions -- presumably accelerated neuron implementations;
# confirm a compiler toolchain is available before re-enabling them.
setup(
    name='neat-python',
    version='0.1',
    description='A NEAT (NeuroEvolution of Augmenting Topologies) implementation',
    packages=['neat', 'neat/iznn', 'neat/nn', 'neat/ctrnn', 'neat/ifnn'],
    #ext_modules=[
    #    Extension('neat/iznn/iznn_cpp', ['neat/iznn/iznn.cpp']),
    #    Extension('neat/nn/ann', ['neat/nn/nn_cpp/ANN.cpp', 'neat/nn/nn_cpp/PyANN.cpp']),
    #    Extension('neat/ifnn/ifnn_cpp', ['neat/ifnn/ifnn.cpp']),],
)
| [
"distutils.core.setup"
] | [((66, 264), 'distutils.core.setup', 'setup', ([], {'name': '"""neat-python"""', 'version': '"""0.1"""', 'description': '"""A NEAT (NeuroEvolution of Augmenting Topologies) implementation"""', 'packages': "['neat', 'neat/iznn', 'neat/nn', 'neat/ctrnn', 'neat/ifnn']"}), "(name='neat-python', version='0.1', description=\n 'A NEAT (NeuroEvolution of Augmenting Topologies) implementation',\n packages=['neat', 'neat/iznn', 'neat/nn', 'neat/ctrnn', 'neat/ifnn'])\n", (71, 264), False, 'from distutils.core import setup, Extension\n')] |
from unittest import TestCase
from src.stack import StackWithMaxValue
class TestStackWithMaxValue(TestCase):
    """Behavioural tests for StackWithMaxValue."""

    def test_push(self):
        """Pushed items appear in insertion order."""
        s = StackWithMaxValue()
        for value in (1, 2, 3):
            s.push(value)
        self.assertEqual([1, 2, 3], s.as_list())

    def test_pop(self):
        """pop returns the last pushed item and raises on an empty stack."""
        s = StackWithMaxValue()
        s.push(1)
        self.assertEqual(1, s.pop())
        with self.assertRaises(IndexError):
            s.pop()
        self.assertEqual([], s.as_list())

    def test_peek(self):
        """peek returns the top element without removing it; raises when empty."""
        s = StackWithMaxValue()
        s.push(1)
        self.assertEqual(1, s.peek())
        self.assertEqual([1], s.as_list())
        s.pop()
        with self.assertRaises(IndexError):
            s.peek()

    def test_is_empty(self):
        """is_empty flips to False after the first push."""
        s = StackWithMaxValue()
        self.assertTrue(s.is_empty())
        s.push(1)
        self.assertFalse(s.is_empty())

    def test_maximum(self):
        """maximum tracks the largest element still on the stack."""
        s = StackWithMaxValue()
        s.push(1)
        s.push(2)
        self.assertEqual(2, s.maximum())
        s.pop()
        self.assertEqual(1, s.maximum())
| [
"src.stack.StackWithMaxValue"
] | [((154, 173), 'src.stack.StackWithMaxValue', 'StackWithMaxValue', ([], {}), '()\n', (171, 173), False, 'from src.stack import StackWithMaxValue\n'), ((334, 353), 'src.stack.StackWithMaxValue', 'StackWithMaxValue', ([], {}), '()\n', (351, 353), False, 'from src.stack import StackWithMaxValue\n'), ((573, 592), 'src.stack.StackWithMaxValue', 'StackWithMaxValue', ([], {}), '()\n', (590, 592), False, 'from src.stack import StackWithMaxValue\n'), ((839, 858), 'src.stack.StackWithMaxValue', 'StackWithMaxValue', ([], {}), '()\n', (856, 858), False, 'from src.stack import StackWithMaxValue\n'), ((1011, 1030), 'src.stack.StackWithMaxValue', 'StackWithMaxValue', ([], {}), '()\n', (1028, 1030), False, 'from src.stack import StackWithMaxValue\n')] |
# -*- coding: utf-8 -*-
from keras.models import load_model
import numpy as np
import os
import cv2
from FaceQNet import load_Qnet_model, face_quality
# Loading the pretrained model
model = load_Qnet_model()
IMG_PATH = '/home/sai/YANG/image/video/nanning/haha'
# NOTE(review): `dir` shadows the builtin of the same name; rename when touching this code.
dir = os.listdir(IMG_PATH)
count = len(dir)
print('count:', count)
for i in dir:
    count -= 1
    if count%1000==0:
        # Progress marker: print every 1000 remaining directories.
        print('count:', count)
    dir_path = os.path.join(IMG_PATH, i)
    imgs_dir = os.listdir(dir_path)
    for j in imgs_dir:
        img_path = os.path.join(dir_path, j)
        img = cv2.imread(img_path)
        # score[0][0] is presumably the scalar quality value -- verify
        # against face_quality's actual return shape.
        score = face_quality(model, img)
        # img = [cv2.resize(cv2.imread(img_path, cv2.IMREAD_COLOR), (224, 224))]
        # test_data = np.array(img, copy=False, dtype=np.float32)
        # score = model.predict(test_data, batch_size=1, verbose=1)
        # Prefix each file name with its quality score, e.g. "0.87@img.jpg".
        path1 = str(score[0][0]) + '@'
        rename = path1 + j
        os.rename(img_path, os.path.join(dir_path, rename))
| [
"os.listdir",
"FaceQNet.load_Qnet_model",
"os.path.join",
"cv2.imread",
"FaceQNet.face_quality"
] | [((191, 208), 'FaceQNet.load_Qnet_model', 'load_Qnet_model', ([], {}), '()\n', (206, 208), False, 'from FaceQNet import load_Qnet_model, face_quality\n'), ((269, 289), 'os.listdir', 'os.listdir', (['IMG_PATH'], {}), '(IMG_PATH)\n', (279, 289), False, 'import os\n'), ((427, 452), 'os.path.join', 'os.path.join', (['IMG_PATH', 'i'], {}), '(IMG_PATH, i)\n', (439, 452), False, 'import os\n'), ((468, 488), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (478, 488), False, 'import os\n'), ((531, 556), 'os.path.join', 'os.path.join', (['dir_path', 'j'], {}), '(dir_path, j)\n', (543, 556), False, 'import os\n'), ((571, 591), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (581, 591), False, 'import cv2\n'), ((608, 632), 'FaceQNet.face_quality', 'face_quality', (['model', 'img'], {}), '(model, img)\n', (620, 632), False, 'from FaceQNet import load_Qnet_model, face_quality\n'), ((942, 972), 'os.path.join', 'os.path.join', (['dir_path', 'rename'], {}), '(dir_path, rename)\n', (954, 972), False, 'import os\n')] |
# Generated by Django 3.2.2 on 2021-05-10 04:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: change Image.image_src upload destination to static/media/images/."""
    dependencies = [
        ('imgrepo', '0002_auto_20210509_2317'),
    ]
    operations = [
        migrations.AlterField(
            model_name='image',
            name='image_src',
            field=models.ImageField(upload_to='static/media/images/'),
        ),
    ]
| [
"django.db.models.ImageField"
] | [((338, 389), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""static/media/images/"""'}), "(upload_to='static/media/images/')\n", (355, 389), False, 'from django.db import migrations, models\n')] |
# Generated by Django 3.1.3 on 2022-02-28 16:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the Results model (recepts_num and judge integer columns)."""
    dependencies = [
        ('home', '0003_auto_20220228_1112'),
    ]
    operations = [
        migrations.CreateModel(
            name='Results',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # db_column differs from the field name ('recept_num' vs 'recepts_num').
                ('recepts_num', models.IntegerField(db_column='recept_num')),
                ('judge', models.IntegerField(db_column='judge')),
            ],
        ),
    ]
| [
"django.db.models.AutoField",
"django.db.models.IntegerField"
] | [((328, 421), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (344, 421), False, 'from django.db import migrations, models\n'), ((452, 495), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': '"""recept_num"""'}), "(db_column='recept_num')\n", (471, 495), False, 'from django.db import migrations, models\n'), ((524, 562), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': '"""judge"""'}), "(db_column='judge')\n", (543, 562), False, 'from django.db import migrations, models\n')] |
from flask import Flask, request, render_template, flash, redirect, url_for
from wtforms import Form, TextField, TextAreaField, validators, StringField, SubmitField
from wtforms.validators import DataRequired
from app.DineCision import yelprequest
from flask_wtf import FlaskForm
import random
import json
import os
# Yelp API key from the environment; the fallback string doubles as an
# instruction for developers who have not configured the key yet.
API_KEY = os.environ.get("DINECISION_API_KEY") or "Please obtain a Yelp API Key and set it as an environment variable named 'DINECISION_API_KEY'"
# Flask session secret; override via the SECRET_KEY environment variable.
SECRET_KEY = os.environ.get("SECRET_KEY") or "my super secret"
# API constants, you shouldn't have to change these.
API_HOST = 'https://api.yelp.com'
SEARCH_PATH = '/v3/businesses/search'
BUSINESS_PATH = '/v3/businesses/'
app = Flask(__name__)
app.secret_key = SECRET_KEY  # required for flash() messages and sessions
class NameForm(Form):
    """Form asking the user for a location to search restaurants around."""
    # Use the modern WTForms names: TextField and validators.required() are
    # deprecated aliases (removed in WTForms 3) of StringField/DataRequired,
    # both of which this module already imports.
    location = StringField('location:', validators=[DataRequired()])
    submit = SubmitField('Submit')
@app.route('/', methods=['GET', 'POST'])
def index():
    """Landing page: show the search form and route a submitted location."""
    form = NameForm(request.form)
    submitted = request.method == 'POST' and 'location' in request.form
    if not submitted:
        return render_template('index.html')
    if not form.validate():
        flash("Please enter a location")
        return redirect(url_for('error'))
    return redirect(url_for('confirm', location=request.form.get('location')))
@app.route('/error')
def error():
    """Render the generic error page shown after a failed search."""
    return render_template('error.html')
@app.route('/confirm/<location>')
def confirm(location):
    """Show one randomly picked restaurant for *location*, or fall back to /error."""
    business = yelp(location)
    if business is None:
        flash("Sadly there is no good restaurant to recommend in this location due to limited data, please choose another location")
        return redirect(url_for('error'))
    address = business["location"]['display_address']
    return render_template(
        'confirm.html',
        random_business=business['name'],
        where_is_it_0=address[0],
        where_is_it_1=address[1],
        number_review=business["review_count"],
        pic_url=business["image_url"],
    )
def yelp(location_input):
    """Query Yelp's business search for restaurants near *location_input*.

    Returns one randomly chosen business dict, or None when the search
    comes back empty for the given location.
    """
    query = {
        'location': location_input.replace(' ', '+'),
        'radius': 500,
        'is_closed': "false",
        'rating': 4,
        'limit': 5,
        'categories': "restaurants, All",
        'price': 2,
    }
    response = yelprequest(API_HOST, SEARCH_PATH, API_KEY, query)
    candidates = response["businesses"]
    if not candidates:
        return None
    return random.choice(candidates)
if __name__ == "__main__":
app.debug = True
app.run()
| [
"flask.render_template",
"random.choice",
"flask.flash",
"flask.Flask",
"wtforms.validators.required",
"os.environ.get",
"wtforms.SubmitField",
"flask.url_for",
"flask.request.form.get",
"app.DineCision.yelprequest"
] | [((693, 708), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (698, 708), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((327, 363), 'os.environ.get', 'os.environ.get', (['"""DINECISION_API_KEY"""'], {}), "('DINECISION_API_KEY')\n", (341, 363), False, 'import os\n'), ((476, 504), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (490, 504), False, 'import os\n'), ((847, 868), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (858, 868), False, 'from wtforms import Form, TextField, TextAreaField, validators, StringField, SubmitField\n'), ((1273, 1302), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (1288, 1302), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((1349, 1378), 'flask.render_template', 'render_template', (['"""error.html"""'], {}), "('error.html')\n", (1364, 1378), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((2271, 2326), 'app.DineCision.yelprequest', 'yelprequest', (['API_HOST', 'SEARCH_PATH', 'API_KEY', 'url_params'], {}), '(API_HOST, SEARCH_PATH, API_KEY, url_params)\n', (2282, 2326), False, 'from app.DineCision import yelprequest\n'), ((1514, 1648), 'flask.flash', 'flash', (['"""Sadly there is no good restaurant to recommend in this location due to limited data, please choose another location"""'], {}), "(\n 'Sadly there is no good restaurant to recommend in this location due to limited data, please choose another location'\n )\n", (1519, 1648), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((1706, 2010), 'flask.render_template', 'render_template', (['"""confirm.html"""'], {'random_business': "random_business['name']", 'where_is_it_0': "random_business['location']['display_address'][0]", 'where_is_it_1': "random_business['location']['display_address'][1]", 
'number_review': "random_business['review_count']", 'pic_url': "random_business['image_url']"}), "('confirm.html', random_business=random_business['name'],\n where_is_it_0=random_business['location']['display_address'][0],\n where_is_it_1=random_business['location']['display_address'][1],\n number_review=random_business['review_count'], pic_url=random_business[\n 'image_url'])\n", (1721, 2010), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((2403, 2431), 'random.choice', 'random.choice', (['business_list'], {}), '(business_list)\n', (2416, 2431), False, 'import random\n'), ((1183, 1215), 'flask.flash', 'flash', (['"""Please enter a location"""'], {}), "('Please enter a location')\n", (1188, 1215), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((1663, 1679), 'flask.url_for', 'url_for', (['"""error"""'], {}), "('error')\n", (1670, 1679), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((810, 831), 'wtforms.validators.required', 'validators.required', ([], {}), '()\n', (829, 831), False, 'from wtforms import Form, TextField, TextAreaField, validators, StringField, SubmitField\n'), ((1244, 1260), 'flask.url_for', 'url_for', (['"""error"""'], {}), "('error')\n", (1251, 1260), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n'), ((1126, 1154), 'flask.request.form.get', 'request.form.get', (['"""location"""'], {}), "('location')\n", (1142, 1154), False, 'from flask import Flask, request, render_template, flash, redirect, url_for\n')] |
import unittest
import numpy as np
import torch
from torch import optim
from spn.structure.Base import Product, Sum
from spn.structure.Base import assign_ids, rebuild_scopes_bottom_up
from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical
from spn.gpu.TensorFlow import spn_to_tf_graph, optimize_tf_graph
from spn.gpu.TensorFlow import eval_tf
from spn.algorithms.Inference import log_likelihood
from torch import nn
from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch
DELTA = 1e-10  # absolute tolerance used by the float comparisons in the tests

class IdentityLeaf(nn.Module):
    """Test-only leaf that passes one input column through, in log space."""

    def __init__(self, scope):
        super().__init__()
        self.scope = scope

    def forward(self, x):
        # Select the column this leaf is scoped to and move it to log space.
        selected = x[:, self.scope]
        return torch.log(selected)
class TestPytorchNodes(unittest.TestCase):
    """Test cases for different node types."""
    def test_sum_node(self):
        """Test SumNode implementation."""
        # Init sum node children (identity leaves, one per feature)
        id0 = IdentityLeaf(scope=0)
        id1 = IdentityLeaf(scope=1)
        id2 = IdentityLeaf(scope=2)
        children = [id0, id1, id2]
        weights = np.array([0.1, 0.4, 0.5])
        sumnode = SumNode(children=children, weights=weights)
        # Define input: Two samples with three features
        sample1 = np.array([1, 2, 3])
        sample2 = np.array([10, 20, 30])
        x = torch.Tensor([sample1, sample2])
        # Get sum node result
        result = sumnode(x)
        # Expected results: log of the weighted sum per sample
        expected_result = np.log([np.sum(weights * sample1), np.sum(weights * sample2)]).tolist()
        # Assertions
        self.assertEqual(len(result.tolist()), 2)
        self.assertTrue(np.isclose(result.tolist(), expected_result, atol=DELTA).all())
    def test_product_node(self):
        """Test product node implementation"""
        # Init product node
        id0 = IdentityLeaf(scope=0)
        id1 = IdentityLeaf(scope=1)
        id2 = IdentityLeaf(scope=2)
        prod = ProductNode(children=[id0, id1, id2])
        # Define input: Two samples with three features
        sample1 = np.array([1, 2, 3])
        sample2 = np.array([10, 20, 30])
        x = torch.Tensor([sample1, sample2])
        # Get product node result
        result = prod(x)
        # Product in logspace is sum
        expected_result = [np.sum(np.log(sample1)), np.sum(np.log(sample2))]
        # Assertions
        self.assertEqual(len(result.tolist()), 2)
        self.assertTrue(np.isclose(result.tolist(), expected_result, atol=DELTA).all())
    def test_gaussian_node(self):
        """Test the GaussianNode implementation"""
        means = [0.0, 0.5, 1.0]
        stds = [1.0, 2.0, 3.0]
        gauss0 = GaussianNode(mean=means[0], std=stds[0], scope=0)
        gauss1 = GaussianNode(mean=means[1], std=stds[1], scope=1)
        gauss2 = GaussianNode(mean=means[2], std=stds[2], scope=2)
        sample1 = np.array([1, 2, 3])
        sample2 = np.array([10, 20, 30])
        x = torch.Tensor([sample1, sample2])
        # Get results
        res_gauss0 = gauss0(x)
        res_gauss1 = gauss1(x)
        res_gauss2 = gauss2(x)
        # Expect results from torch's reference normal distributions
        normal0 = torch.distributions.Normal(loc=means[0], scale=stds[0])
        normal1 = torch.distributions.Normal(loc=means[1], scale=stds[1])
        normal2 = torch.distributions.Normal(loc=means[2], scale=stds[2])
        exp_gauss0 = normal0.log_prob(torch.Tensor([1, 10]))
        exp_gauss1 = normal1.log_prob(torch.Tensor([2, 20]))
        exp_gauss2 = normal2.log_prob(torch.Tensor([3, 30]))
        # Assertions
        self.assertEqual(len(res_gauss0.tolist()), 2)
        self.assertEqual(len(res_gauss1.tolist()), 2)
        self.assertEqual(len(res_gauss2.tolist()), 2)
        # Assert that results are numerically equal
        self.assertTrue(np.isclose(res_gauss0.tolist(), exp_gauss0, atol=DELTA).all())
        self.assertTrue(np.isclose(res_gauss1.tolist(), exp_gauss1, atol=DELTA).all())
        self.assertTrue(np.isclose(res_gauss2.tolist(), exp_gauss2, atol=DELTA).all())
    def test_equal_to_tf(self):
        """Compare SPFlow log-likelihoods against the PyTorch implementation on the same SPN."""
        # SPFLow implementation
        g00 = Gaussian(mean=0.0, stdev=1.0, scope=0)
        g10 = Gaussian(mean=1.0, stdev=2.0, scope=1)
        g01 = Gaussian(mean=3.0, stdev=2.0, scope=0)
        g11 = Gaussian(mean=5.0, stdev=1.0, scope=1)
        p0 = Product(children=[g00, g10])
        p1 = Product(children=[g01, g11])
        s = Sum(weights=[0.2, 0.8], children=[p0, p1])
        assign_ids(s)
        rebuild_scopes_bottom_up(s)
        # Test for 100 random samples
        data = np.random.randn(100, 2)
        # LL from SPN
        ll = log_likelihood(s, data)
        # PyTorch implementation of the identical structure
        g00 = GaussianNode(mean=0.0, std=1.0, scope=0)
        g10 = GaussianNode(mean=1.0, std=2.0, scope=1)
        g01 = GaussianNode(mean=3.0, std=2.0, scope=0)
        g11 = GaussianNode(mean=5.0, std=1.0, scope=1)
        p0 = ProductNode(children=[g00, g10])
        p1 = ProductNode(children=[g01, g11])
        rootnode = SumNode(weights=[0.2, 0.8], children=[p0, p1])
        datatensor = torch.Tensor(data)
        # LL from pytorch
        ll_torch = rootnode(datatensor)
        # Assert equality
        self.assertTrue(np.isclose(np.array(ll).squeeze(), ll_torch.detach().numpy(), atol=DELTA).all())
    def test_spn_to_torch(self):
        """Check that from_spn conversions preserve node parameters."""
        # SPFLow implementation
        n0 = Gaussian(mean=0.0, stdev=1.0, scope=0)
        n1 = Categorical(p=[0.1, 0.3, 0.6])
        n2 = Sum(weights=[0.1, 0.2, 0.3, 0.4], children=[n0, n1])
        n3 = Product(children=[n0, n1])
        torch_n0 = GaussianNode.from_spn(n0)
        torch_n1 = CategoricalNode.from_spn(n1)
        torch_n2 = SumNode.from_spn(n2)
        torch_n3 = ProductNode.from_spn(n3)
        self.assertEqual(torch_n0.mean, n0.mean)
        self.assertEqual(torch_n0.std, n0.stdev)
        self.assertTrue(np.isclose(torch_n1.p.detach().numpy(), n1.p, atol=DELTA).all())
        self.assertTrue(np.isclose(torch_n2.weights.detach().numpy(), n2.weights, atol=DELTA).all())
    def test_torch_vs_tf_time(self):
        """Benchmark: time TF graph optimization against PyTorch optimization on the same SPN."""
        # Create sample data (imports are local to keep TF/sklearn optional for the other tests)
        from sklearn.datasets.samples_generator import make_blobs
        import tensorflow as tf
        from time import time
        X, y = make_blobs(n_samples=10, centers=3, n_features=2, random_state=0)
        X = X.astype(np.float32)
        # SPFLow implementation
        g00 = Gaussian(mean=0.0, stdev=1.0, scope=0)
        g10 = Gaussian(mean=1.0, stdev=2.0, scope=1)
        g01 = Gaussian(mean=3.0, stdev=2.0, scope=0)
        g11 = Gaussian(mean=5.0, stdev=1.0, scope=1)
        p0 = Product(children=[g00, g10])
        p1 = Product(children=[g01, g11])
        s = Sum(weights=[0.2, 0.8], children=[p0, p1])
        assign_ids(s)
        rebuild_scopes_bottom_up(s)
        # Convert to both backends
        tf_spn, data_placeholder, variable_dict = spn_to_tf_graph(s, data=X)
        torch_spn = SumNode.from_spn(s)
        # Optimizer
        lr = 0.001
        tf_optim = tf.train.AdamOptimizer(lr)
        torch_optim = optim.Adam(torch_spn.parameters(), lr)
        t0 = time()
        epochs = 10
        optimize_tf_graph(tf_spn, variable_dict, data_placeholder, X, epochs=epochs, optimizer=tf_optim)
        t1 = time()
        optimize_torch(torch_spn, X, epochs=epochs, optimizer=torch_optim)
        t2 = time()
        print("Tensorflow took: ", t1 - t0)
        print("PyTorch took: ", t2 - t1)
if __name__ == "__main__":
unittest.main()
| [
"pytorch.SumNode.from_spn",
"spn.structure.Base.Sum",
"numpy.log",
"spn.algorithms.Inference.log_likelihood",
"spn.structure.leaves.parametric.Parametric.Gaussian",
"numpy.array",
"unittest.main",
"pytorch.GaussianNode.from_spn",
"spn.structure.leaves.parametric.Parametric.Categorical",
"tensorflo... | [((7532, 7547), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7545, 7547), False, 'import unittest\n'), ((790, 817), 'torch.log', 'torch.log', (['x[:, self.scope]'], {}), '(x[:, self.scope])\n', (799, 817), False, 'import torch\n'), ((1172, 1197), 'numpy.array', 'np.array', (['[0.1, 0.4, 0.5]'], {}), '([0.1, 0.4, 0.5])\n', (1180, 1197), True, 'import numpy as np\n'), ((1216, 1259), 'pytorch.SumNode', 'SumNode', ([], {'children': 'children', 'weights': 'weights'}), '(children=children, weights=weights)\n', (1223, 1259), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((1335, 1354), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1343, 1354), True, 'import numpy as np\n'), ((1373, 1395), 'numpy.array', 'np.array', (['[10, 20, 30]'], {}), '([10, 20, 30])\n', (1381, 1395), True, 'import numpy as np\n'), ((1408, 1440), 'torch.Tensor', 'torch.Tensor', (['[sample1, sample2]'], {}), '([sample1, sample2])\n', (1420, 1440), False, 'import torch\n'), ((2018, 2055), 'pytorch.ProductNode', 'ProductNode', ([], {'children': '[id0, id1, id2]'}), '(children=[id0, id1, id2])\n', (2029, 2055), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((2131, 2150), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (2139, 2150), True, 'import numpy as np\n'), ((2169, 2191), 'numpy.array', 'np.array', (['[10, 20, 30]'], {}), '([10, 20, 30])\n', (2177, 2191), True, 'import numpy as np\n'), ((2204, 2236), 'torch.Tensor', 'torch.Tensor', (['[sample1, sample2]'], {}), '([sample1, sample2])\n', (2216, 2236), False, 'import torch\n'), ((2738, 2787), 'pytorch.GaussianNode', 'GaussianNode', ([], {'mean': 'means[0]', 'std': 'stds[0]', 'scope': '(0)'}), '(mean=means[0], std=stds[0], scope=0)\n', (2750, 2787), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((2805, 2854), 'pytorch.GaussianNode', 
'GaussianNode', ([], {'mean': 'means[1]', 'std': 'stds[1]', 'scope': '(1)'}), '(mean=means[1], std=stds[1], scope=1)\n', (2817, 2854), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((2872, 2921), 'pytorch.GaussianNode', 'GaussianNode', ([], {'mean': 'means[2]', 'std': 'stds[2]', 'scope': '(2)'}), '(mean=means[2], std=stds[2], scope=2)\n', (2884, 2921), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((2940, 2959), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (2948, 2959), True, 'import numpy as np\n'), ((2978, 3000), 'numpy.array', 'np.array', (['[10, 20, 30]'], {}), '([10, 20, 30])\n', (2986, 3000), True, 'import numpy as np\n'), ((3013, 3045), 'torch.Tensor', 'torch.Tensor', (['[sample1, sample2]'], {}), '([sample1, sample2])\n', (3025, 3045), False, 'import torch\n'), ((3232, 3287), 'torch.distributions.Normal', 'torch.distributions.Normal', ([], {'loc': 'means[0]', 'scale': 'stds[0]'}), '(loc=means[0], scale=stds[0])\n', (3258, 3287), False, 'import torch\n'), ((3306, 3361), 'torch.distributions.Normal', 'torch.distributions.Normal', ([], {'loc': 'means[1]', 'scale': 'stds[1]'}), '(loc=means[1], scale=stds[1])\n', (3332, 3361), False, 'import torch\n'), ((3380, 3435), 'torch.distributions.Normal', 'torch.distributions.Normal', ([], {'loc': 'means[2]', 'scale': 'stds[2]'}), '(loc=means[2], scale=stds[2])\n', (3406, 3435), False, 'import torch\n'), ((4197, 4235), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(0.0)', 'stdev': '(1.0)', 'scope': '(0)'}), '(mean=0.0, stdev=1.0, scope=0)\n', (4205, 4235), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((4250, 4288), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(1.0)', 'stdev': '(2.0)', 'scope': '(1)'}), '(mean=1.0, stdev=2.0, scope=1)\n', (4258, 4288), False, 'from 
spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((4303, 4341), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(3.0)', 'stdev': '(2.0)', 'scope': '(0)'}), '(mean=3.0, stdev=2.0, scope=0)\n', (4311, 4341), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((4356, 4394), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(5.0)', 'stdev': '(1.0)', 'scope': '(1)'}), '(mean=5.0, stdev=1.0, scope=1)\n', (4364, 4394), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((4408, 4436), 'spn.structure.Base.Product', 'Product', ([], {'children': '[g00, g10]'}), '(children=[g00, g10])\n', (4415, 4436), False, 'from spn.structure.Base import Product, Sum\n'), ((4450, 4478), 'spn.structure.Base.Product', 'Product', ([], {'children': '[g01, g11]'}), '(children=[g01, g11])\n', (4457, 4478), False, 'from spn.structure.Base import Product, Sum\n'), ((4491, 4533), 'spn.structure.Base.Sum', 'Sum', ([], {'weights': '[0.2, 0.8]', 'children': '[p0, p1]'}), '(weights=[0.2, 0.8], children=[p0, p1])\n', (4494, 4533), False, 'from spn.structure.Base import Product, Sum\n'), ((4543, 4556), 'spn.structure.Base.assign_ids', 'assign_ids', (['s'], {}), '(s)\n', (4553, 4556), False, 'from spn.structure.Base import assign_ids, rebuild_scopes_bottom_up\n'), ((4565, 4592), 'spn.structure.Base.rebuild_scopes_bottom_up', 'rebuild_scopes_bottom_up', (['s'], {}), '(s)\n', (4589, 4592), False, 'from spn.structure.Base import assign_ids, rebuild_scopes_bottom_up\n'), ((4647, 4670), 'numpy.random.randn', 'np.random.randn', (['(100)', '(2)'], {}), '(100, 2)\n', (4662, 4670), True, 'import numpy as np\n'), ((4707, 4730), 'spn.algorithms.Inference.log_likelihood', 'log_likelihood', (['s', 'data'], {}), '(s, data)\n', (4721, 4730), False, 'from spn.algorithms.Inference import log_likelihood\n'), ((4779, 4819), 'pytorch.GaussianNode', 
'GaussianNode', ([], {'mean': '(0.0)', 'std': '(1.0)', 'scope': '(0)'}), '(mean=0.0, std=1.0, scope=0)\n', (4791, 4819), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((4834, 4874), 'pytorch.GaussianNode', 'GaussianNode', ([], {'mean': '(1.0)', 'std': '(2.0)', 'scope': '(1)'}), '(mean=1.0, std=2.0, scope=1)\n', (4846, 4874), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((4889, 4929), 'pytorch.GaussianNode', 'GaussianNode', ([], {'mean': '(3.0)', 'std': '(2.0)', 'scope': '(0)'}), '(mean=3.0, std=2.0, scope=0)\n', (4901, 4929), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((4944, 4984), 'pytorch.GaussianNode', 'GaussianNode', ([], {'mean': '(5.0)', 'std': '(1.0)', 'scope': '(1)'}), '(mean=5.0, std=1.0, scope=1)\n', (4956, 4984), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((4998, 5030), 'pytorch.ProductNode', 'ProductNode', ([], {'children': '[g00, g10]'}), '(children=[g00, g10])\n', (5009, 5030), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((5044, 5076), 'pytorch.ProductNode', 'ProductNode', ([], {'children': '[g01, g11]'}), '(children=[g01, g11])\n', (5055, 5076), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((5096, 5142), 'pytorch.SumNode', 'SumNode', ([], {'weights': '[0.2, 0.8]', 'children': '[p0, p1]'}), '(weights=[0.2, 0.8], children=[p0, p1])\n', (5103, 5142), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((5165, 5183), 'torch.Tensor', 'torch.Tensor', (['data'], {}), '(data)\n', (5177, 5183), False, 'import torch\n'), ((5461, 5499), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(0.0)', 'stdev': '(1.0)', 'scope': '(0)'}), '(mean=0.0, 
stdev=1.0, scope=0)\n', (5469, 5499), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((5513, 5543), 'spn.structure.leaves.parametric.Parametric.Categorical', 'Categorical', ([], {'p': '[0.1, 0.3, 0.6]'}), '(p=[0.1, 0.3, 0.6])\n', (5524, 5543), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((5557, 5609), 'spn.structure.Base.Sum', 'Sum', ([], {'weights': '[0.1, 0.2, 0.3, 0.4]', 'children': '[n0, n1]'}), '(weights=[0.1, 0.2, 0.3, 0.4], children=[n0, n1])\n', (5560, 5609), False, 'from spn.structure.Base import Product, Sum\n'), ((5623, 5649), 'spn.structure.Base.Product', 'Product', ([], {'children': '[n0, n1]'}), '(children=[n0, n1])\n', (5630, 5649), False, 'from spn.structure.Base import Product, Sum\n'), ((5670, 5695), 'pytorch.GaussianNode.from_spn', 'GaussianNode.from_spn', (['n0'], {}), '(n0)\n', (5691, 5695), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((5715, 5743), 'pytorch.CategoricalNode.from_spn', 'CategoricalNode.from_spn', (['n1'], {}), '(n1)\n', (5739, 5743), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((5763, 5783), 'pytorch.SumNode.from_spn', 'SumNode.from_spn', (['n2'], {}), '(n2)\n', (5779, 5783), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((5803, 5827), 'pytorch.ProductNode.from_spn', 'ProductNode.from_spn', (['n3'], {}), '(n3)\n', (5823, 5827), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((6328, 6393), 'sklearn.datasets.samples_generator.make_blobs', 'make_blobs', ([], {'n_samples': '(10)', 'centers': '(3)', 'n_features': '(2)', 'random_state': '(0)'}), '(n_samples=10, centers=3, n_features=2, random_state=0)\n', (6338, 6393), False, 'from sklearn.datasets.samples_generator import make_blobs\n'), ((6474, 6512), 
'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(0.0)', 'stdev': '(1.0)', 'scope': '(0)'}), '(mean=0.0, stdev=1.0, scope=0)\n', (6482, 6512), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((6527, 6565), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(1.0)', 'stdev': '(2.0)', 'scope': '(1)'}), '(mean=1.0, stdev=2.0, scope=1)\n', (6535, 6565), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((6580, 6618), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(3.0)', 'stdev': '(2.0)', 'scope': '(0)'}), '(mean=3.0, stdev=2.0, scope=0)\n', (6588, 6618), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((6633, 6671), 'spn.structure.leaves.parametric.Parametric.Gaussian', 'Gaussian', ([], {'mean': '(5.0)', 'stdev': '(1.0)', 'scope': '(1)'}), '(mean=5.0, stdev=1.0, scope=1)\n', (6641, 6671), False, 'from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical\n'), ((6685, 6713), 'spn.structure.Base.Product', 'Product', ([], {'children': '[g00, g10]'}), '(children=[g00, g10])\n', (6692, 6713), False, 'from spn.structure.Base import Product, Sum\n'), ((6727, 6755), 'spn.structure.Base.Product', 'Product', ([], {'children': '[g01, g11]'}), '(children=[g01, g11])\n', (6734, 6755), False, 'from spn.structure.Base import Product, Sum\n'), ((6768, 6810), 'spn.structure.Base.Sum', 'Sum', ([], {'weights': '[0.2, 0.8]', 'children': '[p0, p1]'}), '(weights=[0.2, 0.8], children=[p0, p1])\n', (6771, 6810), False, 'from spn.structure.Base import Product, Sum\n'), ((6819, 6832), 'spn.structure.Base.assign_ids', 'assign_ids', (['s'], {}), '(s)\n', (6829, 6832), False, 'from spn.structure.Base import assign_ids, rebuild_scopes_bottom_up\n'), ((6841, 6868), 'spn.structure.Base.rebuild_scopes_bottom_up', 'rebuild_scopes_bottom_up', (['s'], {}), '(s)\n', 
(6865, 6868), False, 'from spn.structure.Base import assign_ids, rebuild_scopes_bottom_up\n'), ((6938, 6964), 'spn.gpu.TensorFlow.spn_to_tf_graph', 'spn_to_tf_graph', (['s'], {'data': 'X'}), '(s, data=X)\n', (6953, 6964), False, 'from spn.gpu.TensorFlow import spn_to_tf_graph, optimize_tf_graph\n'), ((6985, 7004), 'pytorch.SumNode.from_spn', 'SumNode.from_spn', (['s'], {}), '(s)\n', (7001, 7004), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((7064, 7090), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['lr'], {}), '(lr)\n', (7086, 7090), True, 'import tensorflow as tf\n'), ((7166, 7172), 'time.time', 'time', ([], {}), '()\n', (7170, 7172), False, 'from time import time\n'), ((7201, 7301), 'spn.gpu.TensorFlow.optimize_tf_graph', 'optimize_tf_graph', (['tf_spn', 'variable_dict', 'data_placeholder', 'X'], {'epochs': 'epochs', 'optimizer': 'tf_optim'}), '(tf_spn, variable_dict, data_placeholder, X, epochs=epochs,\n optimizer=tf_optim)\n', (7218, 7301), False, 'from spn.gpu.TensorFlow import spn_to_tf_graph, optimize_tf_graph\n'), ((7311, 7317), 'time.time', 'time', ([], {}), '()\n', (7315, 7317), False, 'from time import time\n'), ((7326, 7392), 'pytorch.optimize_torch', 'optimize_torch', (['torch_spn', 'X'], {'epochs': 'epochs', 'optimizer': 'torch_optim'}), '(torch_spn, X, epochs=epochs, optimizer=torch_optim)\n', (7340, 7392), False, 'from pytorch import CategoricalNode, GaussianNode, ProductNode, SumNode, optimize_torch\n'), ((7406, 7412), 'time.time', 'time', ([], {}), '()\n', (7410, 7412), False, 'from time import time\n'), ((3475, 3496), 'torch.Tensor', 'torch.Tensor', (['[1, 10]'], {}), '([1, 10])\n', (3487, 3496), False, 'import torch\n'), ((3536, 3557), 'torch.Tensor', 'torch.Tensor', (['[2, 20]'], {}), '([2, 20])\n', (3548, 3557), False, 'import torch\n'), ((3597, 3618), 'torch.Tensor', 'torch.Tensor', (['[3, 30]'], {}), '([3, 30])\n', (3609, 3618), False, 'import torch\n'), ((2369, 2384), 
'numpy.log', 'np.log', (['sample1'], {}), '(sample1)\n', (2375, 2384), True, 'import numpy as np\n'), ((2394, 2409), 'numpy.log', 'np.log', (['sample2'], {}), '(sample2)\n', (2400, 2409), True, 'import numpy as np\n'), ((1562, 1587), 'numpy.sum', 'np.sum', (['(weights * sample1)'], {}), '(weights * sample1)\n', (1568, 1587), True, 'import numpy as np\n'), ((1589, 1614), 'numpy.sum', 'np.sum', (['(weights * sample2)'], {}), '(weights * sample2)\n', (1595, 1614), True, 'import numpy as np\n'), ((5312, 5324), 'numpy.array', 'np.array', (['ll'], {}), '(ll)\n', (5320, 5324), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/TerminologyCapabilities
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import terminologycapabilities
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class TerminologyCapabilitiesTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("TerminologyCapabilities", js["resourceType"])
return terminologycapabilities.TerminologyCapabilities(js)
def testTerminologyCapabilities1(self):
inst = self.instantiate_from("terminologycapabilities-example.json")
self.assertIsNotNone(
inst, "Must have instantiated a TerminologyCapabilities instance"
)
self.implTerminologyCapabilities1(inst)
js = inst.as_json()
self.assertEqual("TerminologyCapabilities", js["resourceType"])
inst2 = terminologycapabilities.TerminologyCapabilities(js)
self.implTerminologyCapabilities1(inst2)
def implTerminologyCapabilities1(self, inst):
self.assertEqual(force_bytes(inst.codeSearch), force_bytes("explicit"))
self.assertEqual(
force_bytes(inst.contact[0].name), force_bytes("System Administrator")
)
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].system), force_bytes("email")
)
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].value), force_bytes("<EMAIL>")
)
self.assertEqual(inst.date.date, FHIRDate("2012-01-04").date)
self.assertEqual(inst.date.as_json(), "2012-01-04")
self.assertEqual(
force_bytes(inst.description),
force_bytes(
"This is the FHIR capability statement for the main EHR at ACME for the private interface - it does not describe the public interface"
),
)
self.assertTrue(inst.experimental)
self.assertEqual(force_bytes(inst.id), force_bytes("example"))
self.assertEqual(
force_bytes(inst.implementation.description),
force_bytes("Acme Terminology Server"),
)
self.assertEqual(
force_bytes(inst.implementation.url), force_bytes("http://example.org/tx")
)
self.assertEqual(force_bytes(inst.kind), force_bytes("instance"))
self.assertEqual(force_bytes(inst.name), force_bytes("ACME-EHR"))
self.assertEqual(force_bytes(inst.publisher), force_bytes("ACME Corporation"))
self.assertEqual(force_bytes(inst.software.name), force_bytes("TxServer"))
self.assertEqual(force_bytes(inst.software.version), force_bytes("0.1.2"))
self.assertEqual(force_bytes(inst.status), force_bytes("draft"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.title), force_bytes("ACME EHR capability statement")
)
self.assertEqual(
force_bytes(inst.url),
force_bytes("urn:uuid:68D043B5-9ECF-4559-A57A-396E0D452311"),
)
self.assertEqual(force_bytes(inst.version), force_bytes("20130510"))
| [
"json.load",
"os.environ.get",
"pytest.mark.usefixtures",
"os.path.join"
] | [((370, 410), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""base_settings"""'], {}), "('base_settings')\n", (393, 410), False, 'import pytest\n'), ((526, 565), 'os.environ.get', 'os.environ.get', (['"""FHIR_UNITTEST_DATADIR"""'], {}), "('FHIR_UNITTEST_DATADIR')\n", (540, 565), False, 'import os\n'), ((677, 694), 'json.load', 'json.load', (['handle'], {}), '(handle)\n', (686, 694), False, 'import json\n'), ((593, 624), 'os.path.join', 'os.path.join', (['datadir', 'filename'], {}), '(datadir, filename)\n', (605, 624), False, 'import os\n')] |
#! /usr/bin/env python
import pandas as pd
import click
'''
gene expression matrix, with gene id in first column,
gene expression level of each sample in othre columns.
'''
@click.group(chain=True, invoke_without_command=True)
@click.argument('exp_table', type=click.STRING, required=True)
@click.pass_context
def main(ctx, exp_table):
ctx.obj['exp_table'] = exp_table
@main.command('merge_by_group')
@click.option(
'-s',
'--sample_inf',
type=click.STRING,
required=True,
help='sample vs group file, with group id in first column,\
sample id in second column, seperated with tab.')
@click.option(
'-o',
'--output',
type=click.STRING,
default='genes.group.matrix.txt',
help='table with mean expression level of each group.')
@click.pass_context
def merge_by_group(ctx, sample_inf, output):
sample_df = pd.read_table(sample_inf, header=None, index_col=1)
gene_exp_df = pd.read_table(ctx.obj['exp_table'], index_col=0)
sample_df.columns = ['Group']
merged_df = pd.merge(
sample_df, gene_exp_df.T, left_index=True, right_index=True)
merged_df_group = merged_df.groupby(['Group'])
out_df = merged_df_group.mean().T
out_df.to_csv(output, sep='\t')
if __name__ == '__main__':
main(obj={})
| [
"click.argument",
"click.group",
"click.option",
"pandas.merge",
"pandas.read_table"
] | [((187, 239), 'click.group', 'click.group', ([], {'chain': '(True)', 'invoke_without_command': '(True)'}), '(chain=True, invoke_without_command=True)\n', (198, 239), False, 'import click\n'), ((242, 303), 'click.argument', 'click.argument', (['"""exp_table"""'], {'type': 'click.STRING', 'required': '(True)'}), "('exp_table', type=click.STRING, required=True)\n", (256, 303), False, 'import click\n'), ((429, 629), 'click.option', 'click.option', (['"""-s"""', '"""--sample_inf"""'], {'type': 'click.STRING', 'required': '(True)', 'help': '"""sample vs group file, with group id in first column, sample id in second column, seperated with tab."""'}), "('-s', '--sample_inf', type=click.STRING, required=True, help=\n 'sample vs group file, with group id in first column, sample id in second column, seperated with tab.'\n )\n", (441, 629), False, 'import click\n'), ((651, 800), 'click.option', 'click.option', (['"""-o"""', '"""--output"""'], {'type': 'click.STRING', 'default': '"""genes.group.matrix.txt"""', 'help': '"""table with mean expression level of each group."""'}), "('-o', '--output', type=click.STRING, default=\n 'genes.group.matrix.txt', help=\n 'table with mean expression level of each group.')\n", (663, 800), False, 'import click\n'), ((901, 952), 'pandas.read_table', 'pd.read_table', (['sample_inf'], {'header': 'None', 'index_col': '(1)'}), '(sample_inf, header=None, index_col=1)\n', (914, 952), True, 'import pandas as pd\n'), ((972, 1020), 'pandas.read_table', 'pd.read_table', (["ctx.obj['exp_table']"], {'index_col': '(0)'}), "(ctx.obj['exp_table'], index_col=0)\n", (985, 1020), True, 'import pandas as pd\n'), ((1073, 1142), 'pandas.merge', 'pd.merge', (['sample_df', 'gene_exp_df.T'], {'left_index': '(True)', 'right_index': '(True)'}), '(sample_df, gene_exp_df.T, left_index=True, right_index=True)\n', (1081, 1142), True, 'import pandas as pd\n')] |
# Copyright (C) 2017 <NAME> <<EMAIL>>
# This file is subject to the terms and conditions defined in
# file 'LICENSE', which is part of this source code package.
'''yaml checker'''
import yaml
from omnilint.error import Error
from omnilint.checkers import Checker
class Yaml(Checker):
extensions = ['yaml', 'yml']
def __init__(self):
super(Yaml, self).__init__()
def check(self, reporter, origname, tmpname, firstline, fd):
exc = None
try:
yaml.load(fd)
except yaml.YAMLError as e:
exc = e
if exc is None:
return
reporter.report(
Error(msg=exc.context + ' ' + exc.problem,
file=origname,
line=exc.problem_mark.line,
column=exc.problem_mark.column))
def register(omnilint):
'''Registration function, called by omnilint while loading the checker with
itself as argument'''
omnilint.register(Yaml)
| [
"yaml.load",
"omnilint.error.Error"
] | [((494, 507), 'yaml.load', 'yaml.load', (['fd'], {}), '(fd)\n', (503, 507), False, 'import yaml\n'), ((644, 766), 'omnilint.error.Error', 'Error', ([], {'msg': "(exc.context + ' ' + exc.problem)", 'file': 'origname', 'line': 'exc.problem_mark.line', 'column': 'exc.problem_mark.column'}), "(msg=exc.context + ' ' + exc.problem, file=origname, line=exc.\n problem_mark.line, column=exc.problem_mark.column)\n", (649, 766), False, 'from omnilint.error import Error\n')] |
from netcad.device.l2_interfaces import InterfaceL2Access, InterfaceL2Trunk
from netcad.device import PeerInterfaceId
from netcad_demo_meraki1.vlans import vlan_native_1
from .physical import port_UTP_1G
class AccessVlan1(InterfaceL2Access):
port_profile = port_UTP_1G
vlan = vlan_native_1
desc = PeerInterfaceId()
| [
"netcad.device.PeerInterfaceId"
] | [((312, 329), 'netcad.device.PeerInterfaceId', 'PeerInterfaceId', ([], {}), '()\n', (327, 329), False, 'from netcad.device import PeerInterfaceId\n')] |
"""
Ruuvi Counter (LoRaWAN) decoder
See: https://bitbucket.org/iotpetri/hki_kuva_iot/src/master/ESP32/LORA/ESP32_RuuviTagGW_Lora_v2/
"""
import json
import pytz
from dateutil.parser import parse
from broker.providers.decoder import DecoderProvider
from broker.utils import create_dataline
from fvhexperiments.parsers.ruuvicounter import parse_ruuvicounter
def parse_ruuvicounter_data(data, port, serialised_request):
"""
:param data: JSON object
:return: dict of parsed Ruuvitag values
"""
# Create object for all tags and gateway
lora = json.loads(serialised_request['request.body'].decode("utf-8"))
ruuvicounterdata = parse_ruuvicounter(data, port)
devid = serialised_request['devid']
timestamp = parse(lora['DevEUI_uplink']['Time']).astimezone(pytz.UTC)
dataline = create_dataline(timestamp, ruuvicounterdata['gateway'])
parsed_data = {}
parsed_data['gateway'] = {'devid': devid, 'datalines': [dataline]}
parsed_data['ruuvicounter'] = {'datalines': []}
for item in ruuvicounterdata['tags']:
mac = item.pop('mac')
dataline = create_dataline(timestamp, item, extra={'extratags': {'mac': mac}})
parsed_data['ruuvicounter']['datalines'].append(dataline)
return parsed_data
class RuuvicounterDecoder(DecoderProvider):
description = 'Decode Ruuvicounter payload'
def decode_payload(self, payload, port, **kwargs):
# serialised_request is needed to parse data correctly
serialised_request = kwargs.get('serialised_request')
data = parse_ruuvicounter_data(payload, port, serialised_request)
return data
| [
"dateutil.parser.parse",
"fvhexperiments.parsers.ruuvicounter.parse_ruuvicounter",
"broker.utils.create_dataline"
] | [((652, 682), 'fvhexperiments.parsers.ruuvicounter.parse_ruuvicounter', 'parse_ruuvicounter', (['data', 'port'], {}), '(data, port)\n', (670, 682), False, 'from fvhexperiments.parsers.ruuvicounter import parse_ruuvicounter\n'), ((812, 867), 'broker.utils.create_dataline', 'create_dataline', (['timestamp', "ruuvicounterdata['gateway']"], {}), "(timestamp, ruuvicounterdata['gateway'])\n", (827, 867), False, 'from broker.utils import create_dataline\n'), ((1103, 1170), 'broker.utils.create_dataline', 'create_dataline', (['timestamp', 'item'], {'extra': "{'extratags': {'mac': mac}}"}), "(timestamp, item, extra={'extratags': {'mac': mac}})\n", (1118, 1170), False, 'from broker.utils import create_dataline\n'), ((739, 775), 'dateutil.parser.parse', 'parse', (["lora['DevEUI_uplink']['Time']"], {}), "(lora['DevEUI_uplink']['Time'])\n", (744, 775), False, 'from dateutil.parser import parse\n')] |
from bot import __version__
from setuptools import setup, find_packages
REQUIREMENTS = [line.strip() for line in open("requirements.txt").readlines()]
setup(name='guldai-telegram-bot',
version=__version__,
description='Telegram interface for the guldai bot',
author='isysd',
author_email='<EMAIL>',
license='MIT',
url='https://guld.io/',
py_modules = ['bot'],
# packages=find_packages(exclude=['tests', 'tests.*']),
# zip_safe=False,
# include_package_data=True,
install_requires=REQUIREMENTS,
classifiers=[
'Topic :: Communications :: ChatBot',
'Development Status :: 4 - Beta',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Internet'
])
| [
"setuptools.setup"
] | [((153, 698), 'setuptools.setup', 'setup', ([], {'name': '"""guldai-telegram-bot"""', 'version': '__version__', 'description': '"""Telegram interface for the guldai bot"""', 'author': '"""isysd"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'url': '"""https://guld.io/"""', 'py_modules': "['bot']", 'install_requires': 'REQUIREMENTS', 'classifiers': "['Topic :: Communications :: ChatBot', 'Development Status :: 4 - Beta',\n 'Intended Audience :: End Users/Desktop',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.4', 'Topic :: Internet']"}), "(name='guldai-telegram-bot', version=__version__, description=\n 'Telegram interface for the guldai bot', author='isysd', author_email=\n '<EMAIL>', license='MIT', url='https://guld.io/', py_modules=['bot'],\n install_requires=REQUIREMENTS, classifiers=[\n 'Topic :: Communications :: ChatBot', 'Development Status :: 4 - Beta',\n 'Intended Audience :: End Users/Desktop',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.4', 'Topic :: Internet'])\n", (158, 698), False, 'from setuptools import setup, find_packages\n')] |
import dash_mantine_components as dmc
component = dmc.Spoiler(
showLabel="Show more",
hideLabel="Hide",
maxHeight=50,
children=[
dmc.Text(
"""We Butter the Bread with Butter was founded in 2007 by <NAME>, who was originally guitarist
for <NAME>'s band, and <NAME>. The band was originally meant as a joke, but progressed
into being a more serious musical duo. The name for the band has no particular meaning, although its
origins were suggested from when the two original members were driving in a car operated by Marcel
Neumann and an accident almost occurred. Neumann found Schultka "so funny that he briefly lost control of
the vehicle." Many of their songs from this point were covers of German folk tales and nursery rhymes. """
)
],
)
| [
"dash_mantine_components.Text"
] | [((154, 833), 'dash_mantine_components.Text', 'dmc.Text', (['"""We Butter the Bread with Butter was founded in 2007 by <NAME>, who was originally guitarist \n for <NAME>\'s band, and <NAME>. The band was originally meant as a joke, but progressed \n into being a more serious musical duo. The name for the band has no particular meaning, although its \n origins were suggested from when the two original members were driving in a car operated by Marcel \n Neumann and an accident almost occurred. Neumann found Schultka "so funny that he briefly lost control of \n the vehicle." Many of their songs from this point were covers of German folk tales and nursery rhymes. """'], {}), '(\n """We Butter the Bread with Butter was founded in 2007 by <NAME>, who was originally guitarist \n for <NAME>\'s band, and <NAME>. The band was originally meant as a joke, but progressed \n into being a more serious musical duo. The name for the band has no particular meaning, although its \n origins were suggested from when the two original members were driving in a car operated by Marcel \n Neumann and an accident almost occurred. Neumann found Schultka "so funny that he briefly lost control of \n the vehicle." Many of their songs from this point were covers of German folk tales and nursery rhymes. """\n )\n', (162, 833), True, 'import dash_mantine_components as dmc\n')] |
import json
from mock import patch
import jenkins
from tests.base import JenkinsTestBase
class JenkinsCredentialTestBase(JenkinsTestBase):
config_xml = """<com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl>
<scope>GLOBAL</scope>
<id>Test Credential</id>
<username>Test-User</username>
<password><PASSWORD></password>
</com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl>"""
class JenkinsGetTagTextTest(JenkinsCredentialTestBase):
def test_simple(self):
name_to_return = self.j._get_tag_text('id', self.config_xml)
self.assertEqual('Test Credential', name_to_return)
def test_failed(self):
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j._get_tag_text('id', '<xml></xml>')
self.assertEqual(str(context_manager.exception),
'tag[id] is invalidated')
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j._get_tag_text('id', '<xml><id></id></xml>')
self.assertEqual(str(context_manager.exception),
'tag[id] is invalidated')
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j._get_tag_text('id', '<xml><id> </id></xml>')
self.assertEqual(str(context_manager.exception),
'tag[id] is invalidated')
class JenkinsIsFolderTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_is_folder(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
]
self.assertTrue(self.j.is_folder('Test Folder'))
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_is_not_folder(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'org.jenkinsci.plugins.workflow.job.WorkflowJob'}),
]
self.assertFalse(self.j.is_folder('Test Job'))
class JenkinsAssertFolderTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_is_folder(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
]
self.j.assert_folder('Test Folder')
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_is_not_folder(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'org.jenkinsci.plugins.workflow.job.WorkflowJob'}),
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.assert_folder('Test Job')
self.assertEqual(str(context_manager.exception),
'job[Test Job] is not a folder')
class JenkinsAssertCredentialTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_credential_missing(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
jenkins.NotFoundException()
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.assert_credential_exists('NonExistent', 'TestFoler')
self.assertEqual(
str(context_manager.exception),
'credential[NonExistent] does not exist'
' in the domain[_] of [TestFoler]')
self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_credential_exists(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
json.dumps({'id': 'ExistingCredential'})
]
self.j.assert_credential_exists('ExistingCredential', 'TestFoler')
self._check_requests(jenkins_mock.call_args_list)
class JenkinsCredentialExistsTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_credential_missing(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
jenkins.NotFoundException()
]
self.assertEqual(self.j.credential_exists('NonExistent', 'TestFolder'),
False)
self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_credential_exists(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
json.dumps({'id': 'ExistingCredential'})
]
self.assertEqual(self.j.credential_exists('ExistingCredential',
'TestFolder'),
True)
self._check_requests(jenkins_mock.call_args_list)
class JenkinsGetCredentialInfoTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_simple(self, jenkins_mock):
credential_info_to_return = {'id': 'ExistingCredential'}
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
json.dumps(credential_info_to_return)
]
credential_info = self.j.get_credential_info('ExistingCredential', 'TestFolder')
self.assertEqual(credential_info, credential_info_to_return)
self.assertEqual(
jenkins_mock.call_args[0][0].url,
self.make_url('job/TestFolder/credentials/store/folder/'
'domain/_/credential/ExistingCredential/api/json?depth=0'))
self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_nonexistent(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
None,
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.get_credential_info('NonExistent', 'TestFolder')
self.assertEqual(
str(context_manager.exception),
'credential[NonExistent] does not exist '
'in the domain[_] of [TestFolder]')
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_invalid_json(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
'{invalid_json}'
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.get_credential_info('NonExistent', 'TestFolder')
self.assertEqual(
str(context_manager.exception),
'Could not parse JSON info for credential[NonExistent]'
' in the domain[_] of [TestFolder]')
class JenkinsGetCredentialConfigTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_encodes_credential_name(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
None,
]
self.j.get_credential_config(u'Test Credential', u'Test Folder')
self.assertEqual(
jenkins_mock.call_args_list[1][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/domain/'
'_/credential/Test%20Credential/config.xml'))
self._check_requests(jenkins_mock.call_args_list)
class JenkinsCreateCredentialTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_simple(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
jenkins.NotFoundException(),
None,
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
json.dumps({'id': 'Test Credential'}),
]
self.j.create_credential('Test Folder', self.config_xml)
self.assertEqual(
jenkins_mock.call_args_list[1][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/'
'domain/_/credential/Test%20Credential/api/json?depth=0'))
self.assertEqual(
jenkins_mock.call_args_list[2][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/'
'domain/_/createCredentials'))
self.assertEqual(
jenkins_mock.call_args_list[4][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/'
'domain/_/credential/Test%20Credential/api/json?depth=0'))
self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_already_exists(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
json.dumps({'id': 'Test Credential'}),
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.create_credential('Test Folder', self.config_xml)
self.assertEqual(
jenkins_mock.call_args_list[1][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/'
'domain/_/credential/Test%20Credential/api/json?depth=0'))
self.assertEqual(
str(context_manager.exception),
'credential[Test Credential] already exists'
' in the domain[_] of [Test Folder]')
self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_failed(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
jenkins.NotFoundException(),
None,
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
None,
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.create_credential('Test Folder', self.config_xml)
self.assertEqual(
jenkins_mock.call_args_list[1][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/'
'domain/_/credential/Test%20Credential/api/json?depth=0'))
self.assertEqual(
jenkins_mock.call_args_list[2][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/'
'folder/domain/_/createCredentials'))
self.assertEqual(
jenkins_mock.call_args_list[4][0][0].url,
self.make_url('job/Test%20Folder/credentials/store/folder/'
'domain/_/credential/Test%20Credential/api/json?depth=0'))
self.assertEqual(
str(context_manager.exception),
'create[Test Credential] failed in the domain[_] of [Test Folder]')
self._check_requests(jenkins_mock.call_args_list)
class JenkinsDeleteCredentialTest(JenkinsCredentialTestBase):
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_simple(self, jenkins_mock):
jenkins_mock.side_effect = [
True,
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
jenkins.NotFoundException(),
]
self.j.delete_credential(u'Test Credential', 'TestFolder')
self.assertEqual(
jenkins_mock.call_args_list[0][0][0].url,
self.make_url('job/TestFolder/credentials/store/folder/domain/'
'_/credential/Test%20Credential/config.xml'))
self._check_requests(jenkins_mock.call_args_list)
@patch.object(jenkins.Jenkins, 'jenkins_open')
def test_failed(self, jenkins_mock):
jenkins_mock.side_effect = [
json.dumps({'id': 'ExistingCredential'}),
json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
json.dumps({'id': 'ExistingCredential'})
]
with self.assertRaises(jenkins.JenkinsException) as context_manager:
self.j.delete_credential(u'ExistingCredential', 'TestFolder')
self.assertEqual(
jenkins_mock.call_args_list[0][0][0].url,
self.make_url('job/TestFolder/credentials/store/folder/'
'domain/_/credential/ExistingCredential/config.xml'))
self.assertEqual(
str(context_manager.exception),
'delete credential[ExistingCredential] from '
'domain[_] of [TestFolder] failed')
self._check_requests(jenkins_mock.call_args_list)
class JenkinsReconfigCredentialTest(JenkinsCredentialTestBase):
    """Tests for Jenkins.reconfig_credential() on a folder's credential store."""
    @patch.object(jenkins.Jenkins, 'jenkins_open')
    def test_simple(self, jenkins_mock):
        # Canned responses consumed, one per jenkins_open call: folder
        # lookup, existing credential lookup, then the reconfig POST.
        jenkins_mock.side_effect = [
            json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
            json.dumps({'id': 'Test Credential'}),
            None
        ]
        self.j.reconfig_credential(u'Test Folder', self.config_xml)
        # Second request: existence check against the credential JSON API.
        self.assertEqual(
            jenkins_mock.call_args_list[1][0][0].url,
            self.make_url('job/Test%20Folder/credentials/store/folder/domain/'
                          '_/credential/Test%20Credential/api/json?depth=0'))
        # Third request: the actual reconfiguration via config.xml.
        self.assertEqual(
            jenkins_mock.call_args_list[2][0][0].url,
            self.make_url('job/Test%20Folder/credentials/store/folder/domain/'
                          '_/credential/Test%20Credential/config.xml'))
        self._check_requests(jenkins_mock.call_args_list)
class JenkinsListCredentialConfigTest(JenkinsCredentialTestBase):
    """Tests for Jenkins.list_credentials() on a folder's credential store."""
    @patch.object(jenkins.Jenkins, 'jenkins_open')
    def test_simple(self, jenkins_mock):
        credentials_to_return = [{'id': 'Test Credential'}]
        # Canned responses: folder lookup, then the credentials listing.
        jenkins_mock.side_effect = [
            json.dumps({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}),
            json.dumps({'credentials': [{'id': 'Test Credential'}]}),
        ]
        credentials = self.j.list_credentials(u'Test Folder')
        self.assertEqual(credentials, credentials_to_return)
        # The listing uses the tree filter to fetch only credential ids.
        self.assertEqual(
            jenkins_mock.call_args_list[1][0][0].url,
            self.make_url('job/Test%20Folder/credentials/store/folder/domain/'
                          '_/api/json?tree=credentials[id]'))
        self._check_requests(jenkins_mock.call_args_list)
| [
"mock.patch.object",
"json.dumps",
"jenkins.NotFoundException"
] | [((1505, 1550), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (1517, 1550), False, 'from mock import patch\n'), ((1787, 1832), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (1799, 1832), False, 'from mock import patch\n'), ((2135, 2180), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (2147, 2180), False, 'from mock import patch\n'), ((2404, 2449), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (2416, 2449), False, 'from mock import patch\n'), ((2938, 2983), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (2950, 2983), False, 'from mock import patch\n'), ((3591, 3636), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (3603, 3636), False, 'from mock import patch\n'), ((4074, 4119), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (4086, 4119), False, 'from mock import patch\n'), ((4519, 4564), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (4531, 4564), False, 'from mock import patch\n'), ((5097, 5142), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (5109, 5142), False, 'from mock import patch\n'), ((5879, 5924), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (5891, 5924), False, 'from mock import patch\n'), ((6443, 6488), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', 
'"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (6455, 6488), False, 'from mock import patch\n'), ((7101, 7146), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (7113, 7146), False, 'from mock import patch\n'), ((7785, 7830), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (7797, 7830), False, 'from mock import patch\n'), ((9008, 9053), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (9020, 9053), False, 'from mock import patch\n'), ((9909, 9954), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (9921, 9954), False, 'from mock import patch\n'), ((11392, 11437), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (11404, 11437), False, 'from mock import patch\n'), ((12028, 12073), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (12040, 12073), False, 'from mock import patch\n'), ((13038, 13083), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (13050, 13083), False, 'from mock import patch\n'), ((13992, 14037), 'mock.patch.object', 'patch.object', (['jenkins.Jenkins', '"""jenkins_open"""'], {}), "(jenkins.Jenkins, 'jenkins_open')\n", (14004, 14037), False, 'from mock import patch\n'), ((1644, 1712), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (1654, 1712), False, 'import json\n'), ((1930, 2002), 'json.dumps', 'json.dumps', (["{'_class': 'org.jenkinsci.plugins.workflow.job.WorkflowJob'}"], {}), "({'_class': 
'org.jenkinsci.plugins.workflow.job.WorkflowJob'})\n", (1940, 2002), False, 'import json\n'), ((2274, 2342), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (2284, 2342), False, 'import json\n'), ((2547, 2619), 'json.dumps', 'json.dumps', (["{'_class': 'org.jenkinsci.plugins.workflow.job.WorkflowJob'}"], {}), "({'_class': 'org.jenkinsci.plugins.workflow.job.WorkflowJob'})\n", (2557, 2619), False, 'import json\n'), ((3086, 3154), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (3096, 3154), False, 'import json\n'), ((3168, 3195), 'jenkins.NotFoundException', 'jenkins.NotFoundException', ([], {}), '()\n', (3193, 3195), False, 'import jenkins\n'), ((3738, 3806), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (3748, 3806), False, 'import json\n'), ((3820, 3860), 'json.dumps', 'json.dumps', (["{'id': 'ExistingCredential'}"], {}), "({'id': 'ExistingCredential'})\n", (3830, 3860), False, 'import json\n'), ((4222, 4290), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (4232, 4290), False, 'import json\n'), ((4304, 4331), 'jenkins.NotFoundException', 'jenkins.NotFoundException', ([], {}), '()\n', (4329, 4331), False, 'import jenkins\n'), ((4666, 4734), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (4676, 4734), False, 'import json\n'), ((4748, 4788), 'json.dumps', 'json.dumps', (["{'id': 'ExistingCredential'}"], {}), "({'id': 'ExistingCredential'})\n", (4758, 4788), False, 'import json\n'), ((5298, 5366), 'json.dumps', 
'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (5308, 5366), False, 'import json\n'), ((5380, 5417), 'json.dumps', 'json.dumps', (['credential_info_to_return'], {}), '(credential_info_to_return)\n', (5390, 5417), False, 'import json\n'), ((6020, 6088), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (6030, 6088), False, 'import json\n'), ((6585, 6653), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (6595, 6653), False, 'import json\n'), ((7254, 7322), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (7264, 7322), False, 'import json\n'), ((7921, 7989), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (7931, 7989), False, 'import json\n'), ((8003, 8030), 'jenkins.NotFoundException', 'jenkins.NotFoundException', ([], {}), '()\n', (8028, 8030), False, 'import jenkins\n'), ((8062, 8130), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (8072, 8130), False, 'import json\n'), ((8144, 8181), 'json.dumps', 'json.dumps', (["{'id': 'Test Credential'}"], {}), "({'id': 'Test Credential'})\n", (8154, 8181), False, 'import json\n'), ((9152, 9220), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (9162, 9220), False, 'import json\n'), ((9234, 9271), 'json.dumps', 'json.dumps', (["{'id': 'Test Credential'}"], {}), "({'id': 
'Test Credential'})\n", (9244, 9271), False, 'import json\n'), ((10045, 10113), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (10055, 10113), False, 'import json\n'), ((10127, 10154), 'jenkins.NotFoundException', 'jenkins.NotFoundException', ([], {}), '()\n', (10152, 10154), False, 'import jenkins\n'), ((10186, 10254), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (10196, 10254), False, 'import json\n'), ((11546, 11614), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (11556, 11614), False, 'import json\n'), ((11628, 11655), 'jenkins.NotFoundException', 'jenkins.NotFoundException', ([], {}), '()\n', (11653, 11655), False, 'import jenkins\n'), ((12164, 12204), 'json.dumps', 'json.dumps', (["{'id': 'ExistingCredential'}"], {}), "({'id': 'ExistingCredential'})\n", (12174, 12204), False, 'import json\n'), ((12218, 12286), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (12228, 12286), False, 'import json\n'), ((12300, 12340), 'json.dumps', 'json.dumps', (["{'id': 'ExistingCredential'}"], {}), "({'id': 'ExistingCredential'})\n", (12310, 12340), False, 'import json\n'), ((13174, 13242), 'json.dumps', 'json.dumps', (["{'_class': 'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (13184, 13242), False, 'import json\n'), ((13256, 13293), 'json.dumps', 'json.dumps', (["{'id': 'Test Credential'}"], {}), "({'id': 'Test Credential'})\n", (13266, 13293), False, 'import json\n'), ((14188, 14256), 'json.dumps', 'json.dumps', (["{'_class': 
'com.cloudbees.hudson.plugins.folder.Folder'}"], {}), "({'_class': 'com.cloudbees.hudson.plugins.folder.Folder'})\n", (14198, 14256), False, 'import json\n'), ((14270, 14326), 'json.dumps', 'json.dumps', (["{'credentials': [{'id': 'Test Credential'}]}"], {}), "({'credentials': [{'id': 'Test Credential'}]})\n", (14280, 14326), False, 'import json\n')] |
import logging
import pytest
from pathlib import Path
from leaf_focus.pdf.images.component import Component
from tests.base_test import BaseTest
class TestPdfImagesComponent(BaseTest):
    """Tests for the pdf-to-image Component wrapper around an external exe."""

    def test_no_exe(self):
        """A None exe path must be rejected with ValueError."""
        with pytest.raises(ValueError, match="Must supply exe file."):
            Component(logging.getLogger(), None)

    def test_exe_not_found(self):
        """A non-existent exe path must be rejected with FileNotFoundError."""
        missing = "this-path-does-not-exist"
        with pytest.raises(
            FileNotFoundError,
            match=f"Exe file does not exist '{missing}'.",
        ):
            Component(logging.getLogger(), Path(missing))

    def test_found_exe(self, tmp_path):
        """An existing file is accepted as the exe."""
        exe_file = tmp_path / "example"
        exe_file.touch()
        Component(logging.getLogger(), exe_file)

    @pytest.mark.needs_exe
    def test_create_read(self, tmp_path, exe_pdf_image):
        """create() renders the example PDF to numbered PNG pages."""
        component = Component(logging.getLogger(), exe_pdf_image)
        image_prefix_path = Path(tmp_path, "image")
        component.create(self.example1_path(".pdf"), image_prefix_path)
        first_page = Path(str(image_prefix_path) + "-000001.png")
        # Exactly one page of known size is expected.  (The original test
        # asserted the page-1 existence check twice; the duplicate was
        # removed.)
        assert first_page.exists()
        assert first_page.stat().st_size == 21703
        assert not Path(str(image_prefix_path) + "-000002.png").exists()
| [
"logging.getLogger",
"pytest.raises",
"pathlib.Path"
] | [((919, 942), 'pathlib.Path', 'Path', (['tmp_path', '"""image"""'], {}), "(tmp_path, 'image')\n", (923, 942), False, 'from pathlib import Path\n'), ((227, 283), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Must supply exe file."""'}), "(ValueError, match='Must supply exe file.')\n", (240, 283), False, 'import pytest\n'), ((424, 500), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {'match': 'f"""Exe file does not exist \'{path}\'."""'}), '(FileNotFoundError, match=f"Exe file does not exist \'{path}\'.")\n', (437, 500), False, 'import pytest\n'), ((716, 735), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (733, 735), False, 'import logging\n'), ((854, 873), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (871, 873), False, 'import logging\n'), ((307, 326), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (324, 326), False, 'import logging\n'), ((559, 578), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (576, 578), False, 'import logging\n'), ((580, 590), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (584, 590), False, 'from pathlib import Path\n')] |
import pynput
from pynput.keyboard import Key, Listener
def on_press(key):
    """Report the key that was pressed.

    `key` is a pynput Key/KeyCode object, not a string, so it cannot be
    concatenated with str directly: the previous `key + " was pressed"`
    raised TypeError on every key press.  Format it instead.
    """
    print("{} was pressed".format(key))
def on_release(key):
    """Stop the listener when the Escape key is released.

    Compare against the imported `Key.esc` enum member.  The previous
    `key.esc` attribute lookup only works when the released key happens to
    be a `Key` enum member; for ordinary character keys (KeyCode objects)
    it raises AttributeError, crashing the listener callback.
    Returning False tells the pynput Listener to stop.
    """
    if key == Key.esc:
        return False
# Run the keyboard listener until a callback returns False (per pynput,
# on_release returning False on Escape stops the listener); join() blocks
# the main thread until then.
with Listener(on_press=on_press, on_release=on_release) as listener:
    listener.join()
"pynput.keyboard.Listener"
] | [((186, 236), 'pynput.keyboard.Listener', 'Listener', ([], {'on_press': 'on_press', 'on_release': 'on_release'}), '(on_press=on_press, on_release=on_release)\n', (194, 236), False, 'from pynput.keyboard import Key, Listener\n')] |
import pstats
# Load the saved cProfile dump and print the 15 most expensive entries,
# ordered by cumulative time.
stats = pstats.Stats('output.prof')
stats.sort_stats('cumulative')
stats.print_stats(15)
| [
"pstats.Stats"
] | [((23, 50), 'pstats.Stats', 'pstats.Stats', (['"""output.prof"""'], {}), "('output.prof')\n", (35, 50), False, 'import pstats\n')] |
from django.db import models
class Restaurant(models.Model):
    """
    Restaurants' Model.

    The primary key (``id``) is an explicitly supplied CharField rather
    than an auto-generated integer.  Verbose names are in Spanish to match
    the rest of the project's admin labels.
    """
    id = models.CharField(primary_key=True, editable=True, max_length=255, verbose_name=u'Id')
    rating = models.IntegerField(verbose_name=u'Rating')
    name = models.CharField(max_length=255, verbose_name=u'Nombre')
    site = models.CharField(max_length=255, verbose_name=u'Sitio')
    email = models.CharField(max_length=255, verbose_name=u'Email')
    phone = models.CharField(max_length=255, verbose_name=u'Telefono')
    street = models.CharField(max_length=255, verbose_name=u'Calle')
    city = models.CharField(max_length=255, verbose_name=u'Ciudad')
    state = models.CharField(max_length=255, verbose_name=u'Estado')
    lat = models.FloatField(verbose_name=u'Latitud')
    long = models.FloatField(verbose_name=u'Longitud')

    def __unicode__(self):
        return '%s' % (self.name)

    # Python 3 ignores __unicode__; without __str__ the admin/shell would
    # fall back to the default "Restaurant object" representation.  Alias
    # keeps both interpreter versions consistent.
    __str__ = __unicode__

    class Meta:
        ordering = ('id', 'rating')
"django.db.models.FloatField",
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((111, 200), 'django.db.models.CharField', 'models.CharField', ([], {'primary_key': '(True)', 'editable': '(True)', 'max_length': '(255)', 'verbose_name': 'u"""Id"""'}), "(primary_key=True, editable=True, max_length=255,\n verbose_name=u'Id')\n", (127, 200), False, 'from django.db import models\n'), ((218, 261), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': 'u"""Rating"""'}), "(verbose_name=u'Rating')\n", (237, 261), False, 'from django.db import models\n'), ((275, 331), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Nombre"""'}), "(max_length=255, verbose_name=u'Nombre')\n", (291, 331), False, 'from django.db import models\n'), ((347, 402), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Sitio"""'}), "(max_length=255, verbose_name=u'Sitio')\n", (363, 402), False, 'from django.db import models\n'), ((419, 474), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Email"""'}), "(max_length=255, verbose_name=u'Email')\n", (435, 474), False, 'from django.db import models\n'), ((491, 549), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Telefono"""'}), "(max_length=255, verbose_name=u'Telefono')\n", (507, 549), False, 'from django.db import models\n'), ((567, 622), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Calle"""'}), "(max_length=255, verbose_name=u'Calle')\n", (583, 622), False, 'from django.db import models\n'), ((638, 694), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Ciudad"""'}), "(max_length=255, verbose_name=u'Ciudad')\n", (654, 694), False, 'from django.db import models\n'), ((711, 767), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Estado"""'}), 
"(max_length=255, verbose_name=u'Estado')\n", (727, 767), False, 'from django.db import models\n'), ((782, 824), 'django.db.models.FloatField', 'models.FloatField', ([], {'verbose_name': 'u"""Latitud"""'}), "(verbose_name=u'Latitud')\n", (799, 824), False, 'from django.db import models\n'), ((838, 881), 'django.db.models.FloatField', 'models.FloatField', ([], {'verbose_name': 'u"""Longitud"""'}), "(verbose_name=u'Longitud')\n", (855, 881), False, 'from django.db import models\n')] |
"""
Useful functions for the admin panel of ImageLabeller. In particular:
* Download labels from database to json or csv
* Upload images to the database, from json (catalogue of image locations) or zip archive
NOTE:
When uploading files, or an archive full of files, we will attempt to
match the filename to a regex with longitute_latitude_date (with date in ISO3 format)
and upload those values to the database
"""
import csv
import json
import os
import re
import sys
from datetime import datetime

from flask import current_app

from image_labeller import db
from image_labeller.schema import Label, User, Image, Category
REGEX = "([\d]{1,3}\.[\d]+)_([\d]{1,3}\.[\d]+)_([\d]{4}-[0-1][\d]{1}-[0-3][\d]{1})"
def prep_data():
    """Query the Label table and return every label as a list of dicts.

    Each dict always carries image_name/username/category/notes; the
    longitude, latitude and time keys are added only when the underlying
    image record has them.  Datetime values are flattened to an ISO date
    string (YYYY-MM-DD).
    """
    labels = Label.query.all()
    print("We have {} results".format(len(labels)))
    rows = []
    for label in labels:
        row = {
            "image_name": label.image.image_location,
            "username": label.user.username,
            "category": label.category.category_name,
            "notes": label.notes,
        }
        if label.image.image_longitude:
            row["longitude"] = label.image.image_longitude
        if label.image.image_latitude:
            row["latitude"] = label.image.image_latitude
        if label.image.image_time:
            timestamp = label.image.image_time
            if isinstance(timestamp, datetime):
                # keep only the date portion of the ISO representation
                timestamp = timestamp.isoformat().split("T")[0]
            row["time"] = timestamp
        rows.append(row)
    return rows
def prep_csv(filename, tmpdir):
    """
    Write the label results as a CSV file in tmpdir.

    Parameters
    ----------
    filename : str
        Target file name; ".csv" is appended when missing.
    tmpdir : str
        Directory (created if needed) the file is written into.

    Returns
    -------
    str
        The (possibly extended) filename, relative to tmpdir.
    """
    if not filename.endswith(".csv"):
        filename += ".csv"
    results = prep_data()
    os.makedirs(tmpdir, exist_ok=True)
    tmp_filename = os.path.join(tmpdir, filename)
    # Column order: union of all row keys, in first-seen order.  The old
    # code used only the first row's keys, which raised KeyError whenever
    # a later row carried an optional key (longitude/latitude/time) that
    # the first row lacked.
    fieldnames = []
    for row in results:
        for key in row:
            if key not in fieldnames:
                fieldnames.append(key)
    # csv.DictWriter quotes commas/newlines inside values (the hand-rolled
    # writer produced corrupt rows for such notes), restval="" fills
    # missing optional columns, and the context manager closes the file
    # handle that was previously leaked.
    with open(tmp_filename, "w", newline="") as tmp_file:
        writer = csv.DictWriter(tmp_file, fieldnames=fieldnames, restval="")
        writer.writeheader()
        writer.writerows(results)
    return filename
def prep_json(filename, tmpdir):
    """Write the label results as JSON into tmpdir and return the filename.

    ".json" is appended to filename when missing; tmpdir is created if it
    does not already exist.
    """
    if not filename.endswith(".json"):
        filename += ".json"
    os.makedirs(tmpdir, exist_ok=True)
    target = os.path.join(tmpdir, filename)
    results = prep_data()
    with open(target, "w") as outfile:
        json.dump(results, outfile)
    return filename
def upload_image(image_dict):
    """
    Upload a single image record to the database.

    Parameters
    ----------
    image_dict : dict
        Must contain "image_location" and "image_location_is_url";
        may also carry image_longitude / image_latitude / image_time.

    Raises
    ------
    RuntimeError
        If either required key is missing.  (The old code referenced the
        misspelled name 'RuntimError', so callers got a NameError instead
        of the intended validation error.)
    """
    required = ("image_location", "image_location_is_url")
    if any(key not in image_dict for key in required):
        raise RuntimeError("Need to specify image_location and image_location_is_url")
    img = Image()
    img.image_location = image_dict["image_location"]
    img.image_location_is_url = image_dict["image_location_is_url"]
    # Optional metadata columns: copy only those that were provided.
    for key in ("image_longitude", "image_latitude", "image_time"):
        if key in image_dict:
            setattr(img, key, image_dict[key])
    db.session.add(img)
    db.session.commit()
def upload_images_from_catalogue(catalogue_file):
    """
    Upload a set of images to the database, given a json file containing
    [{image_location:<loc>, image_location_is_url:<bool>}, ...].

    An empty catalogue is a valid no-op.

    Raises
    ------
    RuntimeError
        If the file content is not a list of dictionaries.
    """
    # 'with' guarantees the handle is closed; the old json.load(open(...))
    # leaked it.
    with open(catalogue_file) as catalogue:
        image_catalogue = json.load(catalogue)
    # Validate every entry.  The old code indexed [0] unconditionally and
    # crashed with IndexError on an empty catalogue.
    if not isinstance(image_catalogue, list) or \
       any(not isinstance(entry, dict) for entry in image_catalogue):
        raise RuntimeError("upload_images needs to be given a list of dictionaries")
    for image_dict in image_catalogue:
        upload_image(image_dict)
def upload_images_from_archive(archive_file):
    """
    Unpack a zipfile or tarfile to the right directory,
    and upload details to the database.

    NOTE(review): despite the docstring, only .zip archives are handled
    below; other extensions silently unpack nothing.  Requires an active
    Flask app context (reads current_app.config).
    """
    # make a directory with the current timestamp name
    timestring = str(datetime.timestamp(datetime.now())).split(".")[0]
    # "upload_dir" is the absolute file path, where we will unzip files to.
    upload_dir = os.path.join(current_app.config["IMAGE_FULLPATH"],timestring)
    os.makedirs(upload_dir)
    # location_dir is the last part of this, to be used in the URL
    location_dir = os.path.join(current_app.config["IMAGE_PATH"],timestring)
    if archive_file.endswith(".zip"):
        # HACK: shells out to unzip; paths are interpolated unquoted, so a
        # filename containing spaces or shell metacharacters breaks (or is
        # a command-injection risk if archive_file is untrusted).
        os.system("unzip {} -d {}".format(archive_file, upload_dir))
    ## list the files in the directory
    filenames = os.listdir(upload_dir)
    for filename in filenames:
        image_dict = {}
        image_dict["image_location"] = os.path.join(location_dir,
                                                    filename)
        image_dict["image_location_is_url"] = False
        # see if we can extract latitude_longitude_date from the filename
        # (REGEX groups are longitude, latitude, ISO date, in that order)
        match = re.search(REGEX, filename)
        if match:
            longitude, latitude, date = match.groups()
            image_dict["image_latitude"] = latitude
            image_dict["image_longitude"] = longitude
            image_dict["image_time"] = date
        upload_image(image_dict)
def upload_images(filename):
    """Dispatch an upload based on the file extension.

    ".json" files are treated as catalogues of image locations; ".zip"
    files are unpacked as archives; anything else is reported as
    unsupported.
    """
    lowered = filename.lower()
    if lowered.endswith(".json"):
        upload_images_from_catalogue(filename)
    elif lowered.endswith(".zip"):
        upload_images_from_archive(filename)
    else:
        print("Filetype not implemented yet")
def allowed_file(filename):
    """Return True when filename carries one of the allowed upload extensions.

    Matching is case-sensitive, exactly like the original loop over the
    extension set.
    """
    # str.endswith accepts a tuple of suffixes -- one call instead of a loop.
    return filename.endswith((".json", ".zip", ".png"))
| [
"re.search",
"image_labeller.db.session.commit",
"os.listdir",
"os.makedirs",
"image_labeller.schema.Image",
"os.path.join",
"image_labeller.schema.Label.query.all",
"datetime.datetime.now",
"json.dump",
"image_labeller.db.session.add"
] | [((835, 852), 'image_labeller.schema.Label.query.all', 'Label.query.all', ([], {}), '()\n', (850, 852), False, 'from image_labeller.schema import Label, User, Image, Category\n'), ((1937, 1971), 'os.makedirs', 'os.makedirs', (['tmpdir'], {'exist_ok': '(True)'}), '(tmpdir, exist_ok=True)\n', (1948, 1971), False, 'import os\n'), ((1991, 2021), 'os.path.join', 'os.path.join', (['tmpdir', 'filename'], {}), '(tmpdir, filename)\n', (2003, 2021), False, 'import os\n'), ((2746, 2780), 'os.makedirs', 'os.makedirs', (['tmpdir'], {'exist_ok': '(True)'}), '(tmpdir, exist_ok=True)\n', (2757, 2780), False, 'import os\n'), ((2800, 2830), 'os.path.join', 'os.path.join', (['tmpdir', 'filename'], {}), '(tmpdir, filename)\n', (2812, 2830), False, 'import os\n'), ((3259, 3266), 'image_labeller.schema.Image', 'Image', ([], {}), '()\n', (3264, 3266), False, 'from image_labeller.schema import Label, User, Image, Category\n'), ((3624, 3643), 'image_labeller.db.session.add', 'db.session.add', (['img'], {}), '(img)\n', (3638, 3643), False, 'from image_labeller import db\n'), ((3648, 3667), 'image_labeller.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3665, 3667), False, 'from image_labeller import db\n'), ((4562, 4624), 'os.path.join', 'os.path.join', (["current_app.config['IMAGE_FULLPATH']", 'timestring'], {}), "(current_app.config['IMAGE_FULLPATH'], timestring)\n", (4574, 4624), False, 'import os\n'), ((4628, 4651), 'os.makedirs', 'os.makedirs', (['upload_dir'], {}), '(upload_dir)\n', (4639, 4651), False, 'import os\n'), ((4738, 4796), 'os.path.join', 'os.path.join', (["current_app.config['IMAGE_PATH']", 'timestring'], {}), "(current_app.config['IMAGE_PATH'], timestring)\n", (4750, 4796), False, 'import os\n'), ((4958, 4980), 'os.listdir', 'os.listdir', (['upload_dir'], {}), '(upload_dir)\n', (4968, 4980), False, 'import os\n'), ((2909, 2936), 'json.dump', 'json.dump', (['results', 'outfile'], {}), '(results, outfile)\n', (2918, 2936), False, 'import json\n'), ((5075, 
5111), 'os.path.join', 'os.path.join', (['location_dir', 'filename'], {}), '(location_dir, filename)\n', (5087, 5111), False, 'import os\n'), ((5306, 5332), 're.search', 're.search', (['REGEX', 'filename'], {}), '(REGEX, filename)\n', (5315, 5332), False, 'import re\n'), ((4437, 4451), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4449, 4451), False, 'from datetime import datetime\n')] |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Command line tool to run tests.
This file is called `check` instead of `test` to prevent confusion
for developers and the test runner from including it in test collection.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..utils.scripts import get_parser
__all__ = ['check']
def main(args=None):
    """Parse command line options and invoke `check`."""
    parser = get_parser(check)
    parser.add_argument(
        '--package', type=str, default=None, help='Package to test')
    parsed = parser.parse_args(args)
    check(**vars(parsed))
def check(package):
    """Run gammapy unit tests.

    Parameters
    ----------
    package : str or None
        Package name forwarded to `gammapy.test`; None presumably runs
        the full suite -- confirm against gammapy.test's docs.
    """
    # Local imports keep this CLI module cheap to import.
    import logging
    # Configure logging *before* importing gammapy so anything logged at
    # gammapy import time already uses this format/level.
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s - %(message)s')
    import gammapy
    gammapy.test(package, verbose=True)
| [
"logging.basicConfig",
"gammapy.test"
] | [((714, 792), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(levelname)s - %(message)s"""'}), "(level=logging.DEBUG, format='%(levelname)s - %(message)s')\n", (733, 792), False, 'import logging\n'), ((817, 852), 'gammapy.test', 'gammapy.test', (['package'], {'verbose': '(True)'}), '(package, verbose=True)\n', (829, 852), False, 'import gammapy\n')] |
import json
from abc import ABC, abstractmethod
from construct import Struct, Container
class StateCore(ABC):
    """Abstract base for objects deserialised from raw bytes via `construct`."""
    # Binary layout; every concrete subclass must override this with a Struct.
    layout: Struct = None
    @classmethod
    @abstractmethod
    def from_container(cls, container: Container):
        """Build an instance from a parsed `construct` Container."""
        pass
    @classmethod
    def parse(cls, bytes_data: bytes, factor: int):
        """Deserialise `bytes_data` with `cls.layout`, then apply precision.

        `factor` is forwarded to `parse_precision`; its exact semantics
        (presumably a fixed-point scaling factor) are defined by the
        subclass implementation.
        """
        container = cls.layout.parse(bytes_data)
        obj = cls.from_container(container=container)
        # parse_precision may return a new object; keep its result.
        obj = obj.parse_precision(factor=factor)
        return obj
    @abstractmethod
    def parse_precision(self, factor: int):
        """Apply the precision `factor` and return the resulting object."""
        pass
    @abstractmethod
    def to_dict(self) -> dict:
        """Return a JSON-serialisable dict representation of the state."""
        pass
    def __str__(self):
        # Pretty-printed JSON, preserving insertion order (sort_keys=False).
        my_dict = self.to_dict()
        return json.dumps(my_dict, sort_keys=False, indent=4)
"json.dumps"
] | [((696, 742), 'json.dumps', 'json.dumps', (['my_dict'], {'sort_keys': '(False)', 'indent': '(4)'}), '(my_dict, sort_keys=False, indent=4)\n', (706, 742), False, 'import json\n')] |
from conans import ConanFile, tools
import os
class ClaraConan(ConanFile):
    """Conan recipe for Clara, a header-only C++11 command line parser."""
    name = "clara"
    version = "1.1.5"
    description = "A simple to use, composable, command line parser for C++ 11 and beyond"
    url = "https://github.com/bincrafters/conan-clara"
    homepage = "https://github.com/catchorg/Clara"
    topics = ("conan", "clara", "cli", "cpp11", "command-parser")
    license = "BSL-1.0"
    # Directory the upstream archive is extracted into.
    _source_subfolder = "source_subfolder"
    exports = ["LICENSE.md"]
    # Header-only package: sources need not be copied per build config.
    no_copy_source = True
    def source(self):
        """Download and unpack the pinned upstream release archive."""
        tools.get("{0}/archive/v{1}.zip".format(self.homepage, self.version))
        # GitHub archives extract as 'Clara-<version>' (capitalised), so
        # capitalise 'clara-<version>' before renaming to the subfolder.
        extracted_dir = self.name + "-" + self.version
        os.rename(extracted_dir.capitalize(), self._source_subfolder)
    def package(self):
        """Copy the licence and the single-include headers into the package."""
        include_folder_src = os.path.join(self._source_subfolder, "single_include")
        self.copy(pattern="LICENSE.txt", dst="license", src=self._source_subfolder)
        self.copy(pattern="*.hpp", dst="include", src=include_folder_src)
    def package_id(self):
        """Header-only: a single package id regardless of settings/options."""
        self.info.header_only()
| [
"os.path.join"
] | [((782, 836), 'os.path.join', 'os.path.join', (['self._source_subfolder', '"""single_include"""'], {}), "(self._source_subfolder, 'single_include')\n", (794, 836), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# @Time : 2019-09-01 17:49
# @Author : EchoShoot
# @Email : <EMAIL>
# @URL : https://github.com/EchoShoot
# @File : test_others.py
# @Explain :
from sheen import Str
import pytest
class TestOthers(object):
    """Exercise str-compatible methods of sheen's coloured Str wrapper.

    A raw string is wrapped in red, then progressively narrower inner
    slices are re-styled, giving `obj` nested colour styling while its
    text content stays identical to `raw`.
    """
    raw = 'xxooAß西xoox'
    obj = Str.red(raw)
    # Layer additional colours onto ever-smaller inner slices.
    obj[2:-2] = Str.green
    obj[4:-4] = Str.blue
    obj[5:-5] = Str.magenta
    def test_upper(self):
        # Case transforms compare equal to the plain-str result, but NOT
        # to a freshly wrapped Str -- presumably the styling participates
        # in Str equality; verify against the sheen docs.
        assert self.obj.upper() == self.raw.upper()
        assert self.obj.upper() != Str(self.raw).upper()
    def test_lower(self):
        assert self.obj.lower() == self.raw.lower()
        assert self.obj.lower() != Str(self.raw).lower()
    def test_swapcase(self):
        assert self.obj.swapcase() == self.raw.swapcase()
        assert self.obj.swapcase() != Str(self.raw).swapcase()
    def test_title(self):
        assert self.obj.title() == self.raw.title()
        assert self.obj.title() != Str(self.raw).title()
    def test_capitalize(self):
        assert self.obj.capitalize() == self.raw.capitalize()
        assert self.obj.capitalize() != Str(self.raw).capitalize()
    def test_casefold(self):
        assert self.obj.casefold() == self.raw.casefold()
        assert self.obj.casefold() != Str(self.raw).casefold()
    def test_startswith(self):
        # Prefix checks must also respect the colours of the argument.
        assert self.obj.startswith('xxoo') is True
        assert self.obj.startswith('xoox') is False
        assert self.obj.startswith(Str.green('xx') + Str.red('oo')) is False
        assert self.obj.startswith(Str.red('xx') + Str.green('oo')) is True
    def test_endswith(self):
        assert self.obj.endswith('xxoo') is False
        assert self.obj.endswith('xoox') is True
        assert self.obj.endswith(Str.green('xo') + Str.red('ox')) is True
        assert self.obj.endswith(Str.red('xo') + Str.green('ox')) is False
    def test_zfill(self):
        assert self.obj.zfill(-1) == self.raw.zfill(-1)
        assert self.obj.zfill(0) == self.raw.zfill(0)
        assert self.obj.zfill(30) == self.raw.zfill(30)
    def test_encode(self):
        assert self.obj.encode() == self.raw.encode()
        # Some characters in `raw` cannot be encoded as gbk, so 'ignore'
        # and 'replace' succeed while 'strict' must raise.
        assert self.obj.encode(encoding='gbk', errors='ignore') == self.raw.encode(encoding='gbk', errors='ignore')
        assert self.obj.encode(encoding='gbk', errors='replace') == self.raw.encode(encoding='gbk', errors='replace')
        with pytest.raises(UnicodeEncodeError):
            self.obj.encode(encoding='gbk', errors='strict')
        with pytest.raises(UnicodeEncodeError):
            self.raw.encode(encoding='gbk', errors='strict')
self.raw.encode(encoding='gbk', errors='strict')
| [
"sheen.Str.green",
"sheen.Str",
"sheen.Str.red",
"pytest.raises"
] | [((280, 292), 'sheen.Str.red', 'Str.red', (['raw'], {}), '(raw)\n', (287, 292), False, 'from sheen import Str\n'), ((2331, 2364), 'pytest.raises', 'pytest.raises', (['UnicodeEncodeError'], {}), '(UnicodeEncodeError)\n', (2344, 2364), False, 'import pytest\n'), ((2440, 2473), 'pytest.raises', 'pytest.raises', (['UnicodeEncodeError'], {}), '(UnicodeEncodeError)\n', (2453, 2473), False, 'import pytest\n'), ((486, 499), 'sheen.Str', 'Str', (['self.raw'], {}), '(self.raw)\n', (489, 499), False, 'from sheen import Str\n'), ((622, 635), 'sheen.Str', 'Str', (['self.raw'], {}), '(self.raw)\n', (625, 635), False, 'from sheen import Str\n'), ((770, 783), 'sheen.Str', 'Str', (['self.raw'], {}), '(self.raw)\n', (773, 783), False, 'from sheen import Str\n'), ((909, 922), 'sheen.Str', 'Str', (['self.raw'], {}), '(self.raw)\n', (912, 922), False, 'from sheen import Str\n'), ((1065, 1078), 'sheen.Str', 'Str', (['self.raw'], {}), '(self.raw)\n', (1068, 1078), False, 'from sheen import Str\n'), ((1218, 1231), 'sheen.Str', 'Str', (['self.raw'], {}), '(self.raw)\n', (1221, 1231), False, 'from sheen import Str\n'), ((1413, 1428), 'sheen.Str.green', 'Str.green', (['"""xx"""'], {}), "('xx')\n", (1422, 1428), False, 'from sheen import Str\n'), ((1431, 1444), 'sheen.Str.red', 'Str.red', (['"""oo"""'], {}), "('oo')\n", (1438, 1444), False, 'from sheen import Str\n'), ((1490, 1503), 'sheen.Str.red', 'Str.red', (['"""xx"""'], {}), "('xx')\n", (1497, 1503), False, 'from sheen import Str\n'), ((1506, 1521), 'sheen.Str.green', 'Str.green', (['"""oo"""'], {}), "('oo')\n", (1515, 1521), False, 'from sheen import Str\n'), ((1693, 1708), 'sheen.Str.green', 'Str.green', (['"""xo"""'], {}), "('xo')\n", (1702, 1708), False, 'from sheen import Str\n'), ((1711, 1724), 'sheen.Str.red', 'Str.red', (['"""ox"""'], {}), "('ox')\n", (1718, 1724), False, 'from sheen import Str\n'), ((1767, 1780), 'sheen.Str.red', 'Str.red', (['"""xo"""'], {}), "('xo')\n", (1774, 1780), False, 'from sheen import Str\n'), 
((1783, 1798), 'sheen.Str.green', 'Str.green', (['"""ox"""'], {}), "('ox')\n", (1792, 1798), False, 'from sheen import Str\n')] |
"""
Validate CAL DAC settings XML files. The command line is:
valDACsettings [-V] [-r] [-R <root_file>] [-L <log_file>] FLE|FHE|LAC|ULD <MeV | margin> <dac_slopes_file> <dac_xml_file>
where:
-r = generate ROOT output with default name
-R <root_file> = output validation diagnostics in ROOT file
-L <log_file> = save console output to log text file
-V = verbose; turn on debug output
FLE|FHE|LAC = DAC type to validate
<MeV> = The threshold energy in MeV units (FLE, FHE, LAC).
<margin> = The ULD saturation margin percentage (ULD).
<dac_slopes_file> = The CAL_DacSlopes calibration file.
<dac_xml_file> = The DAC settings XML file to validate.
"""
__facility__ = "Offline"
__abstract__ = "Validate CAL DAC settings XML files."
__author__ = "D.L.Wood"
__date__ = "$Date: 2008/02/03 00:51:50 $"
__version__ = "$Revision: 1.8 $, $Author: fewtrell $"
__release__ = "$Name: $"
__credits__ = "NRL code 7650"
import sys, os
import logging
import getopt
import numarray
import calCalibXML
import calDacXML
import calConstant
# validation limits
uldWarnLimit = 0.005
uldErrLimit = 0.010
def rootHistsDAC(engData, fileName):
# create summary histogram
cs = ROOT.TCanvas('c_Summary', 'Summary', -1)
cs.SetGrid()
cs.SetLogy()
hName = "h_Summary"
hs = ROOT.TH1F(hName, "DAC_Val_%s: %s" % (dacType, fileName), 100, MeV - (errLimit * 2), MeV + (errLimit * 2))
axis = hs.GetXaxis()
axis.SetTitle('Threshold Energy (MeV)')
axis.CenterTitle()
axis = hs.GetYaxis()
axis.SetTitle('Counts')
axis.CenterTitle()
for tem in twrs:
for row in range(calConstant.NUM_ROW):
for end in range(calConstant.NUM_END):
for fe in range(calConstant.NUM_FE):
e = engData[tem, row, end, fe]
hs.Fill(e)
hs.Draw()
cs.Update()
cs.Write()
def engValDAC(dacData, errData):
valStatus = 0
# sanity check on DAC settings
for tem in twrs:
for row in range(calConstant.NUM_ROW):
for end in range(calConstant.NUM_END):
for fe in range(calConstant.NUM_FE):
dac = dacData[tem, row, end, fe]
if dac > 63:
log.error('setting %d > 63 for T%d,%s%s,%d', dac, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
valStatus = 1
elif dac == 63:
log.warning('setting %d == 63 for T%d,%s%s,%d', dac, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
elif dac == 0:
log.error('setting %d == 0 for T%d,%s%s,%d', dac, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
valStatus = 1
# check for ADC->energy conversion error
for tem in twrs:
for row in range(calConstant.NUM_ROW):
for end in range(calConstant.NUM_END):
for fe in range(calConstant.NUM_FE):
err = errData[tem, row, end, fe]
if err > warnLimit:
if err > errLimit:
log.error('err %0.2f > %0.2f for T%d,%s%s,%d', err, errLimit, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
valStatus = 1
else:
log.warning('err %0.2f > %0.2f for T%d,%s%s,%d', err, warnLimit, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
return valStatus
def engValULD(dacData, engData, saturation):
valStatus = 0
# sanity check on DAC settings
for tem in twrs:
for row in range(calConstant.NUM_ROW):
for end in range(calConstant.NUM_END):
for fe in range(calConstant.NUM_FE):
dac = dacData[tem, row, end, fe]
if dac > 63:
log.error('setting %d > 63 for T%d,%s%s,%d', dac, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
valStatus = 1
elif dac == 63:
log.warning('setting %d == 63 for T%d,%s%s,%d', dac, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
elif dac == 0:
log.error('setting %d == 0 for T%d,%s%s,%d', dac, tem, calConstant.CROW[row],
calConstant.CPM[end], fe)
valStatus = 1
# check ULD threshold verses saturation to see if margin is kept
for tem in twrs:
for row in range(calConstant.NUM_ROW):
for end in range(calConstant.NUM_END):
for fe in range(calConstant.NUM_FE):
for erng in range(3):
eng = engData[erng, tem, row, end, fe]
sat = saturation[erng, tem, row, end, fe]
margin = (sat - eng) / sat
if margin < MeV:
err = abs(MeV - margin)
if err > uldWarnLimit:
if err > uldErrLimit:
log.error("err %0.3f > %0.3f for T%d,%s%s,%d,%s", err, uldErrLimit, tem, calConstant.CROW[row],
calConstant.CPM[end], fe, calConstant.CRNG[erng])
valStatus = 1
else:
log.warning("err %0.3f > %0.3f for T%d,%s%s,%d,%s", err, uldWarnLimit, tem, calConstant.CROW[row],
calConstant.CPM[end], fe, calConstant.CRNG[erng])
return valStatus
if __name__ == '__main__':
rootOutput = False
logName = None
# setup logger
logging.basicConfig()
log = logging.getLogger('valDACsettings')
log.setLevel(logging.INFO)
# check command line
try:
opts = getopt.getopt(sys.argv[1:], "-R:-L:-V-r")
except getopt.GetoptError:
log.error(__doc__)
sys.exit(1)
optList = opts[0]
for o in optList:
if o[0] == '-V':
log.setLevel(logging.DEBUG)
elif o[0] == '-R':
rootName = o[1]
rootOutput = True
elif o[0] == '-r':
rootName = None
rootOutput = True
elif o[0] == '-L':
logName = o[1]
args = opts[1]
if len(args) != 4:
log.error(__doc__)
sys.exit(1)
dacType = args[0]
MeV = float(args[1])
slopesName = args[2]
dacName = args[3]
ext = os.path.splitext(dacName)
if rootOutput and rootName is None:
rootName = "%s.val.root" % ext[0]
if logName is None:
logName = "%s.val.log" % ext[0]
if os.path.exists(logName):
log.debug('Removing old log file %s', logName)
os.remove(logName)
hdl = logging.FileHandler(logName)
fmt = logging.Formatter('%(levelname)s %(message)s')
hdl.setFormatter(fmt)
log.addHandler(hdl)
# set limits base on DAC type
if dacType == 'FLE':
ftype = 'fle_dac'
warnLimit = 5.0
errLimit = 10.0
elif dacType == 'FHE':
ftype = 'fhe_dac'
warnLimit = 50.0
errLimit = 100.0
elif dacType == 'LAC':
ftype = 'log_acpt'
warnLimit = 0.5
errLimit = 1.0
elif dacType == 'ULD':
ftype = 'rng_uld_dac'
warnLimit = 0.005
errLimit = 0.010
MeV /= 100.0
else:
log.error("DAC type %s not supported", dacType)
sys.exit(1)
log.debug('Using error limit %f', errLimit)
log.debug('Using warning limit %f', warnLimit)
log.debug('Using DAC type %s', ftype)
log.debug('Using threshold %f MeV', MeV)
# read dacSlopes gain file
log.info("Reading file %s", slopesName)
slopeFile = calCalibXML.calDacSlopesCalibXML(slopesName)
(dacSlope, uldSlope, rangeData) = slopeFile.read()
towers = slopeFile.getTowers()
slopeFile.close()
# read DAC settings file
log.info('Reading file %s', dacName)
fio = calDacXML.calSettingsXML(dacName, ftype)
twrs = set(fio.getTowers())
if not twrs.issubset(towers):
log.error("%s data not found in file %s", twrs, dacName)
sys.exit(1)
log.debug("settings available for towers: %s", twrs)
dacData = fio.read()
fio.close()
# account for ranges and type
if dacType == 'LAC':
slope = dacSlope[...,0]
offset = dacSlope[...,1]
ranges = rangeData[...,0]
elif dacType == 'FLE':
slope = dacSlope[...,2]
offset = dacSlope[...,3]
ranges = rangeData[...,1]
elif dacType == 'FHE':
slope = dacSlope[...,4]
offset = dacSlope[...,5]
ranges = rangeData[...,2]
elif dacType == 'ULD':
slope = uldSlope[...,0]
offset = uldSlope[...,1]
saturation = uldSlope[...,2]
origDacData = dacData
# handle FLE, FHE, and LAC cases
if dacType != 'ULD':
dacData = numarray.where(ranges, (dacData - 64), dacData)
# convert to MeV
eng = (slope * dacData) + offset
# calculate errors
err = abs(eng - MeV)
# do validation
valStatus = engValDAC(dacData, err)
# handle ULD case
else:
dacData = dacData - 64
# convert to MeV
eng = (slope * dacData) + offset
# do validation
valStatus = engValULD(dacData, eng, saturation)
# create ROOT output file
if rootOutput:
import ROOT
log.info('Creating file %s' % rootName)
ROOT.gROOT.Reset()
rootFile = ROOT.TFile(rootName, "recreate")
# write error histograms
if dacType != 'ULD':
rootHistsDAC(eng, dacName)
# clean up
rootFile.Close()
# do simple stats
sf = (origDacData < 64)
sc = numarray.logical_not(sf)
fineCount = numarray.sum(numarray.ravel(sf))
coarseCount = numarray.sum(numarray.ravel(sc))
log.info("FINE count = %d, COARSE count = %d", fineCount, coarseCount)
data = numarray.compress(numarray.ravel(sf), numarray.ravel(origDacData))
if len(data) == 0:
av = 0
mn = 0
mx = 0
else:
av = numarray.average(data.astype(numarray.Float32))
mn = min(data)
mx = max(data)
log.info("FINE setting average = %0.2f", av)
log.info("FINE setting minimum = %d, maximum = %d", mn, mx)
data = numarray.compress(numarray.ravel(sc), numarray.ravel(origDacData))
if len(data) == 0:
av = 0
mn = 0
mx = 0
else:
av = numarray.average(data.astype(numarray.Float32))
mn = min(data)
mx = max(data)
log.info("COARSE setting average = %0.2f", av)
log.info("COARSE setting minimum = %d, maximum = %d", mn, mx)
# report results
if valStatus == 0:
statusStr = 'PASSED'
else:
statusStr = 'FAILED'
log.info('Validation %s for file %s', statusStr, dacName)
sys.exit(valStatus)
| [
"logging.basicConfig",
"logging.getLogger",
"os.path.exists",
"calDacXML.calSettingsXML",
"getopt.getopt",
"ROOT.TH1F",
"logging.Formatter",
"os.path.splitext",
"numarray.logical_not",
"logging.FileHandler",
"calCalibXML.calDacSlopesCalibXML",
"numarray.where",
"sys.exit",
"ROOT.TFile",
... | [((1320, 1360), 'ROOT.TCanvas', 'ROOT.TCanvas', (['"""c_Summary"""', '"""Summary"""', '(-1)'], {}), "('c_Summary', 'Summary', -1)\n", (1332, 1360), False, 'import ROOT\n'), ((1435, 1541), 'ROOT.TH1F', 'ROOT.TH1F', (['hName', "('DAC_Val_%s: %s' % (dacType, fileName))", '(100)', '(MeV - errLimit * 2)', '(MeV + errLimit * 2)'], {}), "(hName, 'DAC_Val_%s: %s' % (dacType, fileName), 100, MeV - \n errLimit * 2, MeV + errLimit * 2)\n", (1444, 1541), False, 'import ROOT\n'), ((6634, 6655), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (6653, 6655), False, 'import logging\n'), ((6666, 6701), 'logging.getLogger', 'logging.getLogger', (['"""valDACsettings"""'], {}), "('valDACsettings')\n", (6683, 6701), False, 'import logging\n'), ((7459, 7484), 'os.path.splitext', 'os.path.splitext', (['dacName'], {}), '(dacName)\n', (7475, 7484), False, 'import sys, os\n'), ((7639, 7662), 'os.path.exists', 'os.path.exists', (['logName'], {}), '(logName)\n', (7653, 7662), False, 'import sys, os\n'), ((7757, 7785), 'logging.FileHandler', 'logging.FileHandler', (['logName'], {}), '(logName)\n', (7776, 7785), False, 'import logging\n'), ((7796, 7842), 'logging.Formatter', 'logging.Formatter', (['"""%(levelname)s %(message)s"""'], {}), "('%(levelname)s %(message)s')\n", (7813, 7842), False, 'import logging\n'), ((8736, 8780), 'calCalibXML.calDacSlopesCalibXML', 'calCalibXML.calDacSlopesCalibXML', (['slopesName'], {}), '(slopesName)\n', (8768, 8780), False, 'import calCalibXML\n'), ((8975, 9015), 'calDacXML.calSettingsXML', 'calDacXML.calSettingsXML', (['dacName', 'ftype'], {}), '(dacName, ftype)\n', (8999, 9015), False, 'import calDacXML\n'), ((10953, 10977), 'numarray.logical_not', 'numarray.logical_not', (['sf'], {}), '(sf)\n', (10973, 10977), False, 'import numarray\n'), ((12119, 12138), 'sys.exit', 'sys.exit', (['valStatus'], {}), '(valStatus)\n', (12127, 12138), False, 'import sys, os\n'), ((6785, 6826), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', 
'"""-R:-L:-V-r"""'], {}), "(sys.argv[1:], '-R:-L:-V-r')\n", (6798, 6826), False, 'import getopt\n'), ((7337, 7348), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7345, 7348), False, 'import sys, os\n'), ((7727, 7745), 'os.remove', 'os.remove', (['logName'], {}), '(logName)\n', (7736, 7745), False, 'import sys, os\n'), ((9155, 9166), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9163, 9166), False, 'import sys, os\n'), ((9969, 10014), 'numarray.where', 'numarray.where', (['ranges', '(dacData - 64)', 'dacData'], {}), '(ranges, dacData - 64, dacData)\n', (9983, 10014), False, 'import numarray\n'), ((10642, 10660), 'ROOT.gROOT.Reset', 'ROOT.gROOT.Reset', ([], {}), '()\n', (10658, 10660), False, 'import ROOT\n'), ((10680, 10712), 'ROOT.TFile', 'ROOT.TFile', (['rootName', '"""recreate"""'], {}), "(rootName, 'recreate')\n", (10690, 10712), False, 'import ROOT\n'), ((11013, 11031), 'numarray.ravel', 'numarray.ravel', (['sf'], {}), '(sf)\n', (11027, 11031), False, 'import numarray\n'), ((11064, 11082), 'numarray.ravel', 'numarray.ravel', (['sc'], {}), '(sc)\n', (11078, 11082), False, 'import numarray\n'), ((11195, 11213), 'numarray.ravel', 'numarray.ravel', (['sf'], {}), '(sf)\n', (11209, 11213), False, 'import numarray\n'), ((11215, 11242), 'numarray.ravel', 'numarray.ravel', (['origDacData'], {}), '(origDacData)\n', (11229, 11242), False, 'import numarray\n'), ((11576, 11594), 'numarray.ravel', 'numarray.ravel', (['sc'], {}), '(sc)\n', (11590, 11594), False, 'import numarray\n'), ((11596, 11623), 'numarray.ravel', 'numarray.ravel', (['origDacData'], {}), '(origDacData)\n', (11610, 11623), False, 'import numarray\n'), ((6893, 6904), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6901, 6904), False, 'import sys, os\n'), ((8444, 8455), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8452, 8455), False, 'import sys, os\n')] |
"""!
@brief Test templates for ROCK clustering module.
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
"""
from pyclustering.cluster.rock import rock;
from pyclustering.utils import read_sample;
from random import random;
class RockTestTemplates:
@staticmethod
def templateLengthProcessData(path_to_file, radius, cluster_numbers, threshold, expected_cluster_length, ccore):
sample = read_sample(path_to_file);
rock_instance = rock(sample, radius, cluster_numbers, threshold, ccore);
rock_instance.process();
clusters = rock_instance.get_clusters();
length = sum([len(cluster) for cluster in clusters]);
assert len(sample) == length;
obtained_cluster_sizes = [len(cluster) for cluster in clusters];
obtained_cluster_sizes.sort();
expected_cluster_length.sort();
assert obtained_cluster_sizes == expected_cluster_length;
@staticmethod
def templateClusterAllocationOneDimensionData(ccore_flag):
input_data = [ [random()] for i in range(10) ] + [ [random() + 3] for i in range(10) ] + [ [random() + 5] for i in range(10) ] + [ [random() + 8] for i in range(10) ];
rock_instance = rock(input_data, 1, 4, 0.5, ccore_flag);
rock_instance.process();
clusters = rock_instance.get_clusters();
assert len(clusters) == 4;
for cluster in clusters:
assert len(cluster) == 10;
| [
"pyclustering.utils.read_sample",
"random.random",
"pyclustering.cluster.rock.rock"
] | [((447, 472), 'pyclustering.utils.read_sample', 'read_sample', (['path_to_file'], {}), '(path_to_file)\n', (458, 472), False, 'from pyclustering.utils import read_sample\n'), ((509, 564), 'pyclustering.cluster.rock.rock', 'rock', (['sample', 'radius', 'cluster_numbers', 'threshold', 'ccore'], {}), '(sample, radius, cluster_numbers, threshold, ccore)\n', (513, 564), False, 'from pyclustering.cluster.rock import rock\n'), ((1303, 1342), 'pyclustering.cluster.rock.rock', 'rock', (['input_data', '(1)', '(4)', '(0.5)', 'ccore_flag'], {}), '(input_data, 1, 4, 0.5, ccore_flag)\n', (1307, 1342), False, 'from pyclustering.cluster.rock import rock\n'), ((1232, 1240), 'random.random', 'random', ([], {}), '()\n', (1238, 1240), False, 'from random import random\n'), ((1116, 1124), 'random.random', 'random', ([], {}), '()\n', (1122, 1124), False, 'from random import random\n'), ((1192, 1200), 'random.random', 'random', ([], {}), '()\n', (1198, 1200), False, 'from random import random\n'), ((1152, 1160), 'random.random', 'random', ([], {}), '()\n', (1158, 1160), False, 'from random import random\n')] |
from ryu.app.wsgi import WSGIApplication
from ryu.base import app_manager
from ryu.lib import hub
from ryu.lib.dpid import dpid_to_str
from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER, CONFIG_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.controller import ofp_event
from ryu.lib.packet.ether_types import ETH_TYPE_LLDP
from ryu.lib.packet import (packet, ethernet)
from socket import error as SocketError
from tinyrpc.exc import InvalidReplyError
import sys
import process_stats_port as process
import process_stats_flow as processf
from performance_server import PerformanceServerController
import calendar, time
class PerformanceServerApp(app_manager.RyuApp):
_CONTEXTS = {
'wsgi': WSGIApplication,
}
def __init__(self, *args, **kwargs):
super(PerformanceServerApp, self).__init__(*args, **kwargs)
wsgi = kwargs['wsgi']
wsgi.register(PerformanceServerController, {'performance_app': self})
# Register the controller class below, allowing it to listen to controller events
print('Making VisServerApp')
self.rpc_clients = [] # list of all the connected clients to update
self.datapaths = {} # list of switches
self.monitor_thread = hub.spawn(self.monitor)
# tasklet to monitor schedule statistic requests
self.req_count = 0 # count the number of requests made
# These are assigned in _port_stats_reply_handler()
self.prevreadings = {} # previous network readings
self.currentstats = {} # current network statistics
self.logging = False
self.waittime = 1
self.placeholder = 'loading'
self.statstype = 'port' # 'port' or 'flow', depending on the desired statistics
self.dp_packet_in = {} # {dpid:, count:}
# controller stats
self.total_packet_in = 0
self.prev_packet_in = 0
self.start_time = calendar.timegm(time.gmtime())
self.prev_time = calendar.timegm(time.gmtime())
@set_ev_cls(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER])
def _state_change_handler(self, ev):
"""Adds and removes the existing switches from this classes records.
On first run, populates self.datapaths:"""
datapath = ev.datapath
if ev.state == MAIN_DISPATCHER: # Existing switches
if not datapath.id in self.datapaths:
self.logger.debug('register datapath: %016x', datapath.id)
self.datapaths[datapath.id] = datapath
self.prevreadings[dpid_to_str(datapath.id)] = self.placeholder
# self.currentstats[dpid_to_str(datapath.id)] = self.placeholder
elif ev.state == DEAD_DISPATCHER: # Removed switches
if datapath.id in self.datapaths:
self.logger.debug('unregister datapath: %016x', datapath.id)
del self.datapaths[datapath.id]
# del self.currentstats[datapath.id]
"""Requests statistics for each switch stored
Instated by self.monitor_thread"""
def monitor(self):
self.logger.info("Starting stats monitor")
while True:
count = 0
for dp in self.datapaths.values():
if (self.statstype == 'flow'):
self.send_flow_stats_request(dp)
elif (self.statstype == 'port'):
self.send_port_stats_request(dp)
count += 1
## topology experimenting
# links = get_link(self)
# body = json.dumps([link.to_dict() for link in links])
# print("%d %s" % (dp.id, body))
if self.datapaths.values():
self.req_count += 1
# print("Counted %d datapaths. Request #%d sent" % (count, self.req_count))
sys.stdout.write('Counted %d datapaths. Request #%d sent. Packet_in: %d \r' % (count, self.req_count, self.total_packet_in))
sys.stdout.flush();
hub.sleep(self.waittime)
self.rpc_broadcall('event_update_controller',self.controller_stats())
self.rpc_broadcall('event_update_statistics',self.currentstats)
def rpc_broadcall(self, func_name, msg):
"""Copied from ws_topology"""
disconnected_clients = []
self.data=msg
for rpc_client in self.rpc_clients:
# NOTE: Although broadcasting is desired,
# RPCClient#get_proxy(one_way=True) does not work well
rpc_server = rpc_client.get_proxy()
try:
getattr(rpc_server, func_name)(msg)
except SocketError:
self.logger.debug('WebSocket disconnected: %s', rpc_client.ws)
disconnected_clients.append(rpc_client)
except InvalidReplyError as e:
self.logger.debug("InvalidReplyError: ")
self.logger.error(e)
for client in disconnected_clients:
self.rpc_clients.remove(client)
"""Sends a port statistics request for all ports on datapath"""
def send_port_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPPortStatsRequest(datapath, 0, ofp.OFPP_ANY)
datapath.send_msg(req)
@set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)
def _port_stats_reply_handler(self, ev):
current_data = process.stats_event(ev, self.logging)
dp = current_data['datapath']
current_stats = process.avg_rates(current_data, self.prevreadings[dp], self.placeholder)
# include the Pnf value
# if current_stats['total_tx'] == 0:
# current_stats['pnf'] = 0
# else:
# current_stats['pnf'] = dp_packet_in[dp] / current_stats['total_tx']
self.prevreadings[dp] = current_data['ports']
self.currentstats[dp] = current_stats
""" Sends a statistics request for flows """
def send_flow_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
cookie = cookie_mask = 0
match = ofp_parser.OFPMatch()
req = ofp_parser.OFPFlowStatsRequest(datapath, 0, ofp.OFPTT_ALL, # any table, port, group
ofp.OFPP_ANY, ofp.OFPG_ANY, cookie, cookie_mask, match)
datapath.send_msg(req)
@set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
def flow_stats_reply_handler(self, ev):
current_flows = processf.stats_event(ev, self.logging)
dp = current_flows['datapath']
current_stats = processf.avg_rates(current_flows, self.prevreadings[dp], self.placeholder)
self.prevreadings[dp] = current_flows['flows']
self.currentstats[dp] = current_stats
##
## Controller statistics monitoring
##
## Uses
## * self.dp_packet_in = {}
## * self.total_packet_in = int
##
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
dp = ev.msg.datapath
if dp.id not in self.dp_packet_in:
self.dp_packet_in[dpid_to_str(dp.id)] = 0
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
''' Counts the packet_in requests '''
dp = ev.msg.datapath
dpid = dpid_to_str(dp.id)
# # filter LLDP packets.. ## or not, because while there isn't a reply, they place a load on the controller.
pkt = packet.Packet(ev.msg.data)
eth = pkt.get_protocols(ethernet.ethernet)[0]
if eth.ethertype == ETH_TYPE_LLDP:
return
# controller_count++
self.total_packet_in = 1 + self.total_packet_in
# switch_count++
if dpid in self.dp_packet_in:
self.dp_packet_in[dpid] = 1 + self.dp_packet_in[dpid]
else:
self.dp_packet_in[dpid] = 1
# print ('dpid: %s, %d total: %d' % (dpid, self.dp_packet_in[dpid], self.total_packet_in))
'''Controller broadcast not in use'''
def get_ctrl_switches(self, dp='all'):
switches = []
if dp == 'all':
for switch in self.dp_packet_in:
s = {}
s['dpid'] = switch
s['total_packet_in'] = self.dp_packet_in[switch]
switches.append(s)
else:
s = {}
s['dpid'] = dpid_to_str(dp)
s['total_packet_in'] = self.dp_packet_in[dp]
switches.append(s)
return switches
def controller_stats(self):
current_time = calendar.timegm(time.gmtime())
rv = {}
# times
rv['up_time'] = current_time - self.start_time
rv['duration'] = current_time - self.prev_time
self.prev_time = current_time
# stats
rv['packet_in_total'] = self.total_packet_in
rv['packet_in_delta'] = self.total_packet_in - self.prev_packet_in
rv['switches'] = self.get_ctrl_switches()
# rv['service_rate'] = '0'
self.prev_packet_in = self.total_packet_in
# print('controller %d' % (self.total_packet_in))
# sys.stdout.write('controller %d \n' % (self.total_packet_in))
# sys.stdout.flush();
return rv
app_manager.require_app('ryu.app.simple_switch_13_lldp')
app_manager.require_app('ryu.app.rest_topology')
app_manager.require_app('ryu.app.ws_topology') | [
"process_stats_flow.avg_rates",
"ryu.lib.packet.packet.Packet",
"process_stats_port.avg_rates",
"ryu.base.app_manager.require_app",
"process_stats_port.stats_event",
"ryu.lib.hub.spawn",
"ryu.controller.handler.set_ev_cls",
"process_stats_flow.stats_event",
"ryu.lib.dpid.dpid_to_str",
"ryu.lib.hub... | [((9680, 9736), 'ryu.base.app_manager.require_app', 'app_manager.require_app', (['"""ryu.app.simple_switch_13_lldp"""'], {}), "('ryu.app.simple_switch_13_lldp')\n", (9703, 9736), False, 'from ryu.base import app_manager\n'), ((9737, 9785), 'ryu.base.app_manager.require_app', 'app_manager.require_app', (['"""ryu.app.rest_topology"""'], {}), "('ryu.app.rest_topology')\n", (9760, 9785), False, 'from ryu.base import app_manager\n'), ((9786, 9832), 'ryu.base.app_manager.require_app', 'app_manager.require_app', (['"""ryu.app.ws_topology"""'], {}), "('ryu.app.ws_topology')\n", (9809, 9832), False, 'from ryu.base import app_manager\n'), ((2164, 2241), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPStateChange', '[MAIN_DISPATCHER, DEAD_DISPATCHER]'], {}), '(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER])\n', (2174, 2241), False, 'from ryu.controller.handler import set_ev_cls\n'), ((5558, 5619), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPPortStatsReply', 'MAIN_DISPATCHER'], {}), '(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)\n', (5568, 5619), False, 'from ryu.controller.handler import set_ev_cls\n'), ((6677, 6738), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPFlowStatsReply', 'MAIN_DISPATCHER'], {}), '(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)\n', (6687, 6738), False, 'from ryu.controller.handler import set_ev_cls\n'), ((7265, 7328), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPSwitchFeatures', 'CONFIG_DISPATCHER'], {}), '(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)\n', (7275, 7328), False, 'from ryu.controller.handler import set_ev_cls\n'), ((7502, 7557), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPPacketIn', 'MAIN_DISPATCHER'], {}), '(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n', (7512, 7557), False, 'from ryu.controller.handler import set_ev_cls\n'), ((1296, 1319), 
'ryu.lib.hub.spawn', 'hub.spawn', (['self.monitor'], {}), '(self.monitor)\n', (1305, 1319), False, 'from ryu.lib import hub\n'), ((5688, 5725), 'process_stats_port.stats_event', 'process.stats_event', (['ev', 'self.logging'], {}), '(ev, self.logging)\n', (5707, 5725), True, 'import process_stats_port as process\n'), ((5797, 5869), 'process_stats_port.avg_rates', 'process.avg_rates', (['current_data', 'self.prevreadings[dp]', 'self.placeholder'], {}), '(current_data, self.prevreadings[dp], self.placeholder)\n', (5814, 5869), True, 'import process_stats_port as process\n'), ((6807, 6845), 'process_stats_flow.stats_event', 'processf.stats_event', (['ev', 'self.logging'], {}), '(ev, self.logging)\n', (6827, 6845), True, 'import process_stats_flow as processf\n'), ((6918, 6992), 'process_stats_flow.avg_rates', 'processf.avg_rates', (['current_flows', 'self.prevreadings[dp]', 'self.placeholder'], {}), '(current_flows, self.prevreadings[dp], self.placeholder)\n', (6936, 6992), True, 'import process_stats_flow as processf\n'), ((7686, 7704), 'ryu.lib.dpid.dpid_to_str', 'dpid_to_str', (['dp.id'], {}), '(dp.id)\n', (7697, 7704), False, 'from ryu.lib.dpid import dpid_to_str\n'), ((7836, 7862), 'ryu.lib.packet.packet.Packet', 'packet.Packet', (['ev.msg.data'], {}), '(ev.msg.data)\n', (7849, 7862), False, 'from ryu.lib.packet import packet, ethernet\n'), ((2087, 2100), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (2098, 2100), False, 'import calendar, time\n'), ((2143, 2156), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (2154, 2156), False, 'import calendar, time\n'), ((4240, 4264), 'ryu.lib.hub.sleep', 'hub.sleep', (['self.waittime'], {}), '(self.waittime)\n', (4249, 4264), False, 'from ryu.lib import hub\n'), ((8756, 8771), 'ryu.lib.dpid.dpid_to_str', 'dpid_to_str', (['dp'], {}), '(dp)\n', (8767, 8771), False, 'from ryu.lib.dpid import dpid_to_str\n'), ((8968, 8981), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (8979, 8981), False, 'import calendar, time\n'), ((4047, 
4183), 'sys.stdout.write', 'sys.stdout.write', (["('Counted %d datapaths. Request #%d sent. Packet_in: %d \\r' % (count,\n self.req_count, self.total_packet_in))"], {}), "(\n 'Counted %d datapaths. Request #%d sent. Packet_in: %d \\r' % (count,\n self.req_count, self.total_packet_in))\n", (4063, 4183), False, 'import sys\n'), ((4191, 4209), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4207, 4209), False, 'import sys\n'), ((7472, 7490), 'ryu.lib.dpid.dpid_to_str', 'dpid_to_str', (['dp.id'], {}), '(dp.id)\n', (7483, 7490), False, 'from ryu.lib.dpid import dpid_to_str\n'), ((2730, 2754), 'ryu.lib.dpid.dpid_to_str', 'dpid_to_str', (['datapath.id'], {}), '(datapath.id)\n', (2741, 2754), False, 'from ryu.lib.dpid import dpid_to_str\n')] |
import painter
import sys
from utils import distance
from collections import deque
#returns last node (destination_node)
def dfs(origin_node, dest_node, board, screen):
#put origin node on open_nodes_list (first to be expanded)
open_nodes=deque()
closed_nodes=list()
open_nodes.append(origin_node)
curr_node=None
#may have some bugs (check later)
#implement heuristics
while len(open_nodes)>0:
curr_node=open_nodes.pop()
painter.paint_search(screen,curr_node.pos,board)
print(curr_node.pos)
if curr_node is dest_node:
print('found')
return curr_node
for node in curr_node.neighbour_list:
if node not in closed_nodes:
if node not in open_nodes:
#print('entered')
node.parent_node=curr_node
open_nodes.append(node)
closed_nodes.append(curr_node)
#if no possible path was found
return None | [
"collections.deque",
"painter.paint_search"
] | [((249, 256), 'collections.deque', 'deque', ([], {}), '()\n', (254, 256), False, 'from collections import deque\n'), ((476, 526), 'painter.paint_search', 'painter.paint_search', (['screen', 'curr_node.pos', 'board'], {}), '(screen, curr_node.pos, board)\n', (496, 526), False, 'import painter\n')] |
#!/bin/python
import roomai.common
from roomai.sevenking import SevenKingPublicState
from roomai.sevenking import SevenKingPrivateState
from roomai.sevenking import SevenKingPersonState
from roomai.sevenking import SevenKingAction
from roomai.sevenking import SevenKingPokerCard
from roomai.sevenking import AllSevenKingPatterns
from roomai.sevenking import AllSevenKingPokerCards
import random
import roomai.sevenking
logger = roomai.get_logger()
class SevenKingEnv(roomai.common.AbstractEnv):
'''
The SevenKing game environment
'''
def init(self, params = dict()):
'''
Initialize the SevenKing game environment with the initialization params.\n
The initialization is a dict with some options\n
1) backward_enable: whether to record all history states. if you need call the backward function, please set it to True. default False\n
2) num_normal_players: how many players are in the game \n
An example of the initialization param is {"num_normal_players":2,"backward_enable":True}\n
:param params: the initialization params
:return: infos, public_state, person_states, private_state
'''
if "num_normal_players" in params:
self.__params__["num_normal_players"] = params["num_normal_players"]
else:
self.__params__["num_normal_players"] = 3
if "backward_enable" in params:
self.__params__["backward_enable"] = params["backward_enable"]
else:
self.__params__["backward_enable"] = False
self.public_state = SevenKingPublicState()
self.private_state = SevenKingPrivateState()
self.person_states = [SevenKingPersonState() for i in range(self.__params__["num_normal_players"] + 1)]
self.public_state_history = []
self.private_state_history = []
self.person_states_history = []
## private_state
allcards = [c.__deepcopy__() for c in AllSevenKingPokerCards.values()]
random.shuffle(allcards)
self.private_state.__keep_cards__ = allcards
for i in range(self.__params__["num_normal_players"]):
tmp = []
for j in range(5):
c = self.private_state.__keep_cards__.pop()
tmp.append(c)
self.person_states[i].__add_cards__(tmp)
## public_state
self.public_state.__turn__,_ = self.__choose_player_with_lowest_card__()
self.public_state.__is_terminal__ = False
self.public_state.__scores__ = []
self.public_state.__license_action__ = SevenKingAction.lookup("")
self.public_state.__stage__ = 0
self.public_state.__num_normal_players__ = self.__params__["num_normal_players"]
self.public_state.__num_keep_cards__ = len(self.private_state.keep_cards)
self.public_state.__num_hand_cards__ = [len(person_state.hand_cards) for person_state in self.person_states]
self.public_state.__is_fold__ = [False for i in range(self.public_state.num_normal_players)]
self.public_state.__num_fold__ = 0
## person_state
for i in range(self.__params__["num_normal_players"]+1):
self.person_states[i].__id__ = i
if i == self.public_state.turn:
self.person_states[i].__available_actions__ = SevenKingEnv.available_actions(self.public_state, self.person_states[i])
self.__gen_state_history_list__()
infos = self.__gen_infos__()
return infos, self.public_state, self.person_states, self.private_state
def forward(self, action):
'''
The SevenKing game environment steps with the action taken by the current player
:param action:
:return:
'''
pu = self.public_state
pr = self.private_state
pes = self.person_states
turn = pu.turn
if self.is_action_valid(action,pu, pes[turn]) == False:
raise ValueError("The (%s) is an invalid action " % (action.key))
pes[pu.turn].__available_actions__ = dict()
pu.__action_history__.append((pu.turn,action))
## the action plays its role
if action.pattern[0] == "p_0":
pu.__is_fold__[turn] = True
pu.__num_fold__ += 1
pes[turn].__available_actions__ = dict()
else:
pes[turn].__del_cards__(action.cards)
if pu.stage == 0:
tmp = []
for i in range(5 - len(pes[turn].hand_cards)):
c = pr.__keep_cards__.pop()
tmp.append(c)
pes[turn].__add_cards__(tmp)
elif pu.stage == 1:
pu.__num_hand_cards__[turn] = len(pes[turn].hand_cards)
if action.pattern[0] != "p_0":
pu.__license_action__ = action
#print (turn, "len_of_hand_card=",len(self.private_state.hand_cards[turn]), " len_of_keep_card=", len(self.private_state.keep_cards), " action = (%s)" %action.key,\
# " handcard1=%s"%(",".join([a.key for a in self.private_state.hand_cards[0]]))," handcard2=%s"%(",".join([a.key for a in self.private_state.hand_cards[1]])),\
# " num_fold =%d"%(self.public_state.num_fold),"fold=%s"%(",".join([str(s) for s in pu.is_fold])))
## termminal
if self.public_state.stage == 1 and len(self.person_states[turn].hand_cards) == 0:
pu.__is_terminal__ = True
pu.__scores__ = self.__compute_scores__()
new_turn = None
pu.__turn__ = new_turn
pu.__license_action__ = SevenKingAction.lookup("")
## stage 0 to 1
elif len(self.private_state.keep_cards) < 5 and pu.stage == 0:
new_turn, min_card = self.__choose_player_with_lowest_card__()
pu.__turn__ = new_turn
pu.__num_fold__ = 0
pu.__is_fold__ = [False for i in range(pu.num_normal_players)]
pu.__license_action__ = SevenKingAction.lookup("")
pes[new_turn].__available_actions__ = SevenKingEnv.available_actions(pu, pes[new_turn])
keys = list(pes[new_turn].available_actions.keys())
for key in keys:
if min_card.key not in key:
del pes[new_turn].__available_actions__[key]
pu.__stage__ = 1
## round next
elif self.public_state.num_fold + 1 == pu.num_normal_players:
new_turn = self.__choose_player_with_nofold__()
pu.__turn__ = new_turn
pu.__num_fold__ = 0
pu.__is_fold__ = [False for i in range(pu.num_normal_players)]
pu.__license_action__ = SevenKingAction.lookup("")
pes[new_turn].__available_actions__ = SevenKingEnv.available_actions(pu, pes[new_turn])
else:
new_turn = (turn + 1) % pu.num_normal_players
pu.__turn__ = new_turn
pes[new_turn].__available_actions__ = SevenKingEnv.available_actions(pu, pes[new_turn])
self.__gen_state_history_list__()
infos = self.__gen_infos__()
return infos, self.public_state, self.person_states, self.private_state
def __compute_scores__(self):
scores = [-1 for i in range(self.__params__["num_normal_players"])]
scores[self.public_state.turn] = self.__params__["num_normal_players"] -1
return scores
def __choose_player_with_nofold__(self):
for player_id in range(self.public_state.num_normal_players):
if self.public_state.is_fold[player_id]== False:
return player_id
def __choose_player_with_lowest_card__(self):
min_card = self.person_states[0].hand_cards[0]
min_playerid = 0
for playerid in range(self.__params__["num_normal_players"]):
for c in self.person_states[playerid].hand_cards:
if SevenKingPokerCard.compare(min_card, c) > 0:
min_card = c
min_playerid = playerid
return min_playerid, min_card
######################## Utils function ###################
@classmethod
def compete(cls, env, players):
'''
Use the game environment to hold a compete for the players
:param env: The game environment
:param players: The players
:return: scores for the players
'''
num_normal_players = len(players)
infos, public_state, person_states, private_state = env.init({"num_normal_players":num_normal_players})
for i in range(env.__params__["num_normal_players"]):
players[i].receive_info(infos[i])
while public_state.is_terminal == False:
turn = public_state.turn
action = players[turn].take_action()
infos, public_state, person_states, private_state = env.forward(action)
for i in range(env.__params__["num_normal_players"]):
players[i].receive_info(infos[i])
return public_state.scores
@classmethod
def is_action_valid(self, action, public_state, person_state):
return action.key in person_state.available_actions
########################### about gen_available_actions ########################
@classmethod
def available_actions(cls, public_state, person_state):
available_actions = dict()
license_action = public_state.license_action
if license_action is None:
license_action = SevenKingAction("")
hand_cards = person_state.hand_cards
patterns = set()
if license_action.pattern[0] == "p_0":
for p in AllSevenKingPatterns.values():
if p[0] != "p_0":
patterns.add(p)
else:
patterns.add(license_action.pattern)
patterns.add(AllSevenKingPatterns["p_0"])
for pattern in patterns:
if pattern[1] >= 2:
point2cards = person_state.__gen_pointrank2cards__()
if len(person_state.hand_cards) < pattern[1]:
continue
elif pattern[0] == "p_0":
available_actions[""] = SevenKingAction.lookup("")
elif pattern[0] == "p_1":
license_pattern = license_action.pattern
license_card = None
if license_pattern[0] != "p_0":
license_card = license_action.cards[-1]
for c in person_state.hand_cards:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(c,license_card) >0:
available_actions[c.key] = SevenKingAction.lookup(c.key)
elif pattern[0] == "p_2":
for p in point2cards:
license_pattern = license_action.pattern
license_card = None
if license_pattern[0] != "p_0":
#print license_action.key, license_action.pattern, license_pattern[0] != "p_0"
license_card = license_action.cards[-1]
len1 = len(point2cards[p])
if len1 == 2:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][1],
license_card) > 0:
str = "%s,%s" % (point2cards[p][0].key, point2cards[p][1].key)
available_actions[str] = SevenKingAction.lookup(str)
if len1 == 3:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][1],
license_card) > 0:
str = "%s,%s" % (point2cards[p][0].key, point2cards[p][1].key)
available_actions[str] = (SevenKingAction.lookup(str))
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][2],
license_card) > 0:
str = "%s,%s" % (point2cards[p][0].key, point2cards[p][2].key)
available_actions[str] = (SevenKingAction.lookup(str))
str = "%s,%s" % (point2cards[p][1].key, point2cards[p][2].key)
available_actions[str] = (SevenKingAction.lookup(str))
if len1 == 4:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][1],
license_card) > 0:
str = "%s,%s" % (point2cards[p][0].key, point2cards[p][1].key)
available_actions[str] = (SevenKingAction.lookup(str))
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][2],
license_card) > 0:
str = "%s,%s" % (point2cards[p][0].key, point2cards[p][2].key)
available_actions[str] = (SevenKingAction.lookup(str))
str = "%s,%s" % (point2cards[p][1].key, point2cards[p][2].key)
available_actions[str] = (SevenKingAction.lookup(str))
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][3],
license_card) > 0:
str = "%s,%s" % (point2cards[p][0].key, point2cards[p][3].key)
available_actions[str] = (SevenKingAction.lookup(str))
str = "%s,%s" % (point2cards[p][1].key, point2cards[p][3].key)
available_actions[str] = (SevenKingAction.lookup(str))
str = "%s,%s" % (point2cards[p][2].key, point2cards[p][3].key)
available_actions[str] = (SevenKingAction.lookup(str))
elif pattern[0] == "p_3":
for p in point2cards:
license_pattern = license_action.pattern
license_card = None
if license_pattern[0] != "p_0" :
license_card = license_action.cards[-1]
len1 = len(point2cards[p])
if len1 == 3:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][2],
license_card) > 0:
str = "%s,%s,%s" % (point2cards[p][0].key, point2cards[p][1].key, point2cards[p][2].key)
available_actions[str] = (SevenKingAction.lookup(str))
if len1 == 4:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][2],
license_card) > 0:
str = "%s,%s,%s" % (point2cards[p][0].key, point2cards[p][1].key, point2cards[p][2].key)
available_actions[str] = (SevenKingAction.lookup(str))
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][3],
license_card) > 0:
str = "%s,%s,%s" % (point2cards[p][0].key, point2cards[p][1].key, point2cards[p][3].key)
available_actions[str]=(SevenKingAction.lookup(str))
str = "%s,%s,%s" % (point2cards[p][0].key, point2cards[p][2].key, point2cards[p][3].key)
available_actions[str]=(SevenKingAction.lookup(str))
str = "%s,%s,%s" % (point2cards[p][1].key, point2cards[p][2].key, point2cards[p][3].key)
available_actions[str]=(SevenKingAction.lookup(str))
elif pattern[0] == "p_4":
for p in point2cards:
license_pattern = license_action.pattern
license_card = None
if license_pattern[0] != "p_0" :
license_card = license_action.cards[-1]
len1 = len(point2cards[p])
if len1 >= 4:
if license_pattern[0] == "p_0" or SevenKingPokerCard.compare(point2cards[p][3],
license_card) > 0:
str = "%s,%s,%s,%s" % (
point2cards[p][0].key,
point2cards[p][1].key,
point2cards[p][2].key,
point2cards[p][3].key
)
available_actions[str]=(SevenKingAction.lookup(str))
if pattern[0] != "p_0" and pattern[0] != "p_1" and\
pattern[0] != "p_2" and pattern[0] != "p_3" and pattern[0] != "p_4":
raise ValueError("The %s pattern is invalid" % (pattern[0]))
#for a in available_actions.values():
# if SevenKingEnv.__is_action_valid__(a,public_state,person_state) == False:
# del available_actions[a.key]
return available_actions
def __deepcopy__(self, memodict={}, newinstance = None):
if newinstance is None:
newinstance = SevenKingEnv()
newinstance = super(SevenKingEnv, self).__deepcopy__(newinstance=newinstance)
return newinstance | [
"random.shuffle",
"roomai.sevenking.SevenKingPokerCard.compare",
"roomai.sevenking.SevenKingAction",
"roomai.sevenking.SevenKingPersonState",
"roomai.sevenking.SevenKingPrivateState",
"roomai.sevenking.AllSevenKingPokerCards.values",
"roomai.sevenking.SevenKingAction.lookup",
"roomai.sevenking.SevenKi... | [((1590, 1612), 'roomai.sevenking.SevenKingPublicState', 'SevenKingPublicState', ([], {}), '()\n', (1610, 1612), False, 'from roomai.sevenking import SevenKingPublicState\n'), ((1642, 1665), 'roomai.sevenking.SevenKingPrivateState', 'SevenKingPrivateState', ([], {}), '()\n', (1663, 1665), False, 'from roomai.sevenking import SevenKingPrivateState\n'), ((2013, 2037), 'random.shuffle', 'random.shuffle', (['allcards'], {}), '(allcards)\n', (2027, 2037), False, 'import random\n'), ((2618, 2644), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['""""""'], {}), "('')\n", (2640, 2644), False, 'from roomai.sevenking import SevenKingAction\n'), ((1696, 1718), 'roomai.sevenking.SevenKingPersonState', 'SevenKingPersonState', ([], {}), '()\n', (1716, 1718), False, 'from roomai.sevenking import SevenKingPersonState\n'), ((5720, 5746), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['""""""'], {}), "('')\n", (5742, 5746), False, 'from roomai.sevenking import SevenKingAction\n'), ((9906, 9925), 'roomai.sevenking.SevenKingAction', 'SevenKingAction', (['""""""'], {}), "('')\n", (9921, 9925), False, 'from roomai.sevenking import SevenKingAction\n'), ((10066, 10095), 'roomai.sevenking.AllSevenKingPatterns.values', 'AllSevenKingPatterns.values', ([], {}), '()\n', (10093, 10095), False, 'from roomai.sevenking import AllSevenKingPatterns\n'), ((1972, 2003), 'roomai.sevenking.AllSevenKingPokerCards.values', 'AllSevenKingPokerCards.values', ([], {}), '()\n', (2001, 2003), False, 'from roomai.sevenking import AllSevenKingPokerCards\n'), ((6189, 6215), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['""""""'], {}), "('')\n", (6211, 6215), False, 'from roomai.sevenking import SevenKingAction\n'), ((7027, 7053), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['""""""'], {}), "('')\n", (7049, 7053), False, 'from roomai.sevenking import SevenKingAction\n'), ((8312, 8351), 
'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['min_card', 'c'], {}), '(min_card, c)\n', (8338, 8351), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((10607, 10633), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['""""""'], {}), "('')\n", (10629, 10633), False, 'from roomai.sevenking import SevenKingAction\n'), ((11110, 11139), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['c.key'], {}), '(c.key)\n', (11132, 11139), False, 'from roomai.sevenking import SevenKingAction\n'), ((11008, 11051), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['c', 'license_card'], {}), '(c, license_card)\n', (11034, 11051), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((12027, 12054), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (12049, 12054), False, 'from roomai.sevenking import SevenKingAction\n'), ((12460, 12487), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (12482, 12487), False, 'from roomai.sevenking import SevenKingAction\n'), ((12856, 12883), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (12878, 12883), False, 'from roomai.sevenking import SevenKingAction\n'), ((13038, 13065), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (13060, 13065), False, 'from roomai.sevenking import SevenKingAction\n'), ((13472, 13499), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (13494, 13499), False, 'from roomai.sevenking import SevenKingAction\n'), ((13868, 13895), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (13890, 13895), False, 'from roomai.sevenking import SevenKingAction\n'), ((14050, 14077), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', 
(['str'], {}), '(str)\n', (14072, 14077), False, 'from roomai.sevenking import SevenKingAction\n'), ((14445, 14472), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (14467, 14472), False, 'from roomai.sevenking import SevenKingAction\n'), ((14627, 14654), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (14649, 14654), False, 'from roomai.sevenking import SevenKingAction\n'), ((14809, 14836), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (14831, 14836), False, 'from roomai.sevenking import SevenKingAction\n'), ((11724, 11783), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][1]', 'license_card'], {}), '(point2cards[p][1], license_card)\n', (11750, 11783), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((12156, 12215), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][1]', 'license_card'], {}), '(point2cards[p][1], license_card)\n', (12182, 12215), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((12552, 12611), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][2]', 'license_card'], {}), '(point2cards[p][2], license_card)\n', (12578, 12611), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((13168, 13227), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][1]', 'license_card'], {}), '(point2cards[p][1], license_card)\n', (13194, 13227), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((13564, 13623), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][2]', 'license_card'], {}), '(point2cards[p][2], license_card)\n', (13590, 13623), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((14141, 14200), 'roomai.sevenking.SevenKingPokerCard.compare', 
'SevenKingPokerCard.compare', (['point2cards[p][3]', 'license_card'], {}), '(point2cards[p][3], license_card)\n', (14167, 14200), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((15647, 15674), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (15669, 15674), False, 'from roomai.sevenking import SevenKingAction\n'), ((16107, 16134), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (16129, 16134), False, 'from roomai.sevenking import SevenKingAction\n'), ((16526, 16553), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (16548, 16553), False, 'from roomai.sevenking import SevenKingAction\n'), ((16732, 16759), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (16754, 16759), False, 'from roomai.sevenking import SevenKingAction\n'), ((16938, 16965), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', (16960, 16965), False, 'from roomai.sevenking import SevenKingAction\n'), ((15317, 15376), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][2]', 'license_card'], {}), '(point2cards[p][2], license_card)\n', (15343, 15376), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((15777, 15836), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][2]', 'license_card'], {}), '(point2cards[p][2], license_card)\n', (15803, 15836), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((16198, 16257), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][3]', 'license_card'], {}), '(point2cards[p][3], license_card)\n', (16224, 16257), False, 'from roomai.sevenking import SevenKingPokerCard\n'), ((17972, 17999), 'roomai.sevenking.SevenKingAction.lookup', 'SevenKingAction.lookup', (['str'], {}), '(str)\n', 
(17994, 17999), False, 'from roomai.sevenking import SevenKingAction\n'), ((17444, 17503), 'roomai.sevenking.SevenKingPokerCard.compare', 'SevenKingPokerCard.compare', (['point2cards[p][3]', 'license_card'], {}), '(point2cards[p][3], license_card)\n', (17470, 17503), False, 'from roomai.sevenking import SevenKingPokerCard\n')] |
from __future__ import unicode_literals
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
from django.template import RequestContext, Template
from djblets.avatars.services.gravatar import GravatarService
from reviewboard.testing import TestCase
from reviewboard.avatars import avatar_services
from reviewboard.avatars.tests import DummyAvatarService
class TemplateTagTests(TestCase):
"""Tests for reviewboard.avatars.templatetags."""
fixtures = ['test_users']
@staticmethod
def _reset_avatar_services():
avatar_services.reset()
gravatar_service = avatar_services.get_avatar_service(
GravatarService.avatar_service_id)
avatar_services.enable_service(gravatar_service.avatar_service_id,
save=False)
avatar_services.set_default_service(gravatar_service, save=False)
@classmethod
def tearDownClass(cls):
super(TemplateTagTests, cls).tearDownClass()
cls._reset_avatar_services()
def setUp(self):
super(TemplateTagTests, self).setUp()
self._reset_avatar_services()
self.user = User.objects.get(username='doc')
self.request = HttpRequest()
self.request.user = AnonymousUser()
def test_avatar_urls(self):
"""Testing {% avatar_urls %} template tag"""
service = avatar_services.default_service
self.assertIsNotNone(service)
t = Template(
'{% load avatars %}'
'{% avatar_urls u 32 %}'
)
self.assertEqual(
t.render(RequestContext(self.request, {
'u': self.user,
'service_id': service.avatar_service_id,
})),
'{'
'"1x": "%(1x)s", '
'"3x": "%(3x)s", '
'"2x": "%(2x)s"'
'}'
% service.get_avatar_urls_uncached(self.user, 32)
)
def test_avatar_urls_with_service(self):
"""Testing {% avatar_urls %} template tag with avatar_service_id"""
service = DummyAvatarService(use_2x=True)
avatar_services.register(service)
avatar_services.enable_service(service.avatar_service_id)
t = Template(
'{% load avatars %}'
'{% avatar_urls u 32 service_id %}'
)
self.assertEqual(
t.render(RequestContext(self.request, {
'u': self.user,
'service_id': service.avatar_service_id,
})),
'{'
'"1x": "%(1x)s", '
'"2x": "%(2x)s"'
'}'
% service.get_avatar_urls_uncached(self.user, 32)
)
def test_avatar_urls_no_service(self):
"""Testing {% avatar_urls %} template tag with no available services"""
services = list(avatar_services)
for service in services:
avatar_services.unregister(service)
t = Template(
'{% load avatars %}'
'{% avatar_urls u 32 %}'
)
self.assertEqual(
t.render(RequestContext(self.request, {
'u': self.user,
})),
'{}')
def test_avatar_urls_service_not_found(self):
"""Testing {% avatar_urls %} template tag with an invalid service"""
service = avatar_services.default_service
self.assertIsNotNone(service)
self.assertIsNone(avatar_services.get_avatar_service(
DummyAvatarService.avatar_service_id))
t = Template(
'{% load avatars %}'
'{% avatar_urls u 32 service_id %}'
)
self.assertEqual(
t.render(RequestContext(self.request, {
'u': self.user,
'service_id': DummyAvatarService.avatar_service_id,
})),
'{'
'"1x": "%(1x)s", '
'"3x": "%(3x)s", '
'"2x": "%(2x)s"'
'}'
% service.get_avatar_urls_uncached(self.user, 32)
)
| [
"reviewboard.avatars.tests.DummyAvatarService",
"django.template.Template",
"django.contrib.auth.models.AnonymousUser",
"reviewboard.avatars.avatar_services.set_default_service",
"django.template.RequestContext",
"reviewboard.avatars.avatar_services.reset",
"reviewboard.avatars.avatar_services.get_avata... | [((580, 603), 'reviewboard.avatars.avatar_services.reset', 'avatar_services.reset', ([], {}), '()\n', (601, 603), False, 'from reviewboard.avatars import avatar_services\n'), ((631, 700), 'reviewboard.avatars.avatar_services.get_avatar_service', 'avatar_services.get_avatar_service', (['GravatarService.avatar_service_id'], {}), '(GravatarService.avatar_service_id)\n', (665, 700), False, 'from reviewboard.avatars import avatar_services\n'), ((722, 800), 'reviewboard.avatars.avatar_services.enable_service', 'avatar_services.enable_service', (['gravatar_service.avatar_service_id'], {'save': '(False)'}), '(gravatar_service.avatar_service_id, save=False)\n', (752, 800), False, 'from reviewboard.avatars import avatar_services\n'), ((848, 913), 'reviewboard.avatars.avatar_services.set_default_service', 'avatar_services.set_default_service', (['gravatar_service'], {'save': '(False)'}), '(gravatar_service, save=False)\n', (883, 913), False, 'from reviewboard.avatars import avatar_services\n'), ((1177, 1209), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""doc"""'}), "(username='doc')\n", (1193, 1209), False, 'from django.contrib.auth.models import AnonymousUser, User\n'), ((1233, 1246), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (1244, 1246), False, 'from django.http import HttpRequest\n'), ((1275, 1290), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (1288, 1290), False, 'from django.contrib.auth.models import AnonymousUser, User\n'), ((1478, 1530), 'django.template.Template', 'Template', (['"""{% load avatars %}{% avatar_urls u 32 %}"""'], {}), "('{% load avatars %}{% avatar_urls u 32 %}')\n", (1486, 1530), False, 'from django.template import RequestContext, Template\n'), ((2088, 2119), 'reviewboard.avatars.tests.DummyAvatarService', 'DummyAvatarService', ([], {'use_2x': '(True)'}), '(use_2x=True)\n', (2106, 2119), False, 'from 
reviewboard.avatars.tests import DummyAvatarService\n'), ((2128, 2161), 'reviewboard.avatars.avatar_services.register', 'avatar_services.register', (['service'], {}), '(service)\n', (2152, 2161), False, 'from reviewboard.avatars import avatar_services\n'), ((2170, 2227), 'reviewboard.avatars.avatar_services.enable_service', 'avatar_services.enable_service', (['service.avatar_service_id'], {}), '(service.avatar_service_id)\n', (2200, 2227), False, 'from reviewboard.avatars import avatar_services\n'), ((2241, 2304), 'django.template.Template', 'Template', (['"""{% load avatars %}{% avatar_urls u 32 service_id %}"""'], {}), "('{% load avatars %}{% avatar_urls u 32 service_id %}')\n", (2249, 2304), False, 'from django.template import RequestContext, Template\n'), ((2951, 3003), 'django.template.Template', 'Template', (['"""{% load avatars %}{% avatar_urls u 32 %}"""'], {}), "('{% load avatars %}{% avatar_urls u 32 %}')\n", (2959, 3003), False, 'from django.template import RequestContext, Template\n'), ((3530, 3593), 'django.template.Template', 'Template', (['"""{% load avatars %}{% avatar_urls u 32 service_id %}"""'], {}), "('{% load avatars %}{% avatar_urls u 32 service_id %}')\n", (3538, 3593), False, 'from django.template import RequestContext, Template\n'), ((2902, 2937), 'reviewboard.avatars.avatar_services.unregister', 'avatar_services.unregister', (['service'], {}), '(service)\n', (2928, 2937), False, 'from reviewboard.avatars import avatar_services\n'), ((3430, 3502), 'reviewboard.avatars.avatar_services.get_avatar_service', 'avatar_services.get_avatar_service', (['DummyAvatarService.avatar_service_id'], {}), '(DummyAvatarService.avatar_service_id)\n', (3464, 3502), False, 'from reviewboard.avatars import avatar_services\n'), ((1616, 1708), 'django.template.RequestContext', 'RequestContext', (['self.request', "{'u': self.user, 'service_id': service.avatar_service_id}"], {}), "(self.request, {'u': self.user, 'service_id': service.\n avatar_service_id})\n", 
(1630, 1708), False, 'from django.template import RequestContext, Template\n'), ((2390, 2482), 'django.template.RequestContext', 'RequestContext', (['self.request', "{'u': self.user, 'service_id': service.avatar_service_id}"], {}), "(self.request, {'u': self.user, 'service_id': service.\n avatar_service_id})\n", (2404, 2482), False, 'from django.template import RequestContext, Template\n'), ((3089, 3135), 'django.template.RequestContext', 'RequestContext', (['self.request', "{'u': self.user}"], {}), "(self.request, {'u': self.user})\n", (3103, 3135), False, 'from django.template import RequestContext, Template\n'), ((3679, 3781), 'django.template.RequestContext', 'RequestContext', (['self.request', "{'u': self.user, 'service_id': DummyAvatarService.avatar_service_id}"], {}), "(self.request, {'u': self.user, 'service_id':\n DummyAvatarService.avatar_service_id})\n", (3693, 3781), False, 'from django.template import RequestContext, Template\n')] |
import jwt
import requests
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicNumbers
from oauth2 import Oauth2, ResponseObject, RemoteUserProfile, OAuthDiscoveryError
class OpenID(Oauth2):
state_token: str or None = None
well_know_path_part: str = ".well-known" # it is path in url to .well-know https://tools.ietf.org/html/rfc5785
openid_conf_path_part = "openid-configuration" # it is path in url to openid configuration
auth_server_domain_name: str or None = None
_configuration: dict or None = None # https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig
was_load_openid_conf: bool = False
audience: str or None = None
jwk_cert: dict or None = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.openid_conf_path_part = kwargs.get("openid_conf_path_part") or self.openid_conf_path_part
self.state_token = kwargs.get('state_token')
self.jwk_cert = kwargs.get('jwk_cert')
self.audience = kwargs.get('audience')
@staticmethod
def get_jwt_header(token):
"""
:param token: the token is a jwt (that has name id_token in google notation)
:return: dict with key alg (algorithm), kid (key id), typ (token type(JWT always))
"""
return jwt.get_unverified_header(token)
@staticmethod
def jwk_to_rsa(jwk):
"""
Decoding jwk format to public rsa key carried out by next steps:
(jwk['e'] (RSA exponent) and jwk['n'] (RSA modulus) -> decode from base64url to binary -> encode binary to DEC (десятичная))
:param jwk: JSON Web Key https://tools.ietf.org/html/rfc7517
:return: RSA public key as instance of cryptography.hazmat.backends.openssl.rsa._RSAPublicKey
"""
e = jwk.get('e') # exponent
assert e, "jwk must contains the exponent value by key 'e'"
n = jwk.get('n') # modulus
assert n, "jwk must contains the modulus value by key 'n'"
base64_url_exp = jwt.api_jws.base64url_decode(e)
base64_url_modulus = jwt.api_jws.base64url_decode(n)
dec64_url_exp = int(base64_url_exp.hex(), base=16)
dec64_url_modulus = int(base64_url_modulus.hex(), base=16)
return RSAPublicNumbers(e=dec64_url_exp, n=dec64_url_modulus).public_key(default_backend())
def get_user_info_by_jwt(self, token, with_verify=True, **options)-> dict:
"""
:param token: jwt
:param with_verify: verify that response is not compromised
:param options: if verify is True we need to pass JWK value for verification
:return:
"""
user_info = {}
jwk = options.get("jwk")
jwt_decode_options = {}
audience = options.get('audience')
if audience:
jwt_decode_options['audience'] = audience
elif self.audience:
jwt_decode_options['audience'] = self.audience
rsa_key = self.jwk_to_rsa(jwk)
if with_verify:
user_info = jwt.decode(token, key=rsa_key, **jwt_decode_options)
else:
user_info = jwt.decode(token, verify=False)
return user_info
def is_state_token_valid(self, state):
# todo
return self.state_token == state
def get_user_profile(self, exchange_code_response: ResponseObject) -> RemoteUserProfile:
id_token = exchange_code_response.get('id_token')
assert id_token, "Wrong openid response from auth server. " \
"response must contains id_token (with JWT data) field"
options = {
'jwk': self.get_jwk_for_jwt(id_token)
}
return self.get_user_info_by_jwt(id_token, **options)
def load_configuration(self, configuration: dict = None):
if configuration is None:
configuration = self.get_oauth_conf_by_discovery_doc()
self._configuration = configuration
self.was_load_openid_conf = True
def get_oauth_conf_by_discovery_doc(self):
discovery_path = f'https://{self.auth_server_domain_name}/{self.well_know_path_part}/{self.openid_conf_path_part}'
response = requests.get(url=discovery_path)
if response.status_code != 200:
raise OAuthDiscoveryError()
configuration = response.json()
return configuration
def get_jwk_for_jwt(self, token):
if self.jwk_cert is None:
jwt_header = self.get_jwt_header(token)
self.load_jwk_by_remote(jwt_header.get('kid'))
return self.jwk_cert
def load_jwk_by_remote(self, kid: str):
"""
:param kid: remote key id
:return: jwk
"""
assert kid, "You cant load jwk if key id (kid) is not set"
configuration = self.get_configuration()
jwks_uri = configuration.get('jwks_uri')
certs_response = requests.get(url=jwks_uri).json()
for key in certs_response['keys']:
if key['kid'] == kid:
self.load_jwk(key)
break
def load_jwk(self, jwk):
self.jwk_cert = jwk
| [
"jwt.decode",
"requests.get",
"jwt.get_unverified_header",
"jwt.api_jws.base64url_decode",
"oauth2.OAuthDiscoveryError",
"cryptography.hazmat.backends.default_backend",
"cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicNumbers"
] | [((1388, 1420), 'jwt.get_unverified_header', 'jwt.get_unverified_header', (['token'], {}), '(token)\n', (1413, 1420), False, 'import jwt\n'), ((2101, 2132), 'jwt.api_jws.base64url_decode', 'jwt.api_jws.base64url_decode', (['e'], {}), '(e)\n', (2129, 2132), False, 'import jwt\n'), ((2162, 2193), 'jwt.api_jws.base64url_decode', 'jwt.api_jws.base64url_decode', (['n'], {}), '(n)\n', (2190, 2193), False, 'import jwt\n'), ((4240, 4272), 'requests.get', 'requests.get', ([], {'url': 'discovery_path'}), '(url=discovery_path)\n', (4252, 4272), False, 'import requests\n'), ((2403, 2420), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (2418, 2420), False, 'from cryptography.hazmat.backends import default_backend\n'), ((3106, 3158), 'jwt.decode', 'jwt.decode', (['token'], {'key': 'rsa_key'}), '(token, key=rsa_key, **jwt_decode_options)\n', (3116, 3158), False, 'import jwt\n'), ((3197, 3228), 'jwt.decode', 'jwt.decode', (['token'], {'verify': '(False)'}), '(token, verify=False)\n', (3207, 3228), False, 'import jwt\n'), ((4332, 4353), 'oauth2.OAuthDiscoveryError', 'OAuthDiscoveryError', ([], {}), '()\n', (4351, 4353), False, 'from oauth2 import Oauth2, ResponseObject, RemoteUserProfile, OAuthDiscoveryError\n'), ((2337, 2391), 'cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicNumbers', 'RSAPublicNumbers', ([], {'e': 'dec64_url_exp', 'n': 'dec64_url_modulus'}), '(e=dec64_url_exp, n=dec64_url_modulus)\n', (2353, 2391), False, 'from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicNumbers\n'), ((4951, 4977), 'requests.get', 'requests.get', ([], {'url': 'jwks_uri'}), '(url=jwks_uri)\n', (4963, 4977), False, 'import requests\n')] |
from dvc_preprocessing import plot, preprocessing, constants
from skimage.filters import threshold_otsu
import numpy as np
def auto_processing(filename, dirpath='./', data_type=np.int16, init_slice=0, final_slice="last", outname="output", ret="True"):
'''
TODO: add outpath
'''
stack = preprocessing.read_images_from_h5(filename, data_type, dirpath)
threshold_value = threshold_otsu(stack)
print(f'Threshold value: {threshold_value}.')
stack = preprocessing.intensity_rescaling(stack)
if data_type == np.int8:
stack[stack < threshold_value] = constants.INT8MINVAL()
else:
stack[stack < threshold_value] = constants.INT16MINVAL()
if final_slice == "last":
final_slice = stack.shape[0]
CoM = preprocessing.volume_CoM(stack, init_slice, final_slice)
print(f'The center of mass is {CoM}')
if init_slice != 0 or final_slice != "last":
stack = preprocessing.crop_around_CoM(
stack, CoM, (init_slice, final_slice))
else:
stack = preprocessing.crop_around_CoM(stack, CoM)
preprocessing.save_3d_tiff(stack, outname, dirpath)
if ret == True:
return stack, CoM, threshold_value
| [
"dvc_preprocessing.constants.INT8MINVAL",
"dvc_preprocessing.preprocessing.save_3d_tiff",
"skimage.filters.threshold_otsu",
"dvc_preprocessing.preprocessing.volume_CoM",
"dvc_preprocessing.preprocessing.read_images_from_h5",
"dvc_preprocessing.preprocessing.crop_around_CoM",
"dvc_preprocessing.preproces... | [((305, 368), 'dvc_preprocessing.preprocessing.read_images_from_h5', 'preprocessing.read_images_from_h5', (['filename', 'data_type', 'dirpath'], {}), '(filename, data_type, dirpath)\n', (338, 368), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((392, 413), 'skimage.filters.threshold_otsu', 'threshold_otsu', (['stack'], {}), '(stack)\n', (406, 413), False, 'from skimage.filters import threshold_otsu\n'), ((477, 517), 'dvc_preprocessing.preprocessing.intensity_rescaling', 'preprocessing.intensity_rescaling', (['stack'], {}), '(stack)\n', (510, 517), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((766, 822), 'dvc_preprocessing.preprocessing.volume_CoM', 'preprocessing.volume_CoM', (['stack', 'init_slice', 'final_slice'], {}), '(stack, init_slice, final_slice)\n', (790, 822), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((1086, 1137), 'dvc_preprocessing.preprocessing.save_3d_tiff', 'preprocessing.save_3d_tiff', (['stack', 'outname', 'dirpath'], {}), '(stack, outname, dirpath)\n', (1112, 1137), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((589, 611), 'dvc_preprocessing.constants.INT8MINVAL', 'constants.INT8MINVAL', ([], {}), '()\n', (609, 611), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((663, 686), 'dvc_preprocessing.constants.INT16MINVAL', 'constants.INT16MINVAL', ([], {}), '()\n', (684, 686), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((931, 999), 'dvc_preprocessing.preprocessing.crop_around_CoM', 'preprocessing.crop_around_CoM', (['stack', 'CoM', '(init_slice, final_slice)'], {}), '(stack, CoM, (init_slice, final_slice))\n', (960, 999), False, 'from dvc_preprocessing import plot, preprocessing, constants\n'), ((1039, 1080), 'dvc_preprocessing.preprocessing.crop_around_CoM', 'preprocessing.crop_around_CoM', (['stack', 'CoM'], {}), '(stack, CoM)\n', (1068, 
1080), False, 'from dvc_preprocessing import plot, preprocessing, constants\n')] |
from __future__ import absolute_import, division, print_function
import pytest
from ..spparser import Scanner
scanner = Scanner()
# Test of a single instance of each token. Does not test them in
# context, but at least it tests that each one is recognized.
tokens = [
# bug: the original pysynphot could not recognize integer
# ('INTEGER', '1'),
# basic float
('FLOAT', '.1'),
('FLOAT', '1.1'),
('FLOAT', '1.'),
('FLOAT', '1'),
# basic float with e+
('FLOAT', '.1e+1'),
('FLOAT', '1.1e+1'),
('FLOAT', '1.e+1'),
('FLOAT', '1e+1'),
# basic float with e-
('FLOAT', '.1e-1'),
('FLOAT', '1.1e-1'),
('FLOAT', '1.e-1'),
('FLOAT', '1e-1'),
# basic float with e
('FLOAT', '.1e1'),
('FLOAT', '1.1e1'),
('FLOAT', '1.e1'),
('FLOAT', '1e1'),
# identifier
('IDENTIFIER', 'xyzzy'),
('IDENTIFIER', 'xyzzy20'),
('IDENTIFIER', '20xyzzy'),
('IDENTIFIER', '20xyzzy20'),
# special characters
('LPAREN', '('),
('RPAREN', ')'),
(',', ','),
('/', ' / '),
# filename
('IDENTIFIER', '/a/b/c'),
('IDENTIFIER', 'foo$bar'),
('IDENTIFIER', 'a/b'),
# file list
('FILELIST', '@arf'),
('FILELIST', '@narf')]
def print_token_list(tklist):
s = 'Token list: {} items\n'.format(len(tklist))
for x in tklist:
s += '{:<20s} \n'.format(x.type, x.attr)
s += '---\n'
return s
def ptl2(tkl):
"""
Use this to generate the list of tokens in a form easy to copy/paste
into a test.
"""
s = ''
for x in tkl:
s += ' ( "{}", {} ), \n'.format(x.type, repr(x.attr))
s += '\n'
return s
def stream_t(text, result):
"""
Parse a bit of text and compare it to the expected token stream.
Each actual test function calls this.
"""
tkl = scanner.tokenize(text)
msg = print_token_list(tkl)
assert result is not None, \
msg + 'NO EXPECT LIST\n [\n' + ptl2(tkl) + ' ]\n'
for n, (expect, actual) in enumerate(zip(result, tkl)):
assert expect[0] == actual.type and expect[1] == actual.attr, \
(msg + '{} expect={} actual=({}, {})'.format(
n, expect, actual.type, actual.attr))
@pytest.mark.parametrize(
('text', 'result'),
[('spec($PYSYN_CDBS//calspec/gd71_mod_005.fits)',
[('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', '$PYSYN_CDBS//calspec/gd71_mod_005.fits'),
('RPAREN', None)]),
(('spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),'
'band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+'
'spec(el1302a.fits)+spec(el1356a.fits)+'
'spec(el2471a.fits))*0.5'),
[('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'earthshine.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.5'),
('+', None),
('IDENTIFIER', 'rn'),
('LPAREN', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'Zodi.fits'),
('RPAREN', None),
(',', None),
('IDENTIFIER', 'band'),
('LPAREN', None),
('IDENTIFIER', 'johnson'),
(',', None),
('IDENTIFIER', 'v'),
('RPAREN', None),
(',', None),
('FLOAT', '22.7'),
(',', None),
('IDENTIFIER', 'vegamag'),
('RPAREN', None),
('+', None),
('LPAREN', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1215a.fits'),
('RPAREN', None),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1302a.fits'),
('RPAREN', None),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1356a.fits'),
('RPAREN', None),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el2471a.fits'),
('RPAREN', None),
('RPAREN', None),
('*', None),
('FLOAT', '0.5')]),
(('spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),'
'band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)*0.1+'
'spec(el1302a.fits)*0.066666667+spec(el1356a.fits)*0.0060+'
'spec(el2471a.fits)*0.0050)'),
[('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'earthshine.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.5'),
('+', None),
('IDENTIFIER', 'rn'),
('LPAREN', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'Zodi.fits'),
('RPAREN', None),
(',', None),
('IDENTIFIER', 'band'),
('LPAREN', None),
('IDENTIFIER', 'johnson'),
(',', None),
('IDENTIFIER', 'v'),
('RPAREN', None),
(',', None),
('FLOAT', '22.7'),
(',', None),
('IDENTIFIER', 'vegamag'),
('RPAREN', None),
('+', None),
('LPAREN', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1215a.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.1'),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1302a.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.066666667'),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1356a.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.0060'),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el2471a.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.0050'),
('RPAREN', None)]),
(('spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),'
'22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+'
'spec(el1356a.fits)+spec(el2471a.fits))'),
[('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'earthshine.fits'),
('RPAREN', None),
('*', None),
('FLOAT', '0.5'),
('+', None),
('IDENTIFIER', 'rn'),
('LPAREN', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'Zodi.fits'),
('RPAREN', None),
(',', None),
('IDENTIFIER', 'band'),
('LPAREN', None),
('IDENTIFIER', 'johnson'),
(',', None),
('IDENTIFIER', 'v'),
('RPAREN', None),
(',', None),
('FLOAT', '22.7'),
(',', None),
('IDENTIFIER', 'vegamag'),
('RPAREN', None),
('+', None),
('LPAREN', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1215a.fits'),
('RPAREN', None),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1302a.fits'),
('RPAREN', None),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el1356a.fits'),
('RPAREN', None),
('+', None),
('IDENTIFIER', 'spec'),
('LPAREN', None),
('IDENTIFIER', 'el2471a.fits'),
('RPAREN', None),
('RPAREN', None)])])
def test_stream(text, result):
stream_t(text, result)
@pytest.mark.xfail(reason='does not work')
@pytest.mark.parametrize(
('text', 'result'),
[('rn(unit(1.,flam),band(stis,ccd,g430m,c4451,52X0.2),10.000000,abmag)',
[('IDENTIFIER', 'rn'),
('LPAREN', None),
('IDENTIFIER', 'unit'),
('LPAREN', None),
('FLOAT', '1.'),
(',', None),
('IDENTIFIER', 'flam'),
('RPAREN', None),
(',', None),
('IDENTIFIER', 'band'),
('LPAREN', None),
('IDENTIFIER', 'stis'),
(',', None),
('IDENTIFIER', 'ccd'),
(',', None),
('IDENTIFIER', 'g430m'),
(',', None),
('IDENTIFIER', 'c4451'),
(',', None),
('IDENTIFIER', '52X0.2'),
('RPAREN', None),
(',', None),
('FLOAT', '10.000000'),
(',', None),
('IDENTIFIER', 'abmag'),
('RPAREN', None)]),
('rn(unit(1.,flam),band(stis,ccd,mirror,50CCD),10.000000,abmag)',
[('IDENTIFIER', 'rn'),
('LPAREN', None),
('IDENTIFIER', 'unit'),
('LPAREN', None),
('FLOAT', '1.'),
(',', None),
('IDENTIFIER', 'flam'),
('RPAREN', None),
(',', None),
('IDENTIFIER', 'band'),
('LPAREN', None),
('IDENTIFIER', 'stis'),
(',', None),
('IDENTIFIER', 'ccd'),
(',', None),
('IDENTIFIER', 'mirror'),
(',', None),
('IDENTIFIER', '50CCD'),
('RPAREN', None),
(',', None),
('FLOAT', '10.000000'),
(',', None),
('IDENTIFIER', 'abmag'),
('RPAREN', None)])])
def test_stream_xfail(text, result):
stream_t(text, result)
@pytest.mark.xfail(reason='does not work')
def test_tokens():
for x in tokens:
typ, val = x
tkl = scanner.tokenize(val)
assert len(tkl) == 1, 'too many tokens\n' + print_token_list(tkl)
assert tkl[0].type == typ, \
('wrong type: found {} want {}\n'.format(tkl[0].type, typ) +
print_token_list(tkl))
assert tkl[0].attr == val or tkl[0].attr is None or \
(val.startswith('@') and tkl[0].attr == val[1:]), \
('token value incorrect: found {} want {}'.format(
tkl[0].attr, val) + print_token_list(tkl))
| [
"pytest.mark.xfail",
"pytest.mark.parametrize"
] | [((2241, 6212), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('text', 'result')", "[('spec($PYSYN_CDBS//calspec/gd71_mod_005.fits)', [('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER',\n '$PYSYN_CDBS//calspec/gd71_mod_005.fits'), ('RPAREN', None)]), (\n 'spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))*0.5'\n , [('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'earthshine.fits'), ('RPAREN', None), ('*', None), ('FLOAT', '0.5'), (\n '+', None), ('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER',\n 'spec'), ('LPAREN', None), ('IDENTIFIER', 'Zodi.fits'), ('RPAREN', None\n ), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None), ('IDENTIFIER',\n 'johnson'), (',', None), ('IDENTIFIER', 'v'), ('RPAREN', None), (',',\n None), ('FLOAT', '22.7'), (',', None), ('IDENTIFIER', 'vegamag'), (\n 'RPAREN', None), ('+', None), ('LPAREN', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1215a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el1302a.fits'), ('RPAREN', None), ('+', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1356a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el2471a.fits'), ('RPAREN', None), ('RPAREN', None), ('*', None), (\n 'FLOAT', '0.5')]), (\n 'spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)*0.1+spec(el1302a.fits)*0.066666667+spec(el1356a.fits)*0.0060+spec(el2471a.fits)*0.0050)'\n , [('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'earthshine.fits'), ('RPAREN', None), ('*', None), ('FLOAT', '0.5'), (\n '+', None), ('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER',\n 'spec'), ('LPAREN', None), ('IDENTIFIER', 'Zodi.fits'), ('RPAREN', None\n ), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None), 
('IDENTIFIER',\n 'johnson'), (',', None), ('IDENTIFIER', 'v'), ('RPAREN', None), (',',\n None), ('FLOAT', '22.7'), (',', None), ('IDENTIFIER', 'vegamag'), (\n 'RPAREN', None), ('+', None), ('LPAREN', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1215a.fits'), ('RPAREN', None), (\n '*', None), ('FLOAT', '0.1'), ('+', None), ('IDENTIFIER', 'spec'), (\n 'LPAREN', None), ('IDENTIFIER', 'el1302a.fits'), ('RPAREN', None), ('*',\n None), ('FLOAT', '0.066666667'), ('+', None), ('IDENTIFIER', 'spec'), (\n 'LPAREN', None), ('IDENTIFIER', 'el1356a.fits'), ('RPAREN', None), ('*',\n None), ('FLOAT', '0.0060'), ('+', None), ('IDENTIFIER', 'spec'), (\n 'LPAREN', None), ('IDENTIFIER', 'el2471a.fits'), ('RPAREN', None), ('*',\n None), ('FLOAT', '0.0050'), ('RPAREN', None)]), (\n 'spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))'\n , [('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'earthshine.fits'), ('RPAREN', None), ('*', None), ('FLOAT', '0.5'), (\n '+', None), ('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER',\n 'spec'), ('LPAREN', None), ('IDENTIFIER', 'Zodi.fits'), ('RPAREN', None\n ), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None), ('IDENTIFIER',\n 'johnson'), (',', None), ('IDENTIFIER', 'v'), ('RPAREN', None), (',',\n None), ('FLOAT', '22.7'), (',', None), ('IDENTIFIER', 'vegamag'), (\n 'RPAREN', None), ('+', None), ('LPAREN', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1215a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el1302a.fits'), ('RPAREN', None), ('+', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1356a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el2471a.fits'), ('RPAREN', None), ('RPAREN', None)])]"], {}), "(('text', 'result'), [(\n 
'spec($PYSYN_CDBS//calspec/gd71_mod_005.fits)', [('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER',\n '$PYSYN_CDBS//calspec/gd71_mod_005.fits'), ('RPAREN', None)]), (\n 'spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))*0.5'\n , [('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'earthshine.fits'), ('RPAREN', None), ('*', None), ('FLOAT', '0.5'), (\n '+', None), ('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER',\n 'spec'), ('LPAREN', None), ('IDENTIFIER', 'Zodi.fits'), ('RPAREN', None\n ), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None), ('IDENTIFIER',\n 'johnson'), (',', None), ('IDENTIFIER', 'v'), ('RPAREN', None), (',',\n None), ('FLOAT', '22.7'), (',', None), ('IDENTIFIER', 'vegamag'), (\n 'RPAREN', None), ('+', None), ('LPAREN', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1215a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el1302a.fits'), ('RPAREN', None), ('+', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1356a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el2471a.fits'), ('RPAREN', None), ('RPAREN', None), ('*', None), (\n 'FLOAT', '0.5')]), (\n 'spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)*0.1+spec(el1302a.fits)*0.066666667+spec(el1356a.fits)*0.0060+spec(el2471a.fits)*0.0050)'\n , [('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'earthshine.fits'), ('RPAREN', None), ('*', None), ('FLOAT', '0.5'), (\n '+', None), ('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER',\n 'spec'), ('LPAREN', None), ('IDENTIFIER', 'Zodi.fits'), ('RPAREN', None\n ), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None), ('IDENTIFIER',\n 'johnson'), (',', None), ('IDENTIFIER', 'v'), ('RPAREN', None), (',',\n None), ('FLOAT', '22.7'), 
(',', None), ('IDENTIFIER', 'vegamag'), (\n 'RPAREN', None), ('+', None), ('LPAREN', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1215a.fits'), ('RPAREN', None), (\n '*', None), ('FLOAT', '0.1'), ('+', None), ('IDENTIFIER', 'spec'), (\n 'LPAREN', None), ('IDENTIFIER', 'el1302a.fits'), ('RPAREN', None), ('*',\n None), ('FLOAT', '0.066666667'), ('+', None), ('IDENTIFIER', 'spec'), (\n 'LPAREN', None), ('IDENTIFIER', 'el1356a.fits'), ('RPAREN', None), ('*',\n None), ('FLOAT', '0.0060'), ('+', None), ('IDENTIFIER', 'spec'), (\n 'LPAREN', None), ('IDENTIFIER', 'el2471a.fits'), ('RPAREN', None), ('*',\n None), ('FLOAT', '0.0050'), ('RPAREN', None)]), (\n 'spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))'\n , [('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'earthshine.fits'), ('RPAREN', None), ('*', None), ('FLOAT', '0.5'), (\n '+', None), ('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER',\n 'spec'), ('LPAREN', None), ('IDENTIFIER', 'Zodi.fits'), ('RPAREN', None\n ), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None), ('IDENTIFIER',\n 'johnson'), (',', None), ('IDENTIFIER', 'v'), ('RPAREN', None), (',',\n None), ('FLOAT', '22.7'), (',', None), ('IDENTIFIER', 'vegamag'), (\n 'RPAREN', None), ('+', None), ('LPAREN', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1215a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el1302a.fits'), ('RPAREN', None), ('+', None), ('IDENTIFIER', 'spec'),\n ('LPAREN', None), ('IDENTIFIER', 'el1356a.fits'), ('RPAREN', None), (\n '+', None), ('IDENTIFIER', 'spec'), ('LPAREN', None), ('IDENTIFIER',\n 'el2471a.fits'), ('RPAREN', None), ('RPAREN', None)])])\n", (2264, 6212), False, 'import pytest\n'), ((7236, 7277), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""does not work"""'}), "(reason='does not work')\n", 
(7253, 7277), False, 'import pytest\n'), ((7279, 8496), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('text', 'result')", "[('rn(unit(1.,flam),band(stis,ccd,g430m,c4451,52X0.2),10.000000,abmag)', [(\n 'IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER', 'unit'), (\n 'LPAREN', None), ('FLOAT', '1.'), (',', None), ('IDENTIFIER', 'flam'),\n ('RPAREN', None), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None),\n ('IDENTIFIER', 'stis'), (',', None), ('IDENTIFIER', 'ccd'), (',', None),\n ('IDENTIFIER', 'g430m'), (',', None), ('IDENTIFIER', 'c4451'), (',',\n None), ('IDENTIFIER', '52X0.2'), ('RPAREN', None), (',', None), (\n 'FLOAT', '10.000000'), (',', None), ('IDENTIFIER', 'abmag'), ('RPAREN',\n None)]), (\n 'rn(unit(1.,flam),band(stis,ccd,mirror,50CCD),10.000000,abmag)', [(\n 'IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER', 'unit'), (\n 'LPAREN', None), ('FLOAT', '1.'), (',', None), ('IDENTIFIER', 'flam'),\n ('RPAREN', None), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None),\n ('IDENTIFIER', 'stis'), (',', None), ('IDENTIFIER', 'ccd'), (',', None),\n ('IDENTIFIER', 'mirror'), (',', None), ('IDENTIFIER', '50CCD'), (\n 'RPAREN', None), (',', None), ('FLOAT', '10.000000'), (',', None), (\n 'IDENTIFIER', 'abmag'), ('RPAREN', None)])]"], {}), "(('text', 'result'), [(\n 'rn(unit(1.,flam),band(stis,ccd,g430m,c4451,52X0.2),10.000000,abmag)',\n [('IDENTIFIER', 'rn'), ('LPAREN', None), ('IDENTIFIER', 'unit'), (\n 'LPAREN', None), ('FLOAT', '1.'), (',', None), ('IDENTIFIER', 'flam'),\n ('RPAREN', None), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None),\n ('IDENTIFIER', 'stis'), (',', None), ('IDENTIFIER', 'ccd'), (',', None),\n ('IDENTIFIER', 'g430m'), (',', None), ('IDENTIFIER', 'c4451'), (',',\n None), ('IDENTIFIER', '52X0.2'), ('RPAREN', None), (',', None), (\n 'FLOAT', '10.000000'), (',', None), ('IDENTIFIER', 'abmag'), ('RPAREN',\n None)]), (\n 'rn(unit(1.,flam),band(stis,ccd,mirror,50CCD),10.000000,abmag)', [(\n 'IDENTIFIER', 'rn'), ('LPAREN', 
None), ('IDENTIFIER', 'unit'), (\n 'LPAREN', None), ('FLOAT', '1.'), (',', None), ('IDENTIFIER', 'flam'),\n ('RPAREN', None), (',', None), ('IDENTIFIER', 'band'), ('LPAREN', None),\n ('IDENTIFIER', 'stis'), (',', None), ('IDENTIFIER', 'ccd'), (',', None),\n ('IDENTIFIER', 'mirror'), (',', None), ('IDENTIFIER', '50CCD'), (\n 'RPAREN', None), (',', None), ('FLOAT', '10.000000'), (',', None), (\n 'IDENTIFIER', 'abmag'), ('RPAREN', None)])])\n", (7302, 8496), False, 'import pytest\n'), ((8850, 8891), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""does not work"""'}), "(reason='does not work')\n", (8867, 8891), False, 'import pytest\n')] |
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import unittest
from xml.etree import ElementTree as etree
import os
from proton import template
class TestTwoTemplatesFunctionality(unittest.TestCase):
def setUp(self):
template.base_dir = os.path.dirname(os.path.realpath(__file__))
def applydata(self, tmp):
tmp.repeat('list', 2)
items = [ 'A', 'B' ]
for x in range(0, 2):
y = x + 1
tmp.set_value('listid', str(y), x)
tmp.set_attribute('listid', 'id', str(y), x)
tmp.set_value('listval', 'my item %s' % items[x], x)
def test_two_templates(self):
tmp1 = template.get_template('twotemplates.xhtml')
self.applydata(tmp1)
print("\nXHTML:\n%s" % str(tmp1))
et = etree.fromstring(str(tmp1))
td = et.findall('.//td')
self.assert_(td[1].text == '1', 'expected 1 was %s' % td[1].text)
self.assert_(td[3].text == 'my item A')
self.assert_(td[5].text == '2')
self.assert_(td[7].text == 'my item B')
tmp2 = template.get_template('twotemplates.xml')
self.applydata(tmp2)
print("\nXML:\n%s\n" % str(tmp2))
et = etree.fromstring(str(tmp2))
item = et.findall('item')
self.assert_(item[0].attrib['id'] == '1')
self.assert_(item[0].text == 'my item A')
self.assert_(item[1].attrib['id'] == '2')
self.assert_(item[1].text == 'my item B')
| [
"os.path.realpath",
"proton.template.get_template"
] | [((1270, 1313), 'proton.template.get_template', 'template.get_template', (['"""twotemplates.xhtml"""'], {}), "('twotemplates.xhtml')\n", (1291, 1313), False, 'from proton import template\n'), ((1686, 1727), 'proton.template.get_template', 'template.get_template', (['"""twotemplates.xml"""'], {}), "('twotemplates.xml')\n", (1707, 1727), False, 'from proton import template\n'), ((881, 907), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (897, 907), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InvalidOrder
from ccxt.base.decimal_to_precision import SIGNIFICANT_DIGITS
import json
import sys
from datetime import datetime
class tprexchange(Exchange):
    def describe(self):
        """Return the static exchange descriptor for ccxt.

        Merges this exchange's configuration (id, capability flags,
        timeframes, endpoint routing, credentials, error mapping) into the
        base ``Exchange.describe()`` result via ``deep_extend``.
        """
        return self.deep_extend(super(tprexchange, self).describe(), {
            'id': 'tprexchange',
            'name': 'TPR Exchange',
            # 'countries': ['US'],
            # 'rateLimit': 500,
            'version': 'v1',
            'certified': False,
            # Capability flags: which unified ccxt methods this adapter implements.
            'has': {
                'loadMarkets': True,
                'cancelAllOrders': False,
                'cancelOrder': True,
                'cancelOrders': False,
                'CORS': False,
                'createDepositAddress': False,
                'createLimitOrder': False,
                'createMarketOrder': False,
                'createOrder': True,
                'deposit': False,
                'editOrder': 'emulated',
                'fetchBalance': True,
                'fetchBidsAsks': False,
                'fetchClosedOrders': True,
                'fetchCurrencies': True,
                'fetchDepositAddress': False,
                'fetchDeposits': False,
                'fetchFundingFees': False,
                'fetchL2OrderBook': False,
                'fetchLedger': False,
                'fetchMarkets': True,
                'fetchMyTrades': True,
                'fetchOHLCV': True,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchOrderBooks': False,
                'fetchOrders': True,
                'fetchOrderTrades': False,
                'fetchStatus': True,
                'fetchTicker': True,
                'fetchTickers': True,
                'fetchTime': False,
                'fetchTrades': True,
                'fetchTradingFee': False,
                'fetchTradingFees': False,
                'fetchTradingLimits': False,
                'fetchTransactions': False,
                'fetchWithdrawals': False,
                'privateAPI': True,
                'publicAPI': False,
                'signIn': True,
                'withdraw': False,
                'getMarketPrice': True,
            },
            # Candle timeframes mapped to the API's period values (in minutes).
            'timeframes': {
                '1m': '1',
                '1h': '60',
                '1d': '1440',
                '1w': '10080',
                '1mn': '43200',
            },
            'urls': {
                'logo': '',
                'api': '{hostname}',  # filled in from the configured hostname
                'www': '',
                'doc': '',
                'fees': '',
                'referral': '',
            },
            # REST endpoint routing table, grouped by access level and HTTP verb.
            'api': {
                'private': {
                    'get': [
                    ],
                    'post': [
                        'ucenter/api-login',
                        'ucenter/member/balance',
                        'market/symbol-thumb',
                        'market/coins-info',
                        'market/symbol-info',
                        'exchange/order/add',
                        'exchange/order/find',
                        'exchange/order/all',
                        'exchange/order/apicancel',
                        'exchange/order/trades',
                        'exchange/order/my-trades',
                        'exchange/exchange-coin/base-symbol',
                    ],
                    'delete': [
                    ],
                },
                'feed': {
                    'get': [
                    ],
                },
            },
            'fees': {
                'trading': {
                },
            },
            'requiredCredentials': {
                'apiKey': True,
                'secret': True,
                'uid': False,
            },
            'precisionMode': SIGNIFICANT_DIGITS,
            'options': {
                'createMarketBuyOrderRequiresPrice': False,
            },
            # Map raw API error messages to ccxt exception classes:
            # 'exact' requires a full-string match, 'broad' a substring match.
            'exceptions': {
                'exact': {
                    'Invalid cost': InvalidOrder,  # {"message":"Invalid cost","_links":{"self":{"href":"/orders","templated":false}}}
                    'Invalid order ID': InvalidOrder,  # {"message":"Invalid order ID","_links":{"self":{"href":"/orders/4a151805-d594-4a96-9d64-e3984f2441f7","templated":false}}}
                    'Invalid market !': BadSymbol,  # {"message":"Invalid market !","_links":{"self":{"href":"/markets/300/order-book","templated":false}}}
                },
                'broad': {
                    'Failed to convert argument': BadRequest,
                },
            },
        })
    def parse_ticker(self, response):
        """Build a unified ccxt ticker dict from a list of unified orders.

        `response` is the output of fetch_orders() for one symbol.  Returns []
        when there are no orders (callers such as fetch_tickers test only the
        length of the result).  Fields this backend cannot supply are reported
        as the string 'None'.
        """
        if len(response) == 0:
            return []
        symbol = response[0].get('symbol')
        # Running aggregates accumulated over the order list below.
        high = 0
        bidVolume = 0
        askVolume = 0
        vwap = 0
        vwapCost = 0
        vwapVolume = 0
        open_ = 'None'
        close = 0
        last = close
        previousClose = 'None'
        change = 'None'
        percentage = 'None'
        average = 'None'
        baseVolume = 0
        quoteVolume = 0
        time = 0
        # Volumes are only accumulated for trades within the trailing 24h.
        lastDayTime = int((datetime.now().timestamp() - 86400) * 1000)
        currentTimestamp = int(datetime.now().timestamp() * 1000)
        currentDatetime = str(datetime.fromtimestamp(currentTimestamp * 0.001))
        low = response[0].get('price')
        bid = 0
        ask = sys.maxsize  # sentinel; lowered to the cheapest open ask below
        openSellOrdersCount = 0
        for order in response:
            price = order.get('price')
            amount = order.get('amount')
            timestamp = order.get('timestamp')
            if high < price:
                high = price
            if low > price:
                low = price
            # Best bid: highest price among open buy orders.
            if order.get('status') == 'open':
                if order.get('side') == 'buy':
                    if bid < price:
                        bid = price
                    if bidVolume < amount:
                        bidVolume = amount
            # Best ask: lowest price among open sell orders.
            if order.get('status') == 'open':
                if order.get('side') == 'sell':
                    openSellOrdersCount += 1
                    if ask > price:
                        ask = price
                    if askVolume < amount:
                        askVolume = amount
            # Completed orders feed the VWAP, the last trade price (close)
            # and the 24h base/quote volumes.
            if order.get('info').get('status') == 'COMPLETED':
                vwapCost += price * amount
                vwapVolume += amount
                if time < timestamp:
                    time = timestamp
                    close = price
                if timestamp > lastDayTime:
                    quoteVolume += amount
                    baseVolume += price
        if vwapVolume != 0:
            vwap = vwapCost / vwapVolume
        if openSellOrdersCount == 0:
            ask = 0  # no open sell orders: report 0 instead of the sentinel
        last = close
        result = {
            'symbol': symbol,
            'info': response,
            'timestamp': currentTimestamp,
            'datetime': currentDatetime,
            'high': high,
            'low': low,
            'bid': bid,
            'bidVolume': bidVolume,
            'ask': ask,
            'askVolume': askVolume,
            'vwap': vwap,
            'open': open_,
            'close': close,
            'last': last,
            'previousClose': previousClose,
            'change': change,
            'percentage': percentage,
            'average': average,
            'baseVolume': baseVolume,
            'quoteVolume': quoteVolume,
        }
        return result
def fetch_ticker(self, symbol, since=None, limit=None):
response = self.fetch_orders(symbol, since, limit)
# Response example:
# {
# 'symbol': 'BTC/USDT',
# 'info': [...],
# 'timestamp': 1615386851976,
# 'datetime': '2021-03-10 16:34:11.976000',
# 'high': 50.0,
# 'low': 1.0,
# 'bid': 30.0,
# 'bidVolume': 15.0,
# 'ask': 40.0,
# 'askVolume': 25.0,
# 'vwap': 11.0,
# 'open': 'None',
# 'close': 20.0,
# 'last': 20.0,
# 'previousClose': 'None',
# 'change': 'None',
# 'percentage': 'None',
# 'average': 'None',
# 'baseVolume': 60.0,
# 'quoteVolume': 30.0
# }
return self.parse_ticker(response)
def fetch_tickers(self, since=None, limit=None):
# Response example:
# [
# {
# 'symbol': 'BTC/USDT',
# 'info': [...],
# 'timestamp': 1615386851976,
# 'datetime': '2021-03-10 16:34:11.976000',
# 'high': 50.0,
# 'low': 1.0,
# 'bid': 30.0,
# 'bidVolume': 15.0,
# 'ask': 40.0,
# 'askVolume': 25.0,
# 'vwap': 11.0,
# 'open': 'None',
# 'close': 20.0,
# 'last': 20.0,
# 'previousClose': 'None',
# 'change': 'None',
# 'percentage': 'None',
# 'average': 'None',
# 'baseVolume': 60.0,
# 'quoteVolume': 30.0
# },
# ...
# ]
result = []
symbols = self.fetch_markets()
for symblol in symbols:
response = self.fetch_orders(symblol.get('symbol'), since, limit)
ticker = self.parse_ticker(response)
if len(ticker) != 0:
result.append(ticker)
return result
def fetch_order_book(self, symbol, limit, since=0):
# Response example:
# {
# 'bids':
# [
# [20.0, 10.0, 'E161538482263642'], // [price, amount, orderId]
# [30.0, 15.0, 'E161538482271646']
# ],
# 'asks':
# [
# [40.0, 20.0, 'E161538482278825'],
# [50.0, 25.0, 'E161538482286085']
# ],
# 'timestamp': 1615390711695,
# 'datetime': '2021-03-10 17:38:31.695000',
# 'nonce': 1615390711695
# }
orders = self.fetch_open_orders(symbol, since, limit)
bids = []
asks = []
for order in orders:
temp = []
temp.append(order.get('price'))
temp.append(order.get('amount'))
temp.append(order.get('id'))
if order.get('side') == 'buy':
bids.append(temp)
else:
asks.append(temp)
currentTimestamp = int(datetime.now().timestamp() * 1000)
currentDatetime = str(datetime.fromtimestamp(currentTimestamp * 0.001))
result = {
'bids': bids,
'asks': asks,
'timestamp': currentTimestamp,
'datetime': currentDatetime,
'nonce': currentTimestamp,
}
return result
def parse_markets(self, response):
listData = []
for value in response:
tmp = {
"id": value.get("coinSymbol"),
"symbol": value.get("symbol"),
"base": value.get("coinSymbol"),
"quote": value.get("baseSymbol"),
"baseId": value.get("coinSymbol"),
"quoteId": value.get("baseSymbol"),
"type": value.get("publishType"),
"active": value.get("enable"),
"precision": {
"amount": value.get("coinScale"),
"price": value.get("baseCoinScale"),
},
"limits": {
"amount": {"min": value.get("minVolume"), "max": value.get("maxVolume")},
"price": {"min": value.get("minSellPrice"), "max": value.get("maxBuyPrice")},
"cost": {"min": value.get("minVolume") * value.get("minSellPrice"), "max": value.get("maxVolume") * value.get("maxBuyPrice")},
},
"taker": value.get("fee"),
"maker": value.get("fee"),
"info": value,
}
listData.append(tmp)
return listData
def add_frame(self, timeFrameStart, timeFrameEnd, timeframe, highestPrice, lowestPrice, amount, result, openPrice, closePrice):
frame = []
frame.append(timeFrameStart)
frame.append(openPrice)
frame.append(highestPrice)
frame.append(lowestPrice)
frame.append(closePrice)
frame.append(amount)
result.append(frame)
    def parse_ohlcv(self, response, since, timeframe):
        """Bucket completed orders into OHLCV candles of `timeframe` seconds.

        `response` is the paginated order payload (orders under 'content',
        presumably sorted by time -- confirm against the backend); `since` is
        the start of the first bucket in milliseconds.  Returns
        [[start_ms, open, high, low, close, volume], ...].
        """
        highestPrice = 0
        lowestPrice = sys.maxsize  # sentinel; replaced by the first price seen
        price = 0
        amount = 0
        timeFrameStart = since
        timeFrameEnd = int((since * 0.001 + timeframe) * 1000)  # ms -> s -> ms
        result = []
        i = 0
        orders = response.get('content')
        isOpenPrice = True
        openPrice = 0
        closePrice = 0
        while i < len(orders):
            if isOpenPrice == True:
                # First order examined for the current bucket fixes its open.
                openPrice = orders[i].get('price')
                isOpenPrice = False
            time = orders[i].get('time')
            if time >= timeFrameStart and time <= timeFrameEnd:
                # Order falls inside the current bucket: fold it in.
                price = orders[i].get('price')
                closePrice = price
                if highestPrice < price:
                    highestPrice = price
                if lowestPrice > price:
                    lowestPrice = price
                amount += orders[i].get('amount')
                i += 1
                if i == len(orders):
                    # Last order overall: flush the still-open bucket.
                    self.add_frame(timeFrameStart, timeFrameEnd, timeframe, highestPrice, lowestPrice, amount, result, openPrice, closePrice)
            else:
                # Order lies past the bucket: emit the bucket and advance the
                # window.  An empty bucket gets zeroed prices and rewinds i so
                # the same order is re-examined against the next window.
                # NOTE(review): when the bucket is NON-empty, i += 1 below
                # advances past the boundary order without consuming it,
                # dropping it from every candle -- confirm this is intended.
                if lowestPrice == sys.maxsize:
                    lowestPrice = 0
                    openPrice = 0
                    closePrice = 0
                    i -= 1
                self.add_frame(timeFrameStart, timeFrameEnd, timeframe, highestPrice, lowestPrice, amount, result, openPrice, closePrice)
                timeFrameStart = timeFrameEnd + 1
                timeFrameEnd = int((timeFrameEnd * 0.001 + timeframe) * 1000)
                amount = 0
                highestPrice = 0
                lowestPrice = sys.maxsize
                isOpenPrice = True
                i += 1
        return result
# timeframe variants:
# 1m (one minute);
# 1h (one hour);
# 1d (one day - 24 hours)
# 1w (one week - 7 days)
# 1mn (one mounth - 30 days)
def fetch_ohlcv(self, symbol, timeframe=None, since=0, limit=None, params={}):
# Response example:
# [
# [
# 1504541580000, // UTC timestamp in milliseconds, integer
# 4235.4, // (O)pen price, float
# 4240.6, // (H)ighest price, float
# 4230.0, // (L)owest price, float
# 4230.7, // (C)losing price, float
# 37.72941911 // (V)olume (in terms of the base currency), float
# ],
# ...
# ]
inputDataCheck = False
for frame in self.timeframes:
if frame == timeframe:
inputDataCheck = True
break
if inputDataCheck == False:
return {'error': 'Incorrect timeframe'}
tFrame = int(self.timeframes.get(timeframe)) * 60
default_order_amount_limit = 100
params['status'] = 'COMPLETED'
if 'page' in params:
params['pageNo'] = self.safe_string(params, 'page')
else:
params['pageNo'] = 0
if since is None:
since = 0
if limit is None:
limit = default_order_amount_limit
request = {
'symbol': symbol,
'since': since,
'pageSize': limit,
}
fullRequest = self.extend(request, params)
response = self.privatePostExchangeOrderAll(fullRequest)
return self.parse_ohlcv(response, since, tFrame)
def fetch_markets(self, symbol=''):
request = {
'symbol': symbol,
}
response = self.privatePostMarketSymbolInfo(request)
return self.parse_markets(response)
# RETURN EXAMPLE:
# [
# {
# 'id': 'BTC',
# 'symbol': 'BTC/USDT',
# 'base': 'BTC',
# 'quote': 'USDT',
# 'baseId': 'BTC',
# 'quoteId': 'USDT',
# 'type': 'NONE',
# 'active': 1,
# 'precision': { 'amount': 2, 'price': 2 },
# 'limits':
# {
# 'amount': { 'min': 0.0, 'max': 0.0 },
# 'price': { 'min': 0.0, 'max': 0.0 },
# 'cost': { 'min': 0.0, 'max': 0.0 }
# },
# 'taker': 0.001,
# 'maker': 0.001,
# 'info': {backend response}
# },
# ...
# ]
def load_markets(self, reload=False, symbol=''):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
currencies = None
if self.has['fetchCurrencies']:
currencies = self.fetch_currencies()
markets = self.fetch_markets(symbol)
return self.set_markets(markets, currencies)
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the signed request descriptor {url, method, body, headers}.

        The string "<METHOD> /<path> <urlencoded params> <token>", a newline
        and the body are HMAC'd with the API secret; the signature and the
        session token travel in custom headers checked by the backend.
        """
        # Check existence of the authentication token.
        # Just use an empty one in case the application is not signed in yet.
        authToken = ''
        if 'token' in self.options:
            authToken = self.options['token']
        # Get URL
        url = self.implode_params(self.urls['api'], {'hostname': self.hostname}) + '/' + path
        # Calculate body and content type depending on method type: GET or POST
        keys = list(params.keys())
        keysLength = len(keys)
        # In case the body is still not assigned just make it an empty string
        if body is None:
            body = ''
        # Prepare the line for hashing.  This hash sum is checked on the
        # backend side to verify the API user.  POST params are signed via the
        # query portion, not as part of the body.
        query = method + ' /' + path + ' ' + self.urlencode(params) + ' ' + authToken + '\n' + body
        signed = self.hmac(self.encode(query), self.encode(self.secret))
        contentType = None
        if method == 'POST':
            contentType = 'application/x-www-form-urlencoded'
            if keysLength > 0:
                body = self.urlencode(params)
        else:
            if keysLength > 0:
                url += '?' + self.urlencode(params)
        headers = {
            'x-auth-sign': signed,
            'x-auth-token': authToken,
        }
        if authToken != '':
            headers['access-auth-token'] = authToken
        if contentType is not None:
            headers['Content-Type'] = contentType
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def sign_in(self, params={}):
params = {
'key': self.key,
'token': self.token,
}
response = self.privatePostUcenterApiLogin(params)
loginData = response['data']
self.options['token'] = self.safe_string(loginData, 'token')
memberId = self.safe_string(loginData, 'id')
return memberId
def fetch_status(self):
# Responce examples:
# {'status': 'ok'}
# or
# {'status': 'shutdown', 'reason': 'ExchangeNotAvailable'}
# or
# {'status': 'shutdown', 'reason': 'Unknown reason'}
result = False
try:
response = self.privatePostExchangeExchangeCoinBaseSymbol()
for field in response.items():
if field[0] == 'message':
if field[1] == 'SUCCESS':
result = True
if result is True:
return {"status": "ok"}
else:
return {"status": "shutdown", "reason": "ExchangeNotAvailable"}
except:
reason = str(sys.exc_info()[0])
if reason.find('ExchangeNotAvailable') != -1:
return {"status": "shutdown", "reason": "ExchangeNotAvailable"}
else:
return {"status": "shutdown", "reason": "Unknown reason"}
def parse_currencies(self, response):
listData = []
for value in response:
tmp = {
'id': value.get('name'),
'code': value.get('name').upper(),
'name': value.get('name'),
'active': bool(value.get('status')),
'fee': 0.005,
'precision': 0,
'limits':
{
'amount':
{
'min': 'None',
'max': 'None',
},
'price':
{
'min': 'None',
'max': 'None',
},
'cost':
{
'min': 'None',
'max': 'None',
},
'withdraw':
{
'min': value.get('minWithdrawAmount'),
'max': value.get('maxWithdrawAmount'),
},
},
'info': value
}
listData.append(tmp)
return listData
def fetch_currencies(self):
# Responce example
#[
# {
# 'id': 'BTC',
# 'code': 'BTC',
# 'name': 'BTC',
# 'active': True,
# 'fee': 0.001,
# 'precision': 0,
# 'limits': // TPR exchange has no restrictions
# {
# 'amount':
# {
# 'min': 'None',
# 'max': 'None'
# },
# 'price':
# {
# 'min': 'None',
# 'max': 'None'
# },
# 'cost':
# {
# 'min': 'None',
# 'max': 'None'
# },
# 'withdraw':
# {
# 'min': 1.0,
# 'max': 5000.0
# }
# },
# 'info': { },
# },
# ...
#]
try:
response = self.privatePostMarketCoinsInfo()
return self.parse_currencies(response)
except:
reason = str(sys.exc_info()[0])
if reason.find('ExchangeNotAvailable') != -1:
return {"Error": "ExchangeNotAvailable"}
else:
return {"Error": "Unknown reason"}
def fetch_order(self, id, symbol=None, params={}):
request = {
'orderId': id,
}
response = self.privatePostExchangeOrderFind(request)
return self.parse_order(response)
def parse_order(self, order, market=None):
# {
# 'orderId':'E161183624377614',
# 'memberId':2,
# 'type':'LIMIT_PRICE',
# 'amount':1000.0,
# 'symbol':'BCH/USDT',
# 'tradedAmount':1000.0,
# 'turnover':1080.0,
# 'coinSymbol':'BCH',
# 'baseSymbol':'USDT',
# 'status':'COMPLETED',
# 'latestTradeTimestamp':1611836256242,
# 'direction':'SELL',
# 'price':1.0,
# 'time':1611836243776,
# 'completedTime':1611836256242,
# },
if not order:
return None
type = 'market'
if order['type'] == 'LIMIT_PRICE':
type = 'limit'
side = order['direction'].lower()
remaining = order['amount'] - order['tradedAmount']
status = order['status']
if status == 'COMPLETED':
status = 'closed'
elif status == 'TRADING' or status == 'PAUSED' or status == 'RESERVED':
status = 'open'
else:
status = 'canceled'
cost = order['tradedAmount'] * order['price']
result = {
'info': order,
'id': order['orderId'],
'clientOrderId': order['memberId'],
'timestamp': order['time'],
'datetime': self.iso8601(order['time']),
'latestTradeTimestamp': order['latestTradeTimestamp'],
'symbol': order['symbol'],
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': order['price'],
'stopPrice': None,
'cost': cost,
'average': None,
'amount': order['amount'],
'filled': order['tradedAmount'],
'remaining': remaining,
'status': status,
'fee': None,
'trades': None,
}
return result
def create_order(self, symbol, type, side, amount, price=None, params={}):
params['symbol'] = symbol
params['price'] = price
params['amount'] = amount
if side == 'buy':
params['direction'] = 'BUY'
else:
params['direction'] = 'SELL'
if type == 'market':
params['type'] = 'MARKET_PRICE'
else:
params['type'] = 'LIMIT_PRICE'
params['useDiscount'] = '0'
response = self.privatePostExchangeOrderAdd(params)
orderId = self.safe_string(response, 'data')
return self.fetch_order(orderId)
def cancel_order(self, id, symbol=None, params={}):
request = {
'orderId': id,
}
response = self.privatePostExchangeOrderApicancel(self.extend(request, params))
return self.parse_order(response['data'])
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
# Request structure
# {
# 'symbol': Parameter from method arguments
# 'since': Timestamp of first order in list in Unix epoch format
# 'limit': Response list size
# 'memberId': May be set in params. May be not set
# 'status': one of TRADING COMPLETED CANCELED OVERTIMED. May be set in params
# 'page': for pagination. In self case limit is size of every page. May be set in params
# }
default_order_amount_limit = 10000
if 'page' in params:
params['pageNo'] = self.safe_string(params, 'page')
else:
params['pageNo'] = 0
if symbol is None:
symbol = ''
if since is None:
since = 0
if limit is None:
limit = default_order_amount_limit
request = {
'symbol': symbol,
'since': since,
'pageSize': limit,
}
fullRequest = self.extend(request, params)
response = self.privatePostExchangeOrderAll(fullRequest)
# {
# 'content': [
# {
# 'orderId':'E161183624377614',
# 'memberId':2,
# 'type':'LIMIT_PRICE',
# 'amount':1000.0,
# 'symbol':'BCH/USDT',
# 'tradedAmount':1000.0,
# 'turnover':1080.0,
# 'coinSymbol':'BCH',
# 'baseSymbol':'USDT',
# 'status':'COMPLETED',
# 'direction':'SELL',
# 'price':1.0,
# 'time':1611836243776,
# 'completedTime':1611836256242,
# },
# ...
# ],
# 'totalElements':41,
# 'totalPages':3,
# 'last':False,
# 'size':20,
# 'number':1,
# 'first':False,
# 'numberOfElements':20,
# 'sort': [
# {
# 'direction':'DESC',
# 'property':'time',
# 'ignoreCase':False,
# 'nullHandling':'NATIVE',
# 'ascending':False,
# 'descending':True,
# }
# ]
# }
return self.parse_orders(response['content'])
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
# Request structure
# {
# 'symbol': Parameter from method arguments
# 'since': Timestamp of first order in list in Unix epoch format
# 'limit': Response list size
# 'memberId': May be set in params. May be not set
# 'status': one of TRADING COMPLETED CANCELED OVERTIMED. May be set in params
# 'page': for pagination. In self case limit is size of every page. May be set in params
# }
default_order_amount_limit = 20
if 'page' in params:
params['pageNo'] = self.safe_string(params, 'page')
else:
params['pageNo'] = 0
if symbol is None:
symbol = ''
if since is None:
since = 0
if limit is None:
limit = default_order_amount_limit
request = {
'symbol': symbol,
'since': since,
'pageSize': limit,
}
fullRequest = self.extend(request, params)
response = self.privatePostExchangeOrderAll(fullRequest)
# {
# 'content': [
# {
# 'orderId':'E161183624377614',
# 'memberId':2,
# 'type':'LIMIT_PRICE',
# 'amount':1000.0,
# 'symbol':'BCH/USDT',
# 'tradedAmount':1000.0,
# 'turnover':1080.0,
# 'coinSymbol':'BCH',
# 'baseSymbol':'USDT',
# 'status':'COMPLETED',
# 'direction':'SELL',
# 'price':1.0,
# 'time':1611836243776,
# 'completedTime':1611836256242,
# },
# ...
# ],
# 'totalElements':41,
# 'totalPages':3,
# 'last':False,
# 'size':20,
# 'number':1,
# 'first':False,
# 'numberOfElements':20,
# 'sort': [
# {
# 'direction':'DESC',
# 'property':'time',
# 'ignoreCase':False,
# 'nullHandling':'NATIVE',
# 'ascending':False,
# 'descending':True,
# }
# ]
# }
return self.parse_orders(response['content'])
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
# Request structure
# {
# 'symbol': Parameter from method arguments
# 'since': Timestamp of first order in list in Unix epoch format
# 'limit': Response list size
# 'memberId': May be set in params. May be not set
# 'status': one of TRADING COMPLETED CANCELED OVERTIMED. May be set in params
# 'page': for pagination. In self case limit is size of every page. May be set in params
# }
default_order_amount_limit = 20
params['status'] = 'CANCELED'
if 'page' in params:
params['pageNo'] = self.safe_string(params, 'page')
else:
params['pageNo'] = 0
if symbol is None:
symbol = ''
if since is None:
since = 0
if limit is None:
limit = default_order_amount_limit
request = {
'symbol': symbol,
'since': since,
'pageSize': limit,
}
fullRequest = self.extend(request, params)
response = self.privatePostExchangeOrderAll(fullRequest)
# {
# 'content': [
# {
# 'orderId':'E161183624377614',
# 'memberId':2,
# 'type':'LIMIT_PRICE',
# 'amount':1000.0,
# 'symbol':'BCH/USDT',
# 'tradedAmount':1000.0,
# 'turnover':1080.0,
# 'coinSymbol':'BCH',
# 'baseSymbol':'USDT',
# 'status':'COMPLETED',
# 'direction':'SELL',
# 'price':1.0,
# 'time':1611836243776,
# 'completedTime':1611836256242,
# },
# ...
# ],
# 'totalElements':41,
# 'totalPages':3,
# 'last':False,
# 'size':20,
# 'number':1,
# 'first':False,
# 'numberOfElements':20,
# 'sort': [
# {
# 'direction':'DESC',
# 'property':'time',
# 'ignoreCase':False,
# 'nullHandling':'NATIVE',
# 'ascending':False,
# 'descending':True,
# }
# ]
# }
return self.parse_orders(response['content'])
# If call without params the function returns balance of current user
def fetch_balance(self, uid='-1', params={}):
params = {
'uid': uid
}
try:
response = self.privatePostUcenterMemberBalance(params)
except Exception as e:
return e
return self.parse_balance(response)
def parse_balance(self, response):
data = json.loads(json.dumps(response))
if data['message'] == 'SUCCESS':
result = { "free":{}, "used":{}, "total":{}}
for row in data['data']['balances']:
result['free'].update({row['coinName']:row['free']})
result['used'].update({row['coinName']:row['used']})
result['total'].update({row['coinName']:row['total']})
result.update({row['coinName']:{'free':row['free'], 'used':row['used'], 'total':row['total']}})
return result
# Returns int or None
def get_market_price(self, symbol):
response = self.privatePostMarketSymbolThumb()
for i in response:
if i.get('symbol') == symbol:
return i.get('close')
def fetch_trades(self, orderId, since, pageNo=None, pageSize=None):
# Responce example:
# [
# {
# 'info': { backend response },
# 'id': 'E161460499516968',
# 'timestamp': 1614605187661,
# 'datetime': '2021-03-01 15:26:27.661000',
# 'symbol': 'BTC/USDT',
# 'order': 'E161460499516968',
# 'type': 'LIMIT_PRICE',
# 'side': 'SELL',
# 'takerOrMaker': 'None', (Have no this information inside TPR exchange)
# 'price': 1.0,
# 'amount': 1.0,
# 'cost': 1.0,
# 'fee':
# {
# 'cost': 0.005,
# 'currency': 'BTC',
# 'rate': 'None' (Have no this information inside TPR exchange)
# }
# }
# ]
if pageNo is None:
pageNo = 0
if pageSize is None:
pageSize = 100
request = { 'orderId': orderId,
'since': since,
'pageNo': pageNo,
'pageSize': pageSize }
return self.parse_trade(self.privatePostExchangeOrderTrades(request))
def parse_trade(self, response):
trades = []
content = response.get('content')
for exchangeTrade in content:
timestamp = exchangeTrade.get('time')
datetime_ = str(datetime.fromtimestamp(int(timestamp) * 0.001))
price = exchangeTrade.get('price')
amount = exchangeTrade.get('amount')
cost = price * amount
tmp = {
'info': exchangeTrade,
'id': exchangeTrade.get('orderId'),
'timestamp': timestamp,
'datetime': datetime_,
'symbol': exchangeTrade.get('symbol'),
'order': exchangeTrade.get('orderId'),
'type': exchangeTrade.get('type'),
'side': exchangeTrade.get('direction'),
'takerOrMaker': 'None',
'price': price,
'amount': amount,
'cost': cost,
'fee':
{
'cost': exchangeTrade.get('fee'),
'currency': exchangeTrade.get('coinSymbol'),
'rate': 'None',
}
}
trades.append(tmp)
return trades
def parse_my_trades(self, response):
listData = []
for value in response:
ExchangeOrder = response.get(value)
id_ = ExchangeOrder.get('orderId')
timestamp = ExchangeOrder.get('time')
datetime_ = str(datetime.fromtimestamp(int(timestamp) * 0.001))
price = ExchangeOrder.get('price')
amount = ExchangeOrder.get('amount')
cost = price * amount
tmp = {
'info': response.get(value),
'id': id_,
'timestamp': timestamp,
'datetime': datetime_,
'symbol': ExchangeOrder.get('symbol'),
'order': id_,
'type': ExchangeOrder.get('type'),
'side': ExchangeOrder.get('direction'),
'takerOrMaker': 'None',
'price': price,
'amount': amount,
'cost': cost,
'fee':
{
'cost': ExchangeOrder.get('fee'),
'currency': ExchangeOrder.get('coinSymbol'),
'rate': 'None',
}
}
listData.append(tmp)
return listData
def fetch_my_trades(self, pageNo=None, pageSize=None):
# Responce example:
# [
# {
# 'info': { backend response },
# 'id': 'E161460499516968',
# 'timestamp': 1614605187661,
# 'datetime': '2021-03-01 15:26:27.661000',
# 'symbol': 'BTC/USDT',
# 'order': 'E161460499516968',
# 'type': 'LIMIT_PRICE',
# 'side': 'SELL',
# 'takerOrMaker': 'None', (Have no this information inside TPR exchange)
# 'price': 1.0,
# 'amount': 1.0,
# 'cost': 1.0,
# 'fee':
# {
# 'cost': 0.001,
# 'currency': 'BTC',
# 'rate': 'None' (Have no this information inside TPR exchange)
# }
# },
# { ... },
# ]
if pageNo is None:
pageNo = 0
if pageSize is None:
pageSize = 100
request = { 'orderId': '',
'pageNo': pageNo,
'pageSize': pageSize }
return self.parse_my_trades(self.privatePostExchangeOrderMyTrades(request))
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
if httpCode == 200:
if 'code' in response:
if response['code'] == 0:
return
else:
return
# {
# "message": "Error text in case when HTTP code is not 200",
# ...
# }
message = self.safe_string(response, 'message')
if message is not None:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
raise ExchangeError(feedback) # unknown message
| [
"datetime.datetime.fromtimestamp",
"json.dumps",
"sys.exc_info",
"datetime.datetime.now",
"ccxt.base.errors.ExchangeError"
] | [((5652, 5700), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(currentTimestamp * 0.001)'], {}), '(currentTimestamp * 0.001)\n', (5674, 5700), False, 'from datetime import datetime\n'), ((11228, 11276), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(currentTimestamp * 0.001)'], {}), '(currentTimestamp * 0.001)\n', (11250, 11276), False, 'from datetime import datetime\n'), ((34501, 34521), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (34511, 34521), False, 'import json\n'), ((41018, 41041), 'ccxt.base.errors.ExchangeError', 'ExchangeError', (['feedback'], {}), '(feedback)\n', (41031, 41041), False, 'from ccxt.base.errors import ExchangeError\n'), ((5587, 5601), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5599, 5601), False, 'from datetime import datetime\n'), ((11163, 11177), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11175, 11177), False, 'from datetime import datetime\n'), ((20801, 20815), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (20813, 20815), False, 'import sys\n'), ((23386, 23400), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (23398, 23400), False, 'import sys\n'), ((5512, 5526), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5524, 5526), False, 'from datetime import datetime\n')] |
from c6 import People
# Student inherits from People
class Student(People):
    """A student; bumps the class-wide counter Student.sum on creation."""

    def __init__(self, school, name, age):
        self.school = school
        # Delegate name/age initialisation to the People base class.
        super(Student, self).__init__(name, age)
        Student.sum += 1

    def do_homework(self):
        print('do homework')
# Demo: instantiate a Student and inspect the shared class-level counter.
student1 = Student('农村小学',"石敢当",19)
# Instance and class both see the same counter value.
print(student1.sum)
print(Student.sum)
# Attributes set by the People base class via super().__init__.
print(student1.name)
print(student1.age)
# Unbound-style call: the first positional argument stands in for `self`.
People.do_homework("","xiaolinzi")
| [
"c6.People.do_homework"
] | [((425, 460), 'c6.People.do_homework', 'People.do_homework', (['""""""', '"""xiaolinzi"""'], {}), "('', 'xiaolinzi')\n", (443, 460), False, 'from c6 import People\n')] |
from django.http import HttpResponseRedirect
from thedaily.models import OAuthState
from thedaily.views import get_or_create_user_profile
def get_phone_number(backend, uid, user=None, social=None, *args, **kwargs):
    """Social-auth pipeline step: redirect users without a phone number to the
    phone-capture page, stashing the OAuth `state` so the flow can resume.

    Returns an HttpResponseRedirect when the phone is missing; otherwise None
    so the pipeline continues.
    """
    profile = get_or_create_user_profile(user)
    if profile.phone:
        return None
    state = kwargs['request'].GET['state']
    try:
        oauth_state = OAuthState.objects.get(user=user)
    except OAuthState.DoesNotExist:
        OAuthState.objects.create(user=user, state=state, fullname=kwargs['details'].get('fullname'))
    else:
        # Refresh the stored state for an existing record.
        oauth_state.state = state
        oauth_state.save()
    query_params = '?is_new=1' if kwargs.get('is_new') else ''
    return HttpResponseRedirect('/usuarios/registrate/google/' + query_params)
| [
"thedaily.views.get_or_create_user_profile",
"thedaily.models.OAuthState.objects.get",
"django.http.HttpResponseRedirect"
] | [((235, 267), 'thedaily.views.get_or_create_user_profile', 'get_or_create_user_profile', (['user'], {}), '(user)\n', (261, 267), False, 'from thedaily.views import get_or_create_user_profile\n'), ((737, 804), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/usuarios/registrate/google/' + query_params)"], {}), "('/usuarios/registrate/google/' + query_params)\n", (757, 804), False, 'from django.http import HttpResponseRedirect\n'), ((375, 408), 'thedaily.models.OAuthState.objects.get', 'OAuthState.objects.get', ([], {'user': 'user'}), '(user=user)\n', (397, 408), False, 'from thedaily.models import OAuthState\n')] |
from django.core.management.base import BaseCommand
import numpy as np
import pandas as pd
from django.conf import settings
from baseball.models import Player, PlayerStats
import sys
import requests
import datetime
HITTING_BASE_URL = 'http://lookup-service-prod.mlb.com/json/named.sport_hitting_tm.bam'
PITCHING_BASE_URL = 'http://lookup-service-prod.mlb.com/json/named.sport_pitching_tm.bam'
class Command(BaseCommand):
    """Backfill per-season hitting/pitching stats for every known player.

    For each player with a pro debut date, walks the seasons from the debut
    year up to (but not including) the current year and creates a PlayerStats
    row from the MLB lookup service whenever one is missing.
    """

    def handle(self, *args, **options):
        year = datetime.datetime.now().year
        players = Player.objects.all()
        for player in players:
            # Players without a recorded debut have no seasons to backfill.
            if not player.pro_debut_date:
                continue
            start_year = player.pro_debut_date.year
            # NOTE(review): range() excludes the current season -- confirm
            # whether in-progress seasons are meant to be skipped.
            for search_year in range(start_year, year):
                try:
                    year_stats = PlayerStats.objects.get(player=player, year=search_year)
                except PlayerStats.DoesNotExist:
                    # The lookup service expects quoted parameter values.
                    payload = {
                        'league_list_id': '\'mlb\'',
                        'player_id': '\'%s\'' % player.player_id,
                        'season': '\'%s\'' % search_year,
                        'game_type': '\'R\''
                    }
                    hitting_response = requests.get(HITTING_BASE_URL,
                                                    params=payload)
                    pitching_response = requests.get(PITCHING_BASE_URL,
                                                     params=payload)
                    hitting_data = hitting_response.json().get('sport_hitting_tm').get('queryResults').get('row')
                    pitching_data = pitching_response.json().get('sport_pitching_tm').get('queryResults').get('row')
                    # Only persist seasons where the service returned data.
                    if hitting_data or pitching_data:
                        year_stats = PlayerStats.objects.create(player=player,
                                                                  year=search_year)
                        year_stats.hitting_stats = hitting_data
                        year_stats.pitching_stats = pitching_data
                        year_stats.save()
                        print(year_stats)
"baseball.models.Player.objects.all",
"baseball.models.PlayerStats.objects.create",
"requests.get",
"datetime.datetime.now",
"baseball.models.PlayerStats.objects.get"
] | [((528, 548), 'baseball.models.Player.objects.all', 'Player.objects.all', ([], {}), '()\n', (546, 548), False, 'from baseball.models import Player, PlayerStats\n'), ((480, 503), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (501, 503), False, 'import datetime\n'), ((799, 855), 'baseball.models.PlayerStats.objects.get', 'PlayerStats.objects.get', ([], {'player': 'player', 'year': 'search_year'}), '(player=player, year=search_year)\n', (822, 855), False, 'from baseball.models import Player, PlayerStats\n'), ((1252, 1298), 'requests.get', 'requests.get', (['HITTING_BASE_URL'], {'params': 'payload'}), '(HITTING_BASE_URL, params=payload)\n', (1264, 1298), False, 'import requests\n'), ((1372, 1419), 'requests.get', 'requests.get', (['PITCHING_BASE_URL'], {'params': 'payload'}), '(PITCHING_BASE_URL, params=payload)\n', (1384, 1419), False, 'import requests\n'), ((1813, 1872), 'baseball.models.PlayerStats.objects.create', 'PlayerStats.objects.create', ([], {'player': 'player', 'year': 'search_year'}), '(player=player, year=search_year)\n', (1839, 1872), False, 'from baseball.models import Player, PlayerStats\n')] |
from typing import Optional
from uuid import UUID
import attr
from airflow_monitor.shared.base_monitor_config import BaseMonitorConfig
@attr.s
class BaseServerConfig(object):
    """Per-tracking-source settings shared by all monitor server configs."""
    source_name: str = attr.ib()
    source_type: str = attr.ib()
    tracking_source_uid: UUID = attr.ib()
    sync_interval: int = attr.ib(default=10)  # Sync interval in seconds
    is_sync_enabled: bool = attr.ib(default=True)
    fetcher_type = attr.ib(default=None)  # type: str
    log_level = attr.ib(default=None)  # type: str

    @classmethod
    def create(
        cls, server_config: dict, monitor_config: Optional[BaseMonitorConfig] = None
    ):
        """Build a concrete config from the raw server dict; subclasses must override."""
        raise NotImplementedError()
@attr.s
class TrackingServiceConfig:
    """Connection and authentication settings for the tracking service."""
    url = attr.ib()
    access_token = attr.ib(default=None)
    user = attr.ib(default=None)
    password = attr.ib(default=None)
    service_type = attr.ib(default=None)
| [
"attr.ib"
] | [((202, 211), 'attr.ib', 'attr.ib', ([], {}), '()\n', (209, 211), False, 'import attr\n'), ((235, 244), 'attr.ib', 'attr.ib', ([], {}), '()\n', (242, 244), False, 'import attr\n'), ((277, 286), 'attr.ib', 'attr.ib', ([], {}), '()\n', (284, 286), False, 'import attr\n'), ((313, 332), 'attr.ib', 'attr.ib', ([], {'default': '(10)'}), '(default=10)\n', (320, 332), False, 'import attr\n'), ((389, 410), 'attr.ib', 'attr.ib', ([], {'default': '(True)'}), '(default=True)\n', (396, 410), False, 'import attr\n'), ((430, 451), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (437, 451), False, 'import attr\n'), ((482, 503), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (489, 503), False, 'import attr\n'), ((728, 737), 'attr.ib', 'attr.ib', ([], {}), '()\n', (735, 737), False, 'import attr\n'), ((757, 778), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (764, 778), False, 'import attr\n'), ((790, 811), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (797, 811), False, 'import attr\n'), ((827, 848), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (834, 848), False, 'import attr\n'), ((868, 889), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (875, 889), False, 'import attr\n')] |
from unittest import TestCase, mock
import requests
from waterdata.commands.lookup_generation.wqp_lookups import (
get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup)
@mock.patch('waterdata.commands.lookup_generation.wqp_lookups.execute_get_request')
class GetLookupByJsonTestCase(TestCase):
    """Tests for get_lookup_by_json with the HTTP layer mocked out."""

    # Does not start with mock.patch.TEST_PREFIX ('test'), so the class
    # decorator above leaves this helper unpatched.
    @staticmethod
    def _make_response(status_code):
        response = requests.Response()
        response.status_code = status_code
        return response

    def test_sets_query_params_correctly(self, mrequest):
        mrequest.return_value = self._make_response(500)
        result = get_lookup_by_json('http://fakehost.com', path='codes', params={'param1': 'value1'})
        self.assertEqual(result, {})
        # mimeType=json must be merged into the caller-supplied params.
        expected_params = {'param1': 'value1', 'mimeType': 'json'}
        mrequest.assert_called_with('http://fakehost.com', path='codes', params=expected_params)

    def test_bad_request(self, mrequest):
        mrequest.return_value = self._make_response(500)
        result = get_lookup_by_json('http://fakehost.com', path='codes')
        self.assertEqual(result, {})

    def test_good_request(self, mrequest):
        response = self._make_response(200)
        response.json = lambda: {'codes': []}
        mrequest.return_value = response
        result = get_lookup_by_json('http://fakehost.com', path='codes')
        self.assertEqual(result, {'codes': []})
class IsUsCountyTestCase(TestCase):
    """Tests for the is_us_county predicate."""

    def test_empty_lookup(self):
        result = is_us_county({})
        self.assertFalse(result)

    def test_lookup_with_no_colon_in_value(self):
        result = is_us_county({'value': '12US'})
        self.assertFalse(result)

    def test_lookup_with_colon_and_us(self):
        result = is_us_county({'value': 'US:12'})
        self.assertTrue(result)

    def test_lookup_with_colon_and_not_us(self):
        result = is_us_county({'value': 'CA:12'})
        self.assertFalse(result)
class GetNwisStateLookupTestCase(TestCase):
    """Tests for get_nwis_state_lookup."""

    def test_empty_list(self):
        self.assertEqual(get_nwis_state_lookup([]), {})

    def test_valid_lookup(self):
        lookups = [
            {'value': 'US:55', 'desc': 'Wisconsin'},
            {'value': 'US:01', 'desc': 'Alabama'},
        ]
        expected = {
            '55': {'name': 'Wisconsin'},
            '01': {'name': 'Alabama'},
        }
        self.assertEqual(get_nwis_state_lookup(lookups), expected)
class GetNwisCountyLookupTestCase(TestCase):
    """Tests for get_nwis_county_lookup."""

    def test_empty_list(self):
        self.assertEqual(get_nwis_county_lookup([]), {})

    def test_valid_lookup(self):
        lookups = [
            {'value': 'US:01:001', 'desc': 'US, Alabama, Autauga County'},
            {'value': 'US:01:002', 'desc': 'US, Alabama, Baldwin County'},
            {'value': 'US:02:068', 'desc': 'US, Alaska, Denali Borough'},
        ]
        expected = {
            '01': {'001': {'name': 'Autauga County'}, '002': {'name': 'Baldwin County'}},
            '02': {'068': {'name': 'Denali Borough'}},
        }
        self.assertEqual(get_nwis_county_lookup(lookups), expected)
| [
"waterdata.commands.lookup_generation.wqp_lookups.get_nwis_county_lookup",
"requests.Response",
"waterdata.commands.lookup_generation.wqp_lookups.get_lookup_by_json",
"waterdata.commands.lookup_generation.wqp_lookups.get_nwis_state_lookup",
"waterdata.commands.lookup_generation.wqp_lookups.is_us_county",
... | [((206, 293), 'unittest.mock.patch', 'mock.patch', (['"""waterdata.commands.lookup_generation.wqp_lookups.execute_get_request"""'], {}), "(\n 'waterdata.commands.lookup_generation.wqp_lookups.execute_get_request')\n", (216, 293), False, 'from unittest import TestCase, mock\n'), ((421, 440), 'requests.Response', 'requests.Response', ([], {}), '()\n', (438, 440), False, 'import requests\n'), ((873, 892), 'requests.Response', 'requests.Response', ([], {}), '()\n', (890, 892), False, 'import requests\n'), ((1162, 1181), 'requests.Response', 'requests.Response', ([], {}), '()\n', (1179, 1181), False, 'import requests\n'), ((514, 602), 'waterdata.commands.lookup_generation.wqp_lookups.get_lookup_by_json', 'get_lookup_by_json', (['"""http://fakehost.com"""'], {'path': '"""codes"""', 'params': "{'param1': 'value1'}"}), "('http://fakehost.com', path='codes', params={'param1':\n 'value1'})\n", (532, 602), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((966, 1021), 'waterdata.commands.lookup_generation.wqp_lookups.get_lookup_by_json', 'get_lookup_by_json', (['"""http://fakehost.com"""'], {'path': '"""codes"""'}), "('http://fakehost.com', path='codes')\n", (984, 1021), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((1302, 1357), 'waterdata.commands.lookup_generation.wqp_lookups.get_lookup_by_json', 'get_lookup_by_json', (['"""http://fakehost.com"""'], {'path': '"""codes"""'}), "('http://fakehost.com', path='codes')\n", (1320, 1357), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((1471, 1487), 'waterdata.commands.lookup_generation.wqp_lookups.is_us_county', 'is_us_county', (['{}'], {}), '({})\n', (1483, 1487), False, 'from 
waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((1565, 1596), 'waterdata.commands.lookup_generation.wqp_lookups.is_us_county', 'is_us_county', (["{'value': '12US'}"], {}), "({'value': '12US'})\n", (1577, 1596), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((1668, 1700), 'waterdata.commands.lookup_generation.wqp_lookups.is_us_county', 'is_us_county', (["{'value': 'US:12'}"], {}), "({'value': 'US:12'})\n", (1680, 1700), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((1777, 1809), 'waterdata.commands.lookup_generation.wqp_lookups.is_us_county', 'is_us_county', (["{'value': 'CA:12'}"], {}), "({'value': 'CA:12'})\n", (1789, 1809), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((1914, 1939), 'waterdata.commands.lookup_generation.wqp_lookups.get_nwis_state_lookup', 'get_nwis_state_lookup', (['[]'], {}), '([])\n', (1935, 1939), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((2141, 2175), 'waterdata.commands.lookup_generation.wqp_lookups.get_nwis_state_lookup', 'get_nwis_state_lookup', (['test_lookup'], {}), '(test_lookup)\n', (2162, 2175), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n'), ((2416, 2442), 'waterdata.commands.lookup_generation.wqp_lookups.get_nwis_county_lookup', 'get_nwis_county_lookup', (['[]'], {}), '([])\n', (2438, 2442), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, 
get_nwis_state_lookup, get_nwis_county_lookup\n'), ((2764, 2799), 'waterdata.commands.lookup_generation.wqp_lookups.get_nwis_county_lookup', 'get_nwis_county_lookup', (['test_lookup'], {}), '(test_lookup)\n', (2786, 2799), False, 'from waterdata.commands.lookup_generation.wqp_lookups import get_lookup_by_json, is_us_county, get_nwis_state_lookup, get_nwis_county_lookup\n')] |
import csv
# # Calculate the proportion of Four types of datasets - energy, piezo, elasticity, diel
# # Energy
# energy = []
# with open('training/energy/energy.csv', 'r', encoding='utf-8') as en:
# reader = csv.reader(en)
# for row in reader:
# energy.append(row[0])
# print(len(energy))
#
# # elasticity
# elasticity = []
# with open('training/elasticity/elasticity.csv', 'r', encoding='utf-8') as el:
# reader = csv.reader(el)
# for row in reader:
# elasticity.append(row[0])
# print(len(elasticity))
#
# # diel
# diel = []
# with open('training/diel/diel.csv', 'r', encoding='utf-8') as di:
# reader = csv.reader(di)
# for row in reader:
# diel.append(row[0])
# print(len(diel))
#
# # piezo
# piezo = []
# with open('training/piezo/piezo.csv', 'r', encoding='utf-8') as pi:
# reader = csv.reader(pi)
# for row in reader:
# piezo.append(row[0])
# print(len(piezo))
#
# # energy & elasticity
# intersection = list(set(energy).intersection(set(elasticity)))
# print('energy & elasticity')
# print(len(intersection))
# # energy & diel
# intersection = list(set(energy).intersection(set(diel)))
# print('energy & diel')
# print(len(intersection))
# # energy & piezo
# intersection = list(set(energy).intersection(set(piezo)))
# print('energy & piezo')
# print(len(intersection))
# # elasticity & diel
# intersection = list(set(elasticity).intersection(set(diel)))
# print('elasticity & diel')
# print(len(intersection))
# # elasticity & piezo
# intersection = list(set(elasticity).intersection(set(piezo)))
# print('elasticity & piezo')
# print(len(intersection))
# # diel & piezo
# intersection = list(set(diel).intersection(set(piezo)))
# print('diel & piezo')
# print(len(intersection))
# # diel & piezo & elasticity
# intersection = list(set(diel).intersection(set(piezo).intersection(set(elasticity))))
# print('diel & piezo & elasticity')
# print(len(intersection))
#
# # nelement and nsite
# ne = {}
# ns = {}
# ns_st = {}
# with open('training/energy/energy.csv', 'r', encoding='utf-8') as en:
# reader = csv.reader(en)
# for row in reader:
# nelement = str(row[2])
# ne[nelement] = ne.get(nelement, 0) + 1
# nsite = row[3]
# ns[nsite] = ns.get(nsite, 0) + 1
# ns_new = int(row[3])//20
# ns_st[ns_new] = ns_st.get(ns_new,0)+1
# print('nelement')
# print(len(ne))
# print(ne)
# print('nsite')
# print(len(ns))
# print(ns)
# print(sorted(ns))
# print('nstie_new')
# print(len(ns_st))
# print(ns_st)
# count quality: how many rows hold the placeholder value '0.0' per column.
def _count_zero_columns(path, columns):
    """Return, for each column index in *columns*, the number of rows in
    the CSV at *path* whose cell in that column equals the string '0.0'.

    Args:
        path: CSV file path.
        columns: iterable of 0-based column indices to inspect.

    Returns:
        list[int]: one count per entry in *columns*, in the same order.
    """
    counts = [0] * len(columns)
    with open(path, 'r', encoding='utf-8') as handle:
        for row in csv.reader(handle):
            for position, column in enumerate(columns):
                if row[column] == '0.0':
                    counts[position] += 1
    return counts


# Energy dataset: quality columns 10-13 (prints four counts).
for count in _count_zero_columns('training/energy/energy.csv', (10, 11, 12, 13)):
    print(count)

# Elasticity dataset: quality columns 10-13 (prints four counts).
for count in _count_zero_columns('training/elasticity/elasticity.csv', (10, 11, 12, 13)):
    print(count)

# Piezo dataset: quality column 10 only (prints one count).
for count in _count_zero_columns('training/piezo/piezo.csv', (10,)):
    print(count)
| [
"csv.reader"
] | [((2693, 2707), 'csv.reader', 'csv.reader', (['en'], {}), '(en)\n', (2703, 2707), False, 'import csv\n'), ((3174, 3188), 'csv.reader', 'csv.reader', (['en'], {}), '(en)\n', (3184, 3188), False, 'import csv\n'), ((3645, 3659), 'csv.reader', 'csv.reader', (['en'], {}), '(en)\n', (3655, 3659), False, 'import csv\n')] |
import numpy as np
import pandas as pd
from welib.tools.clean_exceptions import *
from welib.FEM.graph import Node as GraphNode
from welib.FEM.graph import Element as GraphElement
from welib.FEM.graph import NodeProperty
from welib.FEM.graph import GraphModel
class MaterialProperty(NodeProperty):
    """Material property attached to FEM nodes (no extra fields yet)."""

    def __init__(self):
        # BUG FIX: the original called `Property.__init__(self)`, but no
        # name `Property` exists in this module, so instantiation raised
        # NameError. Delegate to the actual base class instead.
        NodeProperty.__init__(self)
class FEMNode(GraphNode):
    """A graph node augmented with the list of DOF indices attached to it."""

    def __init__(self, ID, x, y, z=0, Type=None, DOFs=None):
        """Create a FEM node.

        Args:
            ID: node identifier (forwarded to GraphNode).
            x, y, z: coordinates (z defaults to 0).
            Type: kept for interface compatibility; not used here.
            DOFs: list of DOF indices; defaults to a fresh empty list.
        """
        GraphNode.__init__(self, ID, x, y, z)
        # BUG FIX: the original default `DOFs=[]` was a shared mutable
        # default -- every node created without DOFs aliased one list.
        self.DOFs = [] if DOFs is None else DOFs

    def __repr__(self):
        s='<Node{:4d}> x:{:7.2f} y:{:7.2f} z:{:7.2f}, DOFs: {}'.format(self.ID, self.x, self.y, self.z, self.DOFs)
        return s
class FEMElement(GraphElement):
    """Graph element carrying elemental damping (Ce), stiffness (Ke) and
    mass (Me) matrices, initially empty."""

    def __init__(self, ID, nodeIDs, nodes=None, properties=None):
        super().__init__(ID, nodeIDs, nodes, properties)
        self.Ce = []
        self.Ke = []
        self.Me = []

    def __repr__(self):
        description = '<Elem{:4d}> NodeIDs: {}'.format(self.ID, self.nodeIDs)
        if self.nodes is not None:
            description += ' l={:.2f}'.format(self.length)
        return description
class BeamElement(FEMElement):
    """Beam finite element; currently adds nothing beyond FEMElement."""

    def __init__(self, ID, nodeIDs, nodes, properties=None):
        # Zero-argument super() (Python 3) replaces super(BeamElement, self).
        super().__init__(ID, nodeIDs, nodes=nodes, properties=properties)
class FEMModel(GraphModel):
    """Finite-element model: a node/element graph plus the assembled full
    system matrices (mass ``MM``, stiffness ``KK``, damping ``DD``)."""

    def __init__(self):
        GraphModel.__init__(self)
        # Full system matrices; remain None until setFullMatrices() is called.
        self.MM = None
        self.KK = None
        self.DD = None
        # Total number of degrees of freedom. NOTE(review): never assigned
        # inside this class -- presumably set by callers/subclasses; confirm.
        self.nDOF = None

    def setFullMatrices(self,MM,KK,DD=None):
        """Store the full mass and stiffness matrices; the damping matrix
        is only overwritten when DD is provided."""
        self.MM=MM
        self.KK=KK
        if DD is not None:
            self.DD=DD

    def CraigBampton(self, Ileader, Ifollow=None, Ifixed=None):
        """Reduce the model with the Craig-Bampton method.

        Ileader: DOF indices kept as leader (interface) DOFs.
        Ifollow: DOF indices to condense (default handled by the
            underlying reduction routine).
        Ifixed: if not None, fixed boundary conditions are applied first.
        """
        from welib.FEM.reduction import CraigBampton
        if Ifixed is not None:
            # NOTE(review): applyFixBC() is not defined in this class or in
            # its visible base -- this branch would raise AttributeError.
            M,K = self.applyFixBC()
        else:
            M,K = self.MM, self.KK
        return CraigBampton(M, K, Ileader, Ifollow=Ifollow)

    def DOF2Nodes(self):
        """Return an (nDOF x 4) int array mapping each DOF index to
        [DOF index, node index, number of DOFs on that node, 1-based
        position of the DOF within the node's DOF list]."""
        DOF2Nodes=np.zeros((self.nDOF,4),int)
        for iN,node in enumerate(self.Nodes):
            for iiDOF,iDOF in enumerate(node.DOFs):
                DOF2Nodes[iDOF,0] = iDOF
                DOF2Nodes[iDOF,1] = iN
                DOF2Nodes[iDOF,2] = len(node.DOFs)
                DOF2Nodes[iDOF,3] = iiDOF+1
        return DOF2Nodes
if __name__=='__main__':
    # Wide print lines so large FEM matrices are not wrapped.
    np.set_printoptions(linewidth=500)
    # NOTE(review): SubDynModel is neither defined nor imported in this
    # file -- running this block raises NameError. Possibly meant to be
    # imported from a SubDyn-specific module; confirm.
    mdl=SubDynModel()
    mdl.fromSummaryFile('../../data/Monopile/Pendulum.SD.sum.yaml')
| [
"welib.FEM.graph.Element.__init__",
"welib.FEM.reduction.CraigBampton",
"numpy.zeros",
"welib.FEM.graph.Node.__init__",
"welib.FEM.graph.GraphModel.__init__",
"numpy.set_printoptions"
] | [((2329, 2363), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'linewidth': '(500)'}), '(linewidth=500)\n', (2348, 2363), True, 'import numpy as np\n'), ((464, 501), 'welib.FEM.graph.Node.__init__', 'GraphNode.__init__', (['self', 'ID', 'x', 'y', 'z'], {}), '(self, ID, x, y, z)\n', (482, 501), True, 'from welib.FEM.graph import Node as GraphNode\n'), ((791, 850), 'welib.FEM.graph.Element.__init__', 'GraphElement.__init__', (['self', 'ID', 'nodeIDs', 'nodes', 'properties'], {}), '(self, ID, nodeIDs, nodes, properties)\n', (812, 850), True, 'from welib.FEM.graph import Element as GraphElement\n'), ((1342, 1367), 'welib.FEM.graph.GraphModel.__init__', 'GraphModel.__init__', (['self'], {}), '(self)\n', (1361, 1367), False, 'from welib.FEM.graph import GraphModel\n'), ((1884, 1928), 'welib.FEM.reduction.CraigBampton', 'CraigBampton', (['M', 'K', 'Ileader'], {'Ifollow': 'Ifollow'}), '(M, K, Ileader, Ifollow=Ifollow)\n', (1896, 1928), False, 'from welib.FEM.reduction import CraigBampton\n'), ((1973, 2002), 'numpy.zeros', 'np.zeros', (['(self.nDOF, 4)', 'int'], {}), '((self.nDOF, 4), int)\n', (1981, 2002), True, 'import numpy as np\n')] |
from django.contrib import admin
from .models import Label, Choice, Quiz, Question, Answer, Submit
# Register your models here.
# All quiz models use the default ModelAdmin; register them in one pass.
for model in (Quiz, Question, Answer, Label, Submit, Choice):
    admin.site.register(model)
admin.site.register(Choice) | [
"django.contrib.admin.site.register"
] | [((129, 154), 'django.contrib.admin.site.register', 'admin.site.register', (['Quiz'], {}), '(Quiz)\n', (148, 154), False, 'from django.contrib import admin\n'), ((155, 184), 'django.contrib.admin.site.register', 'admin.site.register', (['Question'], {}), '(Question)\n', (174, 184), False, 'from django.contrib import admin\n'), ((185, 212), 'django.contrib.admin.site.register', 'admin.site.register', (['Answer'], {}), '(Answer)\n', (204, 212), False, 'from django.contrib import admin\n'), ((214, 240), 'django.contrib.admin.site.register', 'admin.site.register', (['Label'], {}), '(Label)\n', (233, 240), False, 'from django.contrib import admin\n'), ((241, 268), 'django.contrib.admin.site.register', 'admin.site.register', (['Submit'], {}), '(Submit)\n', (260, 268), False, 'from django.contrib import admin\n'), ((269, 296), 'django.contrib.admin.site.register', 'admin.site.register', (['Choice'], {}), '(Choice)\n', (288, 296), False, 'from django.contrib import admin\n')] |