code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import logging
# Named logger for this script; records propagate to the root logger's handlers.
mylogger = logging.getLogger("My Script")
# Alternative: key the logger off the importing module's name instead of a fixed label.
# mylogger = logging.getLogger(__name__)
## Emit one record at each severity level (message text is Russian for
## "Message of level debug/info/warning"). With no handler configured,
## only WARNING and above reach the console by default.
mylogger.debug("Сообщение уровня debug")
mylogger.info("Сообщение уровня info")
mylogger.warning("Сообщение уровня warning")
|
[
"logging.getLogger"
] |
[((27, 57), 'logging.getLogger', 'logging.getLogger', (['"""My Script"""'], {}), "('My Script')\n", (44, 57), False, 'import logging\n')]
|
from django.shortcuts import render, redirect, get_object_or_404
from .models import Post, Like, Comment
from .forms import PostForm, CommentForm
from User.models import UserProfile
from django.db import IntegrityError
from django.contrib import messages
def comment_adjustment_view(request, id, id2):  # id is a comment, id2 is a post
    """Edit or delete an existing comment (id) attached to a post (id2)."""
    comment = get_object_or_404(Comment, id=id)
    parent_post = get_object_or_404(Post, id=id2)
    # Bind submitted data (if any) to the existing comment instance.
    form = CommentForm(request.POST or None, instance=comment)
    context = {'obj': comment, 'post': parent_post}
    if form.is_valid():
        form.save()
        return redirect('/')
    context['form'] = form
    # A POST carrying 'c_delete' means the delete button was clicked.
    if request.method == 'POST' and 'c_delete' in request.POST:
        comment.delete()
        return redirect('/')
    return render(request, 'posts/comment_adjustment.html', context)
def like_view(request, id):
    """List the Like rows for the post with the given id."""
    liked_post = Post.objects.get(id=id)
    # `post` is the reverse relation from Like to Post — presumably its
    # related_name; each row identifies a user that liked this post.
    context = {'likes': liked_post.post.all()}
    return render(request, 'posts/like_list.html', context)
def detail_view(request, id):
    """Render a single post with its like count, comments, and forms.

    Handles two POST actions on the same URL, distinguished by the name of
    the submit button present in request.POST: 'likeform' toggles this
    user's like, 'commentform' adds a comment.
    """
    user = request.user
    post = Post.objects.get(id=id)
    like_count = post.post.filter().values('like').count()  # count number of likes
    likes = post.post.all().filter().values('like')  # users that liked the post
    # `like` is True when the current user has NOT yet liked the post,
    # i.e. the form shown should offer "like" rather than "unlike".
    if {'like': user.id} not in likes:  # if user didn't like the pic, he can, else he can unlike
        like = True
    else:
        like = False
    # like twice exception
    try:
        if like:
            if request.method == 'POST' and 'likeform' in request.POST:
                Like.objects.create(like=request.user, post=post)
                return redirect('/p/%d' % post.id)
        else:
            if request.method == 'POST' and 'likeform' in request.POST:
                # Unlike: remove this user's Like row(s) for the post.
                Like.objects.filter(like=request.user, post=post).delete()
                return redirect('/p/%d' % post.id)
    except IntegrityError:
        # A duplicate like slipped past the check above; warn instead of crashing.
        messages.warning(request, 'no')
    form = CommentForm()
    comments = post.c_post.all()  # Getting all comments of current post
    if request.method == 'POST' and 'commentform' in request.POST:
        form = CommentForm(request.POST or None)
        form.instance.c_post = post
        form.instance.user = user
        if form.is_valid():
            form.save()
            return redirect('/p/%d' % post.id)
    context = {
        'post': post,
        'like_count': like_count,
        'like': like,
        'form': form,
        'comments': comments,
    }
    return render(request, 'posts/post_index.html', context)
def list_view(request):
    """Render the post feed for an authenticated user.

    Anonymous visitors are redirected to the login page. The original used
    an empty `if ...: pass / else: return` — replaced with a guard clause.
    """
    if not request.user.is_authenticated:
        return redirect('/login')
    # QuerySets are lazy, so building obj after the guard changes nothing.
    obj = Post.objects.all()
    # Accounts the current user follows, used by the feed template.
    follows = request.user.follow.all()
    context = {
        'obj': obj,
        'follows': follows
    }
    return render(request, 'posts/post_list.html', context)
def all_posts_view(request):
    """Render every post in the system plus the user's follow list."""
    # NOTE(review): unlike list_view there is no authentication guard here;
    # request.user.follow would fail for anonymous users — confirm intended.
    posts = Post.objects.all()
    followed = request.user.follow.all()
    return render(
        request,
        'posts/all_posts.html',
        {'obj': posts, 'follows': followed},
    )
def create_view(request):
    """Show the post-creation form; create a Post on valid submission."""
    form = PostForm()
    if request.method == 'POST':
        # Re-bind with the submitted data and any uploaded files.
        form = PostForm(request.POST or None, request.FILES or None)
        form.instance.author = request.user
        if form.is_valid():
            form.save()
            return redirect('/')
    return render(request, 'posts/post_create.html', {'form': form})
def update_view(request, id):
    """Edit the existing post identified by id."""
    target = get_object_or_404(Post, id=id)
    # Bind submitted data/files (if any) onto the existing instance.
    form = PostForm(request.POST or None, request.FILES or None, instance=target)
    if form.is_valid():
        form.save()
        return redirect('/')
    context = {'obj': target, 'form': form}
    return render(request, 'posts/post_update.html', context)
def delete_view(request, id):
    """Show a delete-confirmation page and delete the post on POST."""
    target = get_object_or_404(Post, id=id)
    # Deletion only happens on an explicit POST (the confirmation form).
    if request.method == 'POST':
        target.delete()
        return redirect('/')
    return render(request, 'posts/post_delete.html', {'obj': target})
|
[
"django.shortcuts.render",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect",
"django.contrib.messages.warning"
] |
[((347, 380), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Comment'], {'id': 'id'}), '(Comment, id=id)\n', (364, 380), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((392, 423), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'id': 'id2'}), '(Post, id=id2)\n', (409, 423), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((753, 810), 'django.shortcuts.render', 'render', (['request', '"""posts/comment_adjustment.html"""', 'context'], {}), "(request, 'posts/comment_adjustment.html', context)\n", (759, 810), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1002, 1050), 'django.shortcuts.render', 'render', (['request', '"""posts/like_list.html"""', 'context'], {}), "(request, 'posts/like_list.html', context)\n", (1008, 1050), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2529, 2578), 'django.shortcuts.render', 'render', (['request', '"""posts/post_index.html"""', 'context'], {}), "(request, 'posts/post_index.html', context)\n", (2535, 2578), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2851, 2899), 'django.shortcuts.render', 'render', (['request', '"""posts/post_list.html"""', 'context'], {}), "(request, 'posts/post_list.html', context)\n", (2857, 2899), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3081, 3129), 'django.shortcuts.render', 'render', (['request', '"""posts/all_posts.html"""', 'context'], {}), "(request, 'posts/all_posts.html', context)\n", (3087, 3129), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3466, 3516), 'django.shortcuts.render', 'render', (['request', '"""posts/post_create.html"""', 'context'], {}), "(request, 'posts/post_create.html', context)\n", (3472, 3516), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3559, 3589), 
'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'id': 'id'}), '(Post, id=id)\n', (3576, 3589), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3808, 3858), 'django.shortcuts.render', 'render', (['request', '"""posts/post_update.html"""', 'context'], {}), "(request, 'posts/post_update.html', context)\n", (3814, 3858), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3901, 3931), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'id': 'id'}), '(Post, id=id)\n', (3918, 3931), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4067, 4117), 'django.shortcuts.render', 'render', (['request', '"""posts/post_delete.html"""', 'context'], {}), "(request, 'posts/post_delete.html', context)\n", (4073, 4117), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((584, 597), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (592, 597), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((727, 740), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (735, 740), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2710, 2728), 'django.shortcuts.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (2718, 2728), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3755, 3768), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3763, 3768), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4001, 4014), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (4009, 4014), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1948, 1979), 'django.contrib.messages.warning', 'messages.warning', (['request', '"""no"""'], {}), "(request, 'no')\n", (1964, 1979), False, 'from django.contrib import messages\n'), 
((2336, 2363), 'django.shortcuts.redirect', 'redirect', (["('/p/%d' % post.id)"], {}), "('/p/%d' % post.id)\n", (2344, 2363), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3397, 3410), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3405, 3410), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1672, 1699), 'django.shortcuts.redirect', 'redirect', (["('/p/%d' % post.id)"], {}), "('/p/%d' % post.id)\n", (1680, 1699), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1884, 1911), 'django.shortcuts.redirect', 'redirect', (["('/p/%d' % post.id)"], {}), "('/p/%d' % post.id)\n", (1892, 1911), False, 'from django.shortcuts import render, redirect, get_object_or_404\n')]
|
from collections.abc import Mapping
from tempfile import NamedTemporaryFile
from typing import Any, List, Literal, Optional, Type
import yamale
from colored import attr, fg
from yamale.validators import DefaultValidators, Validator
from opta.constants import REGISTRY, yaml
from opta.exceptions import UserErrors
class Module(Validator):
    """Custom yamale validator for a single Opta module entry.

    Subclasses pin ``cloud`` to a provider key so that ``module`` tags in
    schemas validate against the module definitions recorded in REGISTRY.
    """

    tag = "module"
    constraints: List = []
    # Provider key into REGISTRY ("aws", "google", ...); set by subclasses.
    cloud: Optional[str] = None

    # Yamale expects this function to return an array of errors
    def validate(self, value: Any) -> List[str]:
        if not isinstance(value, Mapping):
            return ["module is not a Map"]
        if "type" not in value:
            return ["module must have a 'type' field"]
        if self.cloud is None:
            raise Exception("Cloud needs to be specified for validation")
        # Renamed from `type` so the builtin is not shadowed.
        module_type: str = value["type"]
        if module_type in REGISTRY[self.cloud]["module_aliases"]:
            # Resolve the alias to the canonical module name, in place.
            value["type"] = REGISTRY[self.cloud]["module_aliases"][  # type: ignore
                module_type
            ]
            module_type = value["type"]
        elif module_type not in REGISTRY[self.cloud]["modules"]:
            raise UserErrors(
                f"Module {module_type} is not supported for cloud {self.cloud}"
            )
        module_schema_dicts = REGISTRY[self.cloud]["modules"][module_type]["validators"]
        # yamale only loads schemas from disk, so dump them to a temp file.
        with NamedTemporaryFile(mode="w") as f:
            yaml.dump_all(module_schema_dicts, f)
            f.flush()
            return _get_yamale_errors(value, f.name)
# Cloud-specific module validators: each pins `cloud` to the REGISTRY key
# for its provider so Module.validate consults the right module catalog.
class AwsModule(Module):
    cloud = "aws"
class GcpModule(Module):
    cloud = "google"
class AzureModule(Module):
    cloud = "azurerm"
class LocalModule(Module):
    cloud = "local"
class Opta(Validator):
    """Opta yaml validator for a whole document.

    Files containing "org_name" are treated as environment files, anything
    else as service files; each kind validates against its own schema set.
    """

    tag = "opta"
    # The original attribute was misspelled "constaints" (cf. Module.constraints).
    # Keep the misspelling as an alias so any existing reference still resolves.
    constraints: List = []
    constaints = constraints  # deprecated alias, do not use
    extra_validators: List[Type[Validator]] = []
    # Schema dicts are supplied by the cloud-specific subclasses.
    environment_schema_dict: Optional[dict] = None
    service_schema_dicts: Optional[List[dict]] = None

    def _is_valid(self, value: Any) -> bool:
        if not isinstance(value, Mapping):
            return False
        return "org_name" in value or "name" in value

    def validate(self, value: Any) -> List[str]:
        if not isinstance(value, Mapping):
            return ["opta.yaml files should be a map"]
        if "org_name" in value:
            # Environment file: needs an environment schema from a subclass.
            if self.environment_schema_dict is None:
                raise UserErrors(
                    "We currently only support AWS, GCP, and Azure and Local"
                )
            schema_dicts = [self.environment_schema_dict]
        else:
            if self.service_schema_dicts is None:
                raise UserErrors("We currently only support AWS, GCP, and Azure")
            schema_dicts = self.service_schema_dicts
        # yamale only loads schemas from disk, so dump them to a temp file.
        with NamedTemporaryFile(mode="w") as f:
            yaml.dump_all(schema_dicts, f)
            f.flush()
            return _get_yamale_errors(value, f.name, self.extra_validators)
class AwsId(Validator):
    """Yamale validator accepting values whose string form is all digits."""

    tag = "aws_id"

    def _is_valid(self, value: Any) -> bool:
        # Coerce first so integer account ids pass as well as strings.
        return str(value).isdigit()
# Cloud-specific top-level validators: each wires in the extra module/id
# validators for its provider plus the schemas recorded in REGISTRY.
class AwsOpta(Opta):
    extra_validators = [AwsModule, AwsId]
    environment_schema_dict = REGISTRY["aws"]["validator"]
    service_schema_dicts = REGISTRY["aws"]["service_validator"]
class GcpOpta(Opta):
    extra_validators = [GcpModule]
    environment_schema_dict = REGISTRY["google"]["validator"]
    service_schema_dicts = REGISTRY["google"]["service_validator"]
# NOTE(review): "AureOpta" looks like a typo for "AzureOpta"; the name is
# referenced again at module scope below, so renaming would need both sites.
class AureOpta(Opta):
    extra_validators = [AzureModule]
    environment_schema_dict = REGISTRY["azurerm"]["validator"]
    service_schema_dicts = REGISTRY["azurerm"]["service_validator"]
class LocalOpta(Opta):
    extra_validators = [LocalModule]
    environment_schema_dict = REGISTRY["local"]["validator"]
    service_schema_dicts = REGISTRY["local"]["service_validator"]
def _get_yamale_errors(
    data: Any, schema_path: str, extra_validators: Optional[List[Type[Validator]]] = None
) -> List[str]:
    """Validate *data* against the yamale schema at *schema_path*.

    Returns a flat list of error strings collected from every
    ValidationResult produced by yamale.
    """
    validators = DefaultValidators.copy()
    for extra in extra_validators or []:
        validators[extra.tag] = extra
    schema = yamale.make_schema(schema_path, validators=validators, parser="ruamel")
    # yamale expects (data, path) pairs; the path is only used in messages.
    validation_results = yamale.validate(schema, [(data, None)], _raise_error=False)
    # Each result carries its own `errors` list — flatten them all.
    return [err for result in validation_results for err in result.errors]
# Module-level validator registries — one per cloud flavor — each a copy of
# yamale's defaults with the matching Opta validator registered under its tag.
vanilla_validators = DefaultValidators.copy()
vanilla_validators[Opta.tag] = Opta
aws_validators = DefaultValidators.copy()
aws_validators[AwsOpta.tag] = AwsOpta
gcp_validators = DefaultValidators.copy()
gcp_validators[GcpOpta.tag] = GcpOpta
azure_validators = DefaultValidators.copy()
azure_validators[AureOpta.tag] = AureOpta
local_validators = DefaultValidators.copy()
local_validators[LocalOpta.tag] = LocalOpta
# yamale loads schemas from a file, so dump the registry's schema to a
# temporary file and compile one schema object per validator set, at
# import time.
with NamedTemporaryFile(mode="w") as f:
    yaml.dump(REGISTRY["validator"], f)
    f.flush()
    vanilla_main_schema = yamale.make_schema(
        f.name, validators=vanilla_validators, parser="ruamel"
    )
    aws_main_schema = yamale.make_schema(
        f.name, validators=aws_validators, parser="ruamel"
    )
    gcp_main_schema = yamale.make_schema(
        f.name, validators=gcp_validators, parser="ruamel"
    )
    azure_main_schema = yamale.make_schema(
        f.name, validators=azure_validators, parser="ruamel"
    )
    local_main_schema = yamale.make_schema(
        f.name, validators=local_validators, parser="ruamel"
    )
def _print_errors(errors: List[str]) -> None:
    """Print validation errors to stdout in bold red terminal colors."""
    red = fg("red")
    bold, reset = attr("bold"), attr("reset")
    print(red, end="")
    print(bold, end="")
    print("Opta file validation failed with errors:")
    print(reset, end="")
    print(red, end="")
    for err in errors:
        print(f"  {err}")
    print(reset, end="")
def validate_yaml(config_file_path: str, cloud: str) -> Literal[True]:
    """Validate an opta yaml file against the schema for *cloud*.

    Prints the errors and raises UserErrors when validation fails;
    returns True otherwise. Unknown clouds fall back to the vanilla schema.
    """
    schema_by_cloud = {
        "aws": aws_main_schema,
        "google": gcp_main_schema,
        "azurerm": azure_main_schema,
        "local": local_main_schema,
    }
    main_schema = schema_by_cloud.get(cloud, vanilla_main_schema)
    data = yamale.make_data(config_file_path, parser="ruamel")
    yamale_result = yamale.validate(main_schema, data, _raise_error=False)
    errors = [err for result in yamale_result for err in result.errors]
    if errors:
        _print_errors(errors)
        raise UserErrors(f"{config_file_path} is not a valid Opta file.")
    return True
|
[
"tempfile.NamedTemporaryFile",
"opta.constants.yaml.dump_all",
"opta.exceptions.UserErrors",
"yamale.make_data",
"yamale.validators.DefaultValidators.copy",
"yamale.validate",
"colored.fg",
"opta.constants.yaml.dump",
"colored.attr",
"yamale.make_schema"
] |
[((4604, 4628), 'yamale.validators.DefaultValidators.copy', 'DefaultValidators.copy', ([], {}), '()\n', (4626, 4628), False, 'from yamale.validators import DefaultValidators, Validator\n'), ((4682, 4706), 'yamale.validators.DefaultValidators.copy', 'DefaultValidators.copy', ([], {}), '()\n', (4704, 4706), False, 'from yamale.validators import DefaultValidators, Validator\n'), ((4762, 4786), 'yamale.validators.DefaultValidators.copy', 'DefaultValidators.copy', ([], {}), '()\n', (4784, 4786), False, 'from yamale.validators import DefaultValidators, Validator\n'), ((4844, 4868), 'yamale.validators.DefaultValidators.copy', 'DefaultValidators.copy', ([], {}), '()\n', (4866, 4868), False, 'from yamale.validators import DefaultValidators, Validator\n'), ((4930, 4954), 'yamale.validators.DefaultValidators.copy', 'DefaultValidators.copy', ([], {}), '()\n', (4952, 4954), False, 'from yamale.validators import DefaultValidators, Validator\n'), ((4029, 4053), 'yamale.validators.DefaultValidators.copy', 'DefaultValidators.copy', ([], {}), '()\n', (4051, 4053), False, 'from yamale.validators import DefaultValidators, Validator\n'), ((4153, 4224), 'yamale.make_schema', 'yamale.make_schema', (['schema_path'], {'validators': 'validators', 'parser': '"""ruamel"""'}), "(schema_path, validators=validators, parser='ruamel')\n", (4171, 4224), False, 'import yamale\n'), ((4399, 4458), 'yamale.validate', 'yamale.validate', (['schema', 'formatted_data'], {'_raise_error': '(False)'}), '(schema, formatted_data, _raise_error=False)\n', (4414, 4458), False, 'import yamale\n'), ((5005, 5033), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""w"""'}), "(mode='w')\n", (5023, 5033), False, 'from tempfile import NamedTemporaryFile\n'), ((5044, 5079), 'opta.constants.yaml.dump', 'yaml.dump', (["REGISTRY['validator']", 'f'], {}), "(REGISTRY['validator'], f)\n", (5053, 5079), False, 'from opta.constants import REGISTRY, yaml\n'), ((5120, 5194), 'yamale.make_schema', 
'yamale.make_schema', (['f.name'], {'validators': 'vanilla_validators', 'parser': '"""ruamel"""'}), "(f.name, validators=vanilla_validators, parser='ruamel')\n", (5138, 5194), False, 'import yamale\n'), ((5231, 5301), 'yamale.make_schema', 'yamale.make_schema', (['f.name'], {'validators': 'aws_validators', 'parser': '"""ruamel"""'}), "(f.name, validators=aws_validators, parser='ruamel')\n", (5249, 5301), False, 'import yamale\n'), ((5338, 5408), 'yamale.make_schema', 'yamale.make_schema', (['f.name'], {'validators': 'gcp_validators', 'parser': '"""ruamel"""'}), "(f.name, validators=gcp_validators, parser='ruamel')\n", (5356, 5408), False, 'import yamale\n'), ((5447, 5519), 'yamale.make_schema', 'yamale.make_schema', (['f.name'], {'validators': 'azure_validators', 'parser': '"""ruamel"""'}), "(f.name, validators=azure_validators, parser='ruamel')\n", (5465, 5519), False, 'import yamale\n'), ((5558, 5630), 'yamale.make_schema', 'yamale.make_schema', (['f.name'], {'validators': 'local_validators', 'parser': '"""ruamel"""'}), "(f.name, validators=local_validators, parser='ruamel')\n", (5576, 5630), False, 'import yamale\n'), ((6041, 6092), 'yamale.make_data', 'yamale.make_data', (['config_file_path'], {'parser': '"""ruamel"""'}), "(config_file_path, parser='ruamel')\n", (6057, 6092), False, 'import yamale\n'), ((5703, 5712), 'colored.fg', 'fg', (['"""red"""'], {}), "('red')\n", (5705, 5712), False, 'from colored import attr, fg\n'), ((5732, 5744), 'colored.attr', 'attr', (['"""bold"""'], {}), "('bold')\n", (5736, 5744), False, 'from colored import attr, fg\n'), ((5818, 5831), 'colored.attr', 'attr', (['"""reset"""'], {}), "('reset')\n", (5822, 5831), False, 'from colored import attr, fg\n'), ((5852, 5861), 'colored.fg', 'fg', (['"""red"""'], {}), "('red')\n", (5854, 5861), False, 'from colored import attr, fg\n'), ((5934, 5947), 'colored.attr', 'attr', (['"""reset"""'], {}), "('reset')\n", (5938, 5947), False, 'from colored import attr, fg\n'), ((6140, 6198), 
'yamale.validate', 'yamale.validate', (['aws_main_schema', 'data'], {'_raise_error': '(False)'}), '(aws_main_schema, data, _raise_error=False)\n', (6155, 6198), False, 'import yamale\n'), ((6788, 6847), 'opta.exceptions.UserErrors', 'UserErrors', (['f"""{config_file_path} is not a valid Opta file."""'], {}), "(f'{config_file_path} is not a valid Opta file.')\n", (6798, 6847), False, 'from opta.exceptions import UserErrors\n'), ((1335, 1363), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""w"""'}), "(mode='w')\n", (1353, 1363), False, 'from tempfile import NamedTemporaryFile\n'), ((1382, 1419), 'opta.constants.yaml.dump_all', 'yaml.dump_all', (['module_schema_dicts', 'f'], {}), '(module_schema_dicts, f)\n', (1395, 1419), False, 'from opta.constants import REGISTRY, yaml\n'), ((2744, 2772), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""w"""'}), "(mode='w')\n", (2762, 2772), False, 'from tempfile import NamedTemporaryFile\n'), ((2791, 2821), 'opta.constants.yaml.dump_all', 'yaml.dump_all', (['schema_dicts', 'f'], {}), '(schema_dicts, f)\n', (2804, 2821), False, 'from opta.constants import REGISTRY, yaml\n'), ((6251, 6309), 'yamale.validate', 'yamale.validate', (['gcp_main_schema', 'data'], {'_raise_error': '(False)'}), '(gcp_main_schema, data, _raise_error=False)\n', (6266, 6309), False, 'import yamale\n'), ((1171, 1239), 'opta.exceptions.UserErrors', 'UserErrors', (['f"""Module {type} is not supported for cloud {self.cloud}"""'], {}), "(f'Module {type} is not supported for cloud {self.cloud}')\n", (1181, 1239), False, 'from opta.exceptions import UserErrors\n'), ((2365, 2434), 'opta.exceptions.UserErrors', 'UserErrors', (['"""We currently only support AWS, GCP, and Azure and Local"""'], {}), "('We currently only support AWS, GCP, and Azure and Local')\n", (2375, 2434), False, 'from opta.exceptions import UserErrors\n'), ((2617, 2676), 'opta.exceptions.UserErrors', 'UserErrors', (['"""We currently only support AWS, GCP, 
and Azure"""'], {}), "('We currently only support AWS, GCP, and Azure')\n", (2627, 2676), False, 'from opta.exceptions import UserErrors\n'), ((6363, 6423), 'yamale.validate', 'yamale.validate', (['azure_main_schema', 'data'], {'_raise_error': '(False)'}), '(azure_main_schema, data, _raise_error=False)\n', (6378, 6423), False, 'import yamale\n'), ((6475, 6535), 'yamale.validate', 'yamale.validate', (['local_main_schema', 'data'], {'_raise_error': '(False)'}), '(local_main_schema, data, _raise_error=False)\n', (6490, 6535), False, 'import yamale\n'), ((6570, 6632), 'yamale.validate', 'yamale.validate', (['vanilla_main_schema', 'data'], {'_raise_error': '(False)'}), '(vanilla_main_schema, data, _raise_error=False)\n', (6585, 6632), False, 'import yamale\n')]
|
import pandas
# Modin is pinned to one exact pandas release; fail fast at import time if
# a different version is installed rather than break subtly later.
__pandas_version__ = "0.25.3"
if pandas.__version__ != __pandas_version__:
    raise ImportError(
        "The pandas version installed does not match the required pandas "
        "version in Modin. Please install pandas {} to use "
        "Modin.".format(__pandas_version__)
    )
from pandas import (
eval,
unique,
value_counts,
cut,
to_numeric,
factorize,
test,
qcut,
date_range,
period_range,
Index,
MultiIndex,
CategoricalIndex,
bdate_range,
DatetimeIndex,
Timedelta,
Timestamp,
to_timedelta,
set_eng_float_format,
options,
set_option,
NaT,
PeriodIndex,
Categorical,
Interval,
UInt8Dtype,
UInt16Dtype,
UInt32Dtype,
UInt64Dtype,
SparseDtype,
Int8Dtype,
Int16Dtype,
Int32Dtype,
Int64Dtype,
CategoricalDtype,
DatetimeTZDtype,
IntervalDtype,
PeriodDtype,
RangeIndex,
Int64Index,
UInt64Index,
Float64Index,
TimedeltaIndex,
IntervalIndex,
IndexSlice,
Grouper,
array,
Period,
show_versions,
DateOffset,
timedelta_range,
infer_freq,
interval_range,
ExcelWriter,
SparseArray,
SparseSeries,
SparseDataFrame,
datetime,
NamedAgg,
)
import threading
import os
import types
import sys
from .. import __version__
from .concat import concat
from .dataframe import DataFrame
from .datetimes import to_datetime
from .io import (
read_csv,
read_parquet,
read_json,
read_html,
read_clipboard,
read_excel,
read_hdf,
read_feather,
read_msgpack,
read_stata,
read_sas,
read_pickle,
read_sql,
read_gbq,
read_table,
read_fwf,
read_sql_table,
read_sql_query,
read_spss,
ExcelFile,
to_pickle,
HDFStore,
)
from .reshape import get_dummies, melt, crosstab, lreshape, wide_to_long
from .series import Series
from .general import (
isna,
isnull,
merge,
merge_asof,
merge_ordered,
pivot_table,
notnull,
notna,
pivot,
)
from .plotting import Plotting as plotting
from .. import __execution_engine__ as execution_engine
# Set this so that Pandas doesn't try to multithread by itself
os.environ["OMP_NUM_THREADS"] = "1"
# Fallback CPU count; overwritten below when an execution engine reports one.
num_cpus = 1
def initialize_ray():
    """Initializes ray based on environment variables and internal defaults."""
    # FIX: the docstring originally appeared *after* `import ray`, where it is
    # a no-op string expression instead of the function's __doc__.
    import ray

    # Only the main thread may (re)initialize ray; other threads skip it.
    if threading.current_thread().name == "MainThread":
        plasma_directory = None
        cluster = os.environ.get("MODIN_RAY_CLUSTER", None)
        redis_address = os.environ.get("MODIN_REDIS_ADDRESS", None)
        if cluster == "True" and redis_address is not None:
            # We only start ray in a cluster setting for the head node.
            ray.init(
                include_webui=False,
                ignore_reinit_error=True,
                redis_address=redis_address,
                logging_level=100,
            )
        elif cluster is None:
            object_store_memory = os.environ.get("MODIN_MEMORY", None)
            if os.environ.get("MODIN_OUT_OF_CORE", "False").title() == "True":
                from tempfile import gettempdir

                plasma_directory = gettempdir()
                # We may have already set the memory from the environment variable, we don't
                # want to overwrite that value if we have.
                if object_store_memory is None:
                    # Round down to the nearest Gigabyte.
                    mem_bytes = ray.utils.get_system_memory() // 10 ** 9 * 10 ** 9
                    # Default to 8x memory for out of core
                    object_store_memory = 8 * mem_bytes
            # In case anything failed above, we can still improve the memory for Modin.
            if object_store_memory is None:
                # Round down to the nearest Gigabyte.
                object_store_memory = int(
                    0.6 * ray.utils.get_system_memory() // 10 ** 9 * 10 ** 9
                )
                # If the memory pool is smaller than 2GB, just use the default in ray.
                if object_store_memory == 0:
                    object_store_memory = None
            else:
                object_store_memory = int(object_store_memory)
            ray.init(
                include_webui=False,
                ignore_reinit_error=True,
                plasma_directory=plasma_directory,
                object_store_memory=object_store_memory,
                redis_address=redis_address,
                logging_level=100,
                memory=object_store_memory,
            )
        # Register custom serializer for method objects to avoid warning message.
        # We serialize `MethodType` objects when we use AxisPartition operations.
        ray.register_custom_serializer(types.MethodType, use_pickle=True)

        # Register a fix import function to run on all_workers including the driver.
        # This is a hack solution to fix #647, #746
        def move_stdlib_ahead_of_site_packages(*args):
            site_packages_path = None
            site_packages_path_index = -1
            for i, path in enumerate(sys.path):
                if sys.exec_prefix in path and path.endswith("site-packages"):
                    site_packages_path = path
                    site_packages_path_index = i
                    # break on first found
                    break
            if site_packages_path is not None:
                # stdlib packages layout as follows:
                # - python3.x
                #   - typing.py
                #   - site-packages/
                #     - pandas
                # So extracting the dirname of the site_packages can point us
                # to the directory containing standard libraries.
                sys.path.insert(
                    site_packages_path_index, os.path.dirname(site_packages_path)
                )

        move_stdlib_ahead_of_site_packages()
        ray.worker.global_worker.run_function_on_all_workers(
            move_stdlib_ahead_of_site_packages
        )
# Engine bootstrap: start the backend named by __execution_engine__ and
# record how many CPUs it exposes (used to size DEFAULT_NPARTITIONS below).
if execution_engine == "Ray":
    import ray
    initialize_ray()
    num_cpus = ray.cluster_resources()["CPU"]
elif execution_engine == "Dask":  # pragma: no cover
    from distributed.client import _get_global_client
    import warnings
    warnings.warn("The Dask Engine for Modin is experimental.")
    if threading.current_thread().name == "MainThread":
        # initialize the dask client
        client = _get_global_client()
        if client is None:
            from distributed import Client
            import multiprocessing
            num_cpus = multiprocessing.cpu_count()
            client = Client(n_workers=num_cpus)
elif execution_engine != "Python":
    raise ImportError("Unrecognized execution engine: {}.".format(execution_engine))
# At least 4 partitions, otherwise one per CPU the engine reported.
DEFAULT_NPARTITIONS = max(4, int(num_cpus))
# Public API re-exported by this module: mirrors the pandas namespace plus
# Modin-specific names such as DEFAULT_NPARTITIONS.
__all__ = [
    "DataFrame",
    "Series",
    "read_csv",
    "read_parquet",
    "read_json",
    "read_html",
    "read_clipboard",
    "read_excel",
    "read_hdf",
    "read_feather",
    "read_msgpack",
    "read_stata",
    "read_sas",
    "read_pickle",
    "read_sql",
    "read_gbq",
    "read_table",
    "read_spss",
    "concat",
    "eval",
    "unique",
    "value_counts",
    "cut",
    "to_numeric",
    "factorize",
    "test",
    "qcut",
    "to_datetime",
    "get_dummies",
    "isna",
    "isnull",
    "merge",
    "pivot_table",
    "date_range",
    "Index",
    "MultiIndex",
    "Series",
    "bdate_range",
    "period_range",
    "DatetimeIndex",
    "to_timedelta",
    "set_eng_float_format",
    "options",
    "set_option",
    "CategoricalIndex",
    "Timedelta",
    "Timestamp",
    "NaT",
    "PeriodIndex",
    "Categorical",
    "__version__",
    "melt",
    "crosstab",
    "plotting",
    "Interval",
    "UInt8Dtype",
    "UInt16Dtype",
    "UInt32Dtype",
    "UInt64Dtype",
    "SparseDtype",
    "Int8Dtype",
    "Int16Dtype",
    "Int32Dtype",
    "Int64Dtype",
    "CategoricalDtype",
    "DatetimeTZDtype",
    "IntervalDtype",
    "PeriodDtype",
    "RangeIndex",
    "Int64Index",
    "UInt64Index",
    "Float64Index",
    "TimedeltaIndex",
    "IntervalIndex",
    "IndexSlice",
    "Grouper",
    "array",
    "Period",
    "show_versions",
    "DateOffset",
    "timedelta_range",
    "infer_freq",
    "interval_range",
    "ExcelWriter",
    "read_fwf",
    "read_sql_table",
    "read_sql_query",
    "ExcelFile",
    "to_pickle",
    "HDFStore",
    "lreshape",
    "wide_to_long",
    "merge_asof",
    "merge_ordered",
    "notnull",
    "notna",
    "pivot",
    "SparseArray",
    "SparseSeries",
    "SparseDataFrame",
    "datetime",
    "NamedAgg",
    "DEFAULT_NPARTITIONS",
]
# The raw pandas module was only needed for the version check above; remove
# it from this namespace so users don't reach it by accident.
del pandas
|
[
"ray.init",
"ray.register_custom_serializer",
"os.path.dirname",
"tempfile.gettempdir",
"ray.utils.get_system_memory",
"distributed.client._get_global_client",
"ray.worker.global_worker.run_function_on_all_workers",
"os.environ.get",
"distributed.Client",
"warnings.warn",
"threading.current_thread",
"ray.cluster_resources",
"multiprocessing.cpu_count"
] |
[((2509, 2550), 'os.environ.get', 'os.environ.get', (['"""MODIN_RAY_CLUSTER"""', 'None'], {}), "('MODIN_RAY_CLUSTER', None)\n", (2523, 2550), False, 'import os\n'), ((2575, 2618), 'os.environ.get', 'os.environ.get', (['"""MODIN_REDIS_ADDRESS"""', 'None'], {}), "('MODIN_REDIS_ADDRESS', None)\n", (2589, 2618), False, 'import os\n'), ((4782, 4847), 'ray.register_custom_serializer', 'ray.register_custom_serializer', (['types.MethodType'], {'use_pickle': '(True)'}), '(types.MethodType, use_pickle=True)\n', (4812, 4847), False, 'import ray\n'), ((5974, 6067), 'ray.worker.global_worker.run_function_on_all_workers', 'ray.worker.global_worker.run_function_on_all_workers', (['move_stdlib_ahead_of_site_packages'], {}), '(\n move_stdlib_ahead_of_site_packages)\n', (6026, 6067), False, 'import ray\n'), ((6169, 6192), 'ray.cluster_resources', 'ray.cluster_resources', ([], {}), '()\n', (6190, 6192), False, 'import ray\n'), ((6332, 6391), 'warnings.warn', 'warnings.warn', (['"""The Dask Engine for Modin is experimental."""'], {}), "('The Dask Engine for Modin is experimental.')\n", (6345, 6391), False, 'import warnings\n'), ((2410, 2436), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (2434, 2436), False, 'import threading\n'), ((2763, 2871), 'ray.init', 'ray.init', ([], {'include_webui': '(False)', 'ignore_reinit_error': '(True)', 'redis_address': 'redis_address', 'logging_level': '(100)'}), '(include_webui=False, ignore_reinit_error=True, redis_address=\n redis_address, logging_level=100)\n', (2771, 2871), False, 'import ray\n'), ((6503, 6523), 'distributed.client._get_global_client', '_get_global_client', ([], {}), '()\n', (6521, 6523), False, 'from distributed.client import _get_global_client\n'), ((3010, 3046), 'os.environ.get', 'os.environ.get', (['"""MODIN_MEMORY"""', 'None'], {}), "('MODIN_MEMORY', None)\n", (3024, 3046), False, 'import os\n'), ((4275, 4491), 'ray.init', 'ray.init', ([], {'include_webui': '(False)', 'ignore_reinit_error': 
'(True)', 'plasma_directory': 'plasma_directory', 'object_store_memory': 'object_store_memory', 'redis_address': 'redis_address', 'logging_level': '(100)', 'memory': 'object_store_memory'}), '(include_webui=False, ignore_reinit_error=True, plasma_directory=\n plasma_directory, object_store_memory=object_store_memory,\n redis_address=redis_address, logging_level=100, memory=object_store_memory)\n', (4283, 4491), False, 'import ray\n'), ((6400, 6426), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (6424, 6426), False, 'import threading\n'), ((6653, 6680), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (6678, 6680), False, 'import multiprocessing\n'), ((6702, 6728), 'distributed.Client', 'Client', ([], {'n_workers': 'num_cpus'}), '(n_workers=num_cpus)\n', (6708, 6728), False, 'from distributed import Client\n'), ((3210, 3222), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (3220, 3222), False, 'from tempfile import gettempdir\n'), ((5866, 5901), 'os.path.dirname', 'os.path.dirname', (['site_packages_path'], {}), '(site_packages_path)\n', (5881, 5901), False, 'import os\n'), ((3062, 3106), 'os.environ.get', 'os.environ.get', (['"""MODIN_OUT_OF_CORE"""', '"""False"""'], {}), "('MODIN_OUT_OF_CORE', 'False')\n", (3076, 3106), False, 'import os\n'), ((3513, 3542), 'ray.utils.get_system_memory', 'ray.utils.get_system_memory', ([], {}), '()\n', (3540, 3542), False, 'import ray\n'), ((3934, 3963), 'ray.utils.get_system_memory', 'ray.utils.get_system_memory', ([], {}), '()\n', (3961, 3963), False, 'import ray\n')]
|
"""
Test suite for ssp.securityplan module.
"""
import os
import sys
import pytest
import docx
from ssp.securityplan import SecurityPlan
# Absolute path of the directory holding this test module (and its fixtures).
TEST_DIR = os.path.dirname(os.path.abspath(__file__))


class TestSecurityPlan(object):
    """Unit tests for :class:`ssp.securityplan.SecurityPlan`."""

    def test_that_word_doc_opens(self):
        """A plain .docx opens — i.e. python-docx is wired up correctly."""
        plan = SecurityPlan(TEST_DIR + '/test_files/test.docx')
        assert isinstance(plan.document, docx.document.Document)

    def test_get_version_returns_correct_version(self):
        """The template version string is recovered from a blank template."""
        version = '08282018'
        template = (TEST_DIR + '/test_files/blank_templates/' + version + '/'
                    + 'FedRAMP-SSP-High-Baseline-Template.docx')
        plan = SecurityPlan(template)
        assert plan.version == version

    def test_control(self):
        """Looking up a control returns an object tagged with its number."""
        control = 'AC-1'
        version = '08282018'
        template = (TEST_DIR + '/test_files/blank_templates/' + version + '/'
                    + 'FedRAMP-SSP-High-Baseline-Template.docx')
        plan = SecurityPlan(template)
        control_object = plan.control(control)
        assert control_object.number == control
|
[
"os.path.abspath",
"ssp.securityplan.SecurityPlan"
] |
[((167, 192), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (182, 192), False, 'import os\n'), ((444, 462), 'ssp.securityplan.SecurityPlan', 'SecurityPlan', (['path'], {}), '(path)\n', (456, 462), False, 'from ssp.securityplan import SecurityPlan\n'), ((874, 904), 'ssp.securityplan.SecurityPlan', 'SecurityPlan', (['(path + file_name)'], {}), '(path + file_name)\n', (886, 904), False, 'from ssp.securityplan import SecurityPlan\n'), ((1198, 1228), 'ssp.securityplan.SecurityPlan', 'SecurityPlan', (['(path + file_name)'], {}), '(path + file_name)\n', (1210, 1228), False, 'from ssp.securityplan import SecurityPlan\n')]
|
import argparse
import logging
from typing import Union
import pytorch_lightning as pl
import torch
from torch.utils.data import DataLoader
from retinanet import Retinanet
from utils import collate_fn, load_obj
class RetinaNetModel(pl.LightningModule):
    """
    Lightning wrapper around the RetinaNet model so it can be trained
    with a ``pytorch_lightning`` Trainer.

    Args:
        hparams (DictConfig or argparse.Namespace): configuration object
            exposing ``model``, ``optimizer`` and ``scheduler`` sections
            (see ``configure_optimizers`` for the fields read from them).
    """

    # NOTE: ``DictConfig`` (omegaconf) is not imported in this module, so the
    # annotation must be a string forward reference — an unquoted name would
    # raise NameError when the ``def`` is evaluated at class-creation time.
    def __init__(self, hparams: Union["DictConfig", argparse.Namespace]):
        super(RetinaNetModel, self).__init__()
        self.hparams = hparams
        self.net = Retinanet(**hparams.model, logger=logging.getLogger("lightning"))
        # Expose the optimizer lr as a top-level hyper-parameter so Lightning
        # tooling (loggers, lr finders) can see it directly.
        self.hparams.learning_rate = self.hparams.optimizer.params.lr
        self.save_hyperparameters(hparams)

    def forward(self, xb, *args, **kwargs):
        """Run a forward pass through the wrapped RetinaNet."""
        output = self.net(xb)
        return output

    def configure_optimizers(self, *args, **kwargs):
        """Instantiate the optimizer (and optional LR scheduler) from hparams."""
        opt = self.hparams.optimizer.class_name
        self.optimizer = load_obj(opt)(self.net.parameters(), **self.hparams.optimizer.params)
        if self.hparams.scheduler.class_name is None:
            return [self.optimizer]
        else:
            schedps = self.hparams.scheduler
            __scheduler = load_obj(schedps.class_name)(self.optimizer, **schedps.params)
            # ``monitor`` is only meaningful for metric-driven schedulers
            # (e.g. ReduceLROnPlateau); include it only when configured.
            if not self.hparams.scheduler.monitor:
                self.scheduler = {"scheduler": __scheduler, "interval": schedps.interval, "frequency": schedps.frequency,}
            else:
                self.scheduler = {"scheduler": __scheduler, "interval": schedps.interval, "frequency": schedps.frequency, "monitor": schedps.monitor,}
            return [self.optimizer], [self.scheduler]

    def training_step(self, batch, batch_idx, *args, **kwargs):
        """One training step: returns the summed detection losses."""
        images, targets, _ = batch  # unpack the one batch from the DataLoader
        targets = [{k: v for k, v in t.items()} for t in targets]  # unpack the targets
        # In train mode the network returns a loss dict
        # {regression_loss, classification_loss}.
        loss_dict = self.net(images, targets)
        losses = sum(loss for loss in loss_dict.values())
        return {"loss": losses, "log": loss_dict, "progress_bar": loss_dict}

    def validation_step(self, batch, batch_idx, *args, **kwargs):
        """One validation step: same loss computation as training."""
        images, targets, _ = batch  # unpack the one batch from the DataLoader
        targets = [{k: v for k, v in t.items()} for t in targets]  # unpack the targets
        loss_dict = self.net(images, targets)
        loss = sum(loss for loss in loss_dict.values())
        loss = torch.as_tensor(loss)
        logs = {"val_loss": loss}
        return {"val_loss": loss, "log": logs, "progress_bar": logs,}

    def test_step(self, batch, batch_idx, *args, **kwargs):
        """Run prediction and feed the results to the COCO-style evaluator.

        NOTE(review): ``self.test_evaluator`` is not created in this class —
        it is assumed to be attached externally before testing; confirm with
        the training script.
        """
        images, targets, _ = batch
        targets = [{k: v for k, v in t.items()} for t in targets]
        outputs = self.net.predict(images)
        res = {t["image_id"].item(): o for t, o in zip(targets, outputs)}
        self.test_evaluator.update(res)
        return {}

    def test_epoch_end(self, outputs, *args, **kwargs):
        """Finalize the evaluator and report AP (COCO bbox stat[0])."""
        self.test_evaluator.accumulate()
        self.test_evaluator.summarize()
        metric = self.test_evaluator.coco_eval["bbox"].stats[0]
        metric = torch.as_tensor(metric)
        logs = {"AP": metric}
        return {"AP": metric, "log": logs, "progress_bar": logs,}
|
[
"torch.as_tensor",
"utils.load_obj",
"logging.getLogger"
] |
[((2757, 2778), 'torch.as_tensor', 'torch.as_tensor', (['loss'], {}), '(loss)\n', (2772, 2778), False, 'import torch\n'), ((3437, 3460), 'torch.as_tensor', 'torch.as_tensor', (['metric'], {}), '(metric)\n', (3452, 3460), False, 'import torch\n'), ((1094, 1107), 'utils.load_obj', 'load_obj', (['opt'], {}), '(opt)\n', (1102, 1107), False, 'from utils import collate_fn, load_obj\n'), ((675, 705), 'logging.getLogger', 'logging.getLogger', (['"""lightning"""'], {}), "('lightning')\n", (692, 705), False, 'import logging\n'), ((1340, 1368), 'utils.load_obj', 'load_obj', (['schedps.class_name'], {}), '(schedps.class_name)\n', (1348, 1368), False, 'from utils import collate_fn, load_obj\n')]
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
import argparse
import os
import onnx
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.data import build_detection_test_loader
from detectron2.evaluation import COCOEvaluator, inference_on_dataset, print_csv_format
from detectron2.export import Caffe2Tracer, add_export_config
from detectron2.modeling import build_model
from detectron2.utils.logger import setup_logger
def setup_cfg(args):
    """Build a frozen detectron2 config from the parsed CLI arguments."""
    base = get_cfg()
    # The CUDA context is initialized before the dataloader is created, so
    # forking dataloader worker processes is no longer safe — keep it at 0.
    base.DATALOADER.NUM_WORKERS = 0
    cfg = add_export_config(base)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    return cfg
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Convert a model using caffe2 tracing.")
parser.add_argument(
"--format",
choices=["caffe2", "onnx", "torchscript"],
help="output format",
default="caffe2",
)
parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file")
parser.add_argument("--run-eval", action="store_true")
parser.add_argument("--output", help="output directory for the converted model")
parser.add_argument(
"opts",
help="Modify config options using the command-line",
default=None,
nargs=argparse.REMAINDER,
)
args = parser.parse_args()
logger = setup_logger()
logger.info("Command line arguments: " + str(args))
os.makedirs(args.output, exist_ok=True)
cfg = setup_cfg(args)
# create a torch model
torch_model = build_model(cfg)
DetectionCheckpointer(torch_model).resume_or_load(cfg.MODEL.WEIGHTS)
# get a sample data
data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0])
first_batch = next(iter(data_loader))
# convert and save caffe2 model
tracer = Caffe2Tracer(cfg, torch_model, first_batch)
if args.format == "caffe2":
caffe2_model = tracer.export_caffe2()
caffe2_model.save_protobuf(args.output)
# draw the caffe2 graph
caffe2_model.save_graph(os.path.join(args.output, "model.svg"), inputs=first_batch)
elif args.format == "onnx":
onnx_model = tracer.export_onnx()
onnx.save(onnx_model, os.path.join(args.output, "model.onnx"))
elif args.format == "torchscript":
ts_model = tracer.export_torchscript()
ts_model.save(os.path.join(args.output, "model.ts"))
from detectron2.export.torchscript import dump_torchscript_IR
dump_torchscript_IR(ts_model, args.output)
# run evaluation with the converted model
if args.run_eval:
assert args.format == "caffe2", "Python inference in other format is not yet supported."
logger.info("Running evaluation ... this takes a long time if you export to CPU.")
dataset = cfg.DATASETS.TEST[0]
data_loader = build_detection_test_loader(cfg, dataset)
# NOTE: hard-coded evaluator. change to the evaluator for your dataset
evaluator = COCOEvaluator(dataset, output_dir=args.output)
metrics = inference_on_dataset(caffe2_model, data_loader, evaluator)
print_csv_format(metrics)
|
[
"os.makedirs",
"argparse.ArgumentParser",
"detectron2.evaluation.print_csv_format",
"detectron2.modeling.build_model",
"detectron2.utils.logger.setup_logger",
"detectron2.evaluation.COCOEvaluator",
"detectron2.config.get_cfg",
"detectron2.export.Caffe2Tracer",
"detectron2.export.add_export_config",
"detectron2.export.torchscript.dump_torchscript_IR",
"detectron2.data.build_detection_test_loader",
"detectron2.evaluation.inference_on_dataset",
"os.path.join",
"detectron2.checkpoint.DetectionCheckpointer"
] |
[((538, 547), 'detectron2.config.get_cfg', 'get_cfg', ([], {}), '()\n', (545, 547), False, 'from detectron2.config import get_cfg\n'), ((680, 702), 'detectron2.export.add_export_config', 'add_export_config', (['cfg'], {}), '(cfg)\n', (697, 702), False, 'from detectron2.export import Caffe2Tracer, add_export_config\n'), ((854, 930), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Convert a model using caffe2 tracing."""'}), "(description='Convert a model using caffe2 tracing.')\n", (877, 930), False, 'import argparse\n'), ((1538, 1552), 'detectron2.utils.logger.setup_logger', 'setup_logger', ([], {}), '()\n', (1550, 1552), False, 'from detectron2.utils.logger import setup_logger\n'), ((1613, 1652), 'os.makedirs', 'os.makedirs', (['args.output'], {'exist_ok': '(True)'}), '(args.output, exist_ok=True)\n', (1624, 1652), False, 'import os\n'), ((1726, 1742), 'detectron2.modeling.build_model', 'build_model', (['cfg'], {}), '(cfg)\n', (1737, 1742), False, 'from detectron2.modeling import build_model\n'), ((1859, 1913), 'detectron2.data.build_detection_test_loader', 'build_detection_test_loader', (['cfg', 'cfg.DATASETS.TEST[0]'], {}), '(cfg, cfg.DATASETS.TEST[0])\n', (1886, 1913), False, 'from detectron2.data import build_detection_test_loader\n'), ((2006, 2049), 'detectron2.export.Caffe2Tracer', 'Caffe2Tracer', (['cfg', 'torch_model', 'first_batch'], {}), '(cfg, torch_model, first_batch)\n', (2018, 2049), False, 'from detectron2.export import Caffe2Tracer, add_export_config\n'), ((3032, 3073), 'detectron2.data.build_detection_test_loader', 'build_detection_test_loader', (['cfg', 'dataset'], {}), '(cfg, dataset)\n', (3059, 3073), False, 'from detectron2.data import build_detection_test_loader\n'), ((3173, 3219), 'detectron2.evaluation.COCOEvaluator', 'COCOEvaluator', (['dataset'], {'output_dir': 'args.output'}), '(dataset, output_dir=args.output)\n', (3186, 3219), False, 'from detectron2.evaluation import COCOEvaluator, inference_on_dataset, 
print_csv_format\n'), ((3238, 3296), 'detectron2.evaluation.inference_on_dataset', 'inference_on_dataset', (['caffe2_model', 'data_loader', 'evaluator'], {}), '(caffe2_model, data_loader, evaluator)\n', (3258, 3296), False, 'from detectron2.evaluation import COCOEvaluator, inference_on_dataset, print_csv_format\n'), ((3305, 3330), 'detectron2.evaluation.print_csv_format', 'print_csv_format', (['metrics'], {}), '(metrics)\n', (3321, 3330), False, 'from detectron2.evaluation import COCOEvaluator, inference_on_dataset, print_csv_format\n'), ((1747, 1781), 'detectron2.checkpoint.DetectionCheckpointer', 'DetectionCheckpointer', (['torch_model'], {}), '(torch_model)\n', (1768, 1781), False, 'from detectron2.checkpoint import DetectionCheckpointer\n'), ((2240, 2278), 'os.path.join', 'os.path.join', (['args.output', '"""model.svg"""'], {}), "(args.output, 'model.svg')\n", (2252, 2278), False, 'import os\n'), ((2404, 2443), 'os.path.join', 'os.path.join', (['args.output', '"""model.onnx"""'], {}), "(args.output, 'model.onnx')\n", (2416, 2443), False, 'import os\n'), ((2671, 2713), 'detectron2.export.torchscript.dump_torchscript_IR', 'dump_torchscript_IR', (['ts_model', 'args.output'], {}), '(ts_model, args.output)\n', (2690, 2713), False, 'from detectron2.export.torchscript import dump_torchscript_IR\n'), ((2553, 2590), 'os.path.join', 'os.path.join', (['args.output', '"""model.ts"""'], {}), "(args.output, 'model.ts')\n", (2565, 2590), False, 'import os\n')]
|
#!/usr/bin/env python2
# coding: utf-8
"""Test ONLY_ON_SYMBOLIZED."""
import unittest
from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess
def checkAstIntegrity(instruction):
    """
    Check that every AST attached to an Instruction is still available.

    Renders (via ``str``) the AST of each symbolic expression plus the
    ASTs paired with the instruction's load/store accesses, read/written
    registers and read immediates.

    Returns:
        bool: True when every AST can be rendered, False when rendering
        any of them raises.
    """
    try:
        for se in instruction.getSymbolicExpressions():
            str(se.getAst())
        for _, node in instruction.getLoadAccess():
            str(node)
        for _, node in instruction.getStoreAccess():
            str(node)
        for _, node in instruction.getReadRegisters():
            str(node)
        for _, node in instruction.getWrittenRegisters():
            str(node)
        for _, node in instruction.getReadImmediates():
            str(node)
        return True
    # Exception (not a bare except) so KeyboardInterrupt/SystemExit still
    # propagate instead of being reported as an AST-integrity failure.
    except Exception:
        return False
class TestOnlySymbolizedMode(unittest.TestCase):
    """Testing the ONLY_ON_SYMBOLIZED mode.

    Each test processes one x86-64 instruction with different combinations
    of the ONLY_ON_SYMBOLIZED mode flag and symbolized registers/memory,
    then checks which accesses/registers Triton recorded.
    """

    def test_1(self):
        # Mode off: both operands of a reg-to-reg mov are recorded.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, False)
        inst = Instruction(b"\x48\x89\xc3") # mov rbx, rax
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 1)
        self.assertEqual(len(inst.getWrittenRegisters()), 2)
        # Mode on with nothing symbolized: nothing should be recorded.
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 0)
        self.assertEqual(len(inst.getWrittenRegisters()), 0)
        self.assertEqual(len(inst.getLoadAccess()), 0)
        self.assertEqual(len(inst.getStoreAccess()), 0)

    def test_2(self):
        # Mode on with rax symbolized: the symbolic source and its
        # destination are tracked.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        ctx.symbolizeRegister(ctx.registers.rax)
        inst = Instruction(b"\x48\x89\xc3") # mov rbx, rax
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 1)
        self.assertEqual(len(inst.getWrittenRegisters()), 1)
        self.assertEqual(len(inst.getLoadAccess()), 0)
        self.assertEqual(len(inst.getStoreAccess()), 0)

    def test_3(self):
        # Default mode (ONLY_ON_SYMBOLIZED not enabled): a memory load is
        # recorded along with its address register.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        inst = Instruction(b"\x48\x8b\x18") # mov rbx, qword ptr [rax]
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 1)
        self.assertEqual(len(inst.getWrittenRegisters()), 2)
        self.assertEqual(len(inst.getLoadAccess()), 1)
        self.assertEqual(len(inst.getStoreAccess()), 0)

    def test_4(self):
        # Mode on, only the address register (rax) symbolized: the loaded
        # value itself is concrete, so no load/write is recorded.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        ctx.symbolizeRegister(ctx.registers.rax)
        inst = Instruction(b"\x48\x8b\x18") # mov rbx, qword ptr [rax]
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 1)
        self.assertEqual(len(inst.getWrittenRegisters()), 0)
        self.assertEqual(len(inst.getLoadAccess()), 0)
        self.assertEqual(len(inst.getStoreAccess()), 0)

    def test_5(self):
        # Mode on, the loaded memory cell symbolized: the load and the
        # destination register are tracked, the (concrete) rax is not.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        ctx.symbolizeMemory(MemoryAccess(0, CPUSIZE.QWORD))
        inst = Instruction(b"\x48\x8b\x18") # mov rbx, qword ptr [rax]
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 0)
        self.assertEqual(len(inst.getWrittenRegisters()), 1)
        self.assertEqual(len(inst.getLoadAccess()), 1)
        self.assertEqual(len(inst.getStoreAccess()), 0)

    def test_6(self):
        # Mode on, both the address register and the memory symbolized:
        # everything involved in the load is tracked.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        ctx.symbolizeRegister(ctx.registers.rax)
        ctx.symbolizeMemory(MemoryAccess(0, CPUSIZE.QWORD))
        inst = Instruction(b"\x48\x8b\x18") # mov rbx, qword ptr [rax]
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(len(inst.getReadRegisters()), 1)
        self.assertEqual(len(inst.getWrittenRegisters()), 1)
        self.assertEqual(len(inst.getLoadAccess()), 1)
        self.assertEqual(len(inst.getStoreAccess()), 0)

    def test_7(self):
        # Mode on, concrete rax = 0x1337: the effective address is computed
        # but, with nothing symbolized, no LEA AST is attached.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        ctx.setConcreteRegisterValue(ctx.registers.rax, 0x1337)
        inst = Instruction(b"\x48\x8b\x18") # mov rbx, qword ptr [rax]
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(inst.getOperands()[1].getAddress(), 0x1337)
        self.assertIsNone(inst.getOperands()[1].getLeaAst())

    def test_8(self):
        # Same as test_7 but with rax and the memory symbolized: now the
        # memory operand carries a LEA AST.
        ctx = TritonContext()
        ctx.setArchitecture(ARCH.X86_64)
        ctx.setMode(MODE.ONLY_ON_SYMBOLIZED, True)
        ctx.setConcreteRegisterValue(ctx.registers.rax, 0x1337)
        ctx.symbolizeRegister(ctx.registers.rax)
        ctx.symbolizeMemory(MemoryAccess(0, CPUSIZE.QWORD))
        inst = Instruction(b"\x48\x8b\x18") # mov rbx, qword ptr [rax]
        self.assertTrue(ctx.processing(inst))
        self.assertTrue(checkAstIntegrity(inst))
        self.assertEqual(inst.getOperands()[1].getAddress(), 0x1337)
        self.assertIsNotNone(inst.getOperands()[1].getLeaAst())
|
[
"triton.MemoryAccess",
"triton.TritonContext",
"triton.Instruction"
] |
[((955, 970), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (968, 970), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((1080, 1105), 'triton.Instruction', 'Instruction', (["b'H\\x89\\xc3'"], {}), "(b'H\\x89\\xc3')\n", (1091, 1105), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((1755, 1770), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (1768, 1770), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((1928, 1953), 'triton.Instruction', 'Instruction', (["b'H\\x89\\xc3'"], {}), "(b'H\\x89\\xc3')\n", (1939, 1953), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((2335, 2350), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (2348, 2350), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((2408, 2433), 'triton.Instruction', 'Instruction', (["b'H\\x8b\\x18'"], {}), "(b'H\\x8b\\x18')\n", (2419, 2433), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((2827, 2842), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (2840, 2842), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((3000, 3025), 'triton.Instruction', 'Instruction', (["b'H\\x8b\\x18'"], {}), "(b'H\\x8b\\x18')\n", (3011, 3025), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((3419, 3434), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (3432, 3434), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((3603, 3628), 'triton.Instruction', 'Instruction', (["b'H\\x8b\\x18'"], {}), "(b'H\\x8b\\x18')\n", (3614, 3628), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((4022, 4037), 'triton.TritonContext', 
'TritonContext', ([], {}), '()\n', (4035, 4037), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((4255, 4280), 'triton.Instruction', 'Instruction', (["b'H\\x8b\\x18'"], {}), "(b'H\\x8b\\x18')\n", (4266, 4280), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((4674, 4689), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (4687, 4689), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((4862, 4887), 'triton.Instruction', 'Instruction', (["b'H\\x8b\\x18'"], {}), "(b'H\\x8b\\x18')\n", (4873, 4887), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((5181, 5196), 'triton.TritonContext', 'TritonContext', ([], {}), '()\n', (5194, 5196), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((5478, 5503), 'triton.Instruction', 'Instruction', (["b'H\\x8b\\x18'"], {}), "(b'H\\x8b\\x18')\n", (5489, 5503), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((3555, 3585), 'triton.MemoryAccess', 'MemoryAccess', (['(0)', 'CPUSIZE.QWORD'], {}), '(0, CPUSIZE.QWORD)\n', (3567, 3585), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((4207, 4237), 'triton.MemoryAccess', 'MemoryAccess', (['(0)', 'CPUSIZE.QWORD'], {}), '(0, CPUSIZE.QWORD)\n', (4219, 4237), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n'), ((5430, 5460), 'triton.MemoryAccess', 'MemoryAccess', (['(0)', 'CPUSIZE.QWORD'], {}), '(0, CPUSIZE.QWORD)\n', (5442, 5460), False, 'from triton import ARCH, MODE, CPUSIZE, TritonContext, Instruction, MemoryAccess\n')]
|
"""
General Setup and Imports
"""
get_ipython().run_line_magic('matplotlib', 'tk')
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.callbacks.best_effort import BestEffortCallback
from bluesky.plans import *
from bluesky.preprocessors import run_wrapper
from bluesky.utils import install_nb_kicker
from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe
from functools import partial
from ophyd import Device, Component as Cpt
from ophyd.sim import SynAxis, SynSignal
from ophyd.signal import EpicsSignalRO
from bluesky.callbacks import LivePlot
from pswalker.plans import walk_to_pixel
import pcdsdevices
import numpy as np
import random
from bluesky.simulators import summarize_plan
from pcdsdevices.device_types import Newport
import argparse
def centroid_from_motor_cross(motor, motor2, size=640., scale=1., noise_scale = 1, cross_scale = .1):
    """
    Simulate a camera centroid readback driven by two motors.

    The centroid follows ``motor`` linearly (factor ``scale``) with a small
    cross-coupling to ``motor2`` (factor ``cross_scale``), plus Gaussian
    noise of width ``noise_scale``.  Positions that land outside the
    detector (``[0, size]``) read back as 0.
    """
    # Draw the noise first so the global RNG state advances on every call,
    # even when the beam is off screen.
    jitter = np.random.normal(scale=noise_scale)
    raw = motor.position * scale + motor2.position * cross_scale
    # Off the screen in either direction: the camera reports 0.
    if raw < 0. or raw > size:
        return 0.
    return raw + jitter
def plan_simultaneously(x_centroid, y_centroid, x, y, x_target=None, y_target= None):
    """
    Bluesky plan that aligns a beam's centroid to a target pixel.

    Uses ``walk_to_pixel`` from pswalker iteratively on each axis until both
    centroids are within 3 pixels of their targets, while three live plots
    show the paths taken.  Missing targets are prompted for interactively
    via ``input()``.

    Parameters
    ----------
    x_centroid, y_centroid :
        Readback signals for the beam centroid on each axis.
    x, y :
        Motors that move the beam along the x and y axes.
    x_target, y_target : int, optional
        Target pixel on each axis; prompted for when None.
    """
    # Figure with three subplots: centroid path, and each centroid vs. motor.
    fig = plt.figure(figsize=(15,10))
    fig.subplots_adjust(hspace=0.3, wspace=0.4)
    # Subplot 1: y_centroid vs x_centroid (the path on the camera image;
    # y axis inverted to match image coordinates).
    ax1 = fig.add_subplot(2, 2, 1)
    ax1.invert_yaxis()
    x_centroid_y_centroid = LivePlot(y_centroid.name, x_centroid.name, ax = ax1, marker='x', markersize=7, color='orange')
    # Subplot 2: both centroids against the y motor (twin y axes).
    ax2 = fig.add_subplot(2, 2, 3)
    ax2.set_ylabel(y_centroid.name, color='red')
    ax3 = ax2.twinx()
    # ax2.invert_yaxis()
    # ax3.invert_yaxis()
    ax3.set_ylabel(x_centroid.name, color='blue')
    y_plot_y_centroid = LivePlot(y_centroid.name, y.name, ax = ax2, marker='x', markersize=6, color='red')
    y_plot_x_centroid = LivePlot(x_centroid.name, y.name, ax = ax3, marker='o', markersize=6, color='blue')
    # Subplot 3: both centroids against the x motor (twin y axes).
    ax4 = fig.add_subplot(2, 2, 4)
    ax4.set_ylabel(y_centroid.name, color='green')
    ax5 = ax4.twinx()
    ax5.set_ylabel(x_centroid.name, color='purple')
    x_plot_y_centroid = LivePlot(y_centroid.name, x.name, ax = ax4, marker='x', markersize=6, color='green')
    x_plot_x_centroid = LivePlot(x_centroid.name, x.name, ax = ax5, marker='o', markersize=6, color='purple')
    # Subscribe each plot to the RunEngine; keep the tokens so they can be
    # unsubscribed when the plan finishes.
    token_x_centroid_y_centroid = yield from subscribe('all', x_centroid_y_centroid)
    token_y_plot_x_centroid = yield from subscribe('all', y_plot_x_centroid)
    token_y_plot_y_centroid = yield from subscribe('all', y_plot_y_centroid)
    token_x_plot_x_centroid = yield from subscribe('all', x_plot_x_centroid)
    token_x_plot_y_centroid = yield from subscribe('all', x_plot_y_centroid)
    # Start a new run, declaring detectors/motors/hints in the metadata.
    yield from open_run(md={'detectors': [(x_centroid.name), (y_centroid.name)],
                 'motors': [(x.name), (y.name)],
                 'hints': {'dimensions': [(x.hints['fields'], 'primary'),
                                          (y.hints['fields'], 'primary')]}})
    # Ask for any target values not supplied by the caller.
    if x_target is None:
        x_target = int(input('Enter the x value: '))
    if y_target is None:
        y_target = int(input('Enter the y value: '))
    # Alternate axes: walk x until within 3 pixels, then y, repeating until
    # both centroids are close to their targets.
    while True:
        if not np.isclose(x_target, x_centroid.get(), atol=3):
            yield from walk_to_pixel(x_centroid, x, x_target, first_step=0.1,
                         target_fields=[x_centroid.name, x.name], tolerance = 3, average = 5,
                         system=[y, y_centroid])
        elif not np.isclose(y_target, y_centroid.get(), atol = 3):
            yield from walk_to_pixel(y_centroid, y, y_target, first_step=0.1, tolerance = 3, average = 5,
                         target_fields=[y_centroid.name, y.name],
                         system=[x, x_centroid])
        else:
            break
    # plt.show(block=True)
    # Close the run and tear down the plot subscriptions.
    yield from close_run()
    yield from unsubscribe(token_x_centroid_y_centroid)
    yield from unsubscribe(token_y_plot_x_centroid)
    yield from unsubscribe(token_y_plot_y_centroid)
    yield from unsubscribe(token_x_plot_x_centroid)
    yield from unsubscribe(token_x_plot_y_centroid)
if __name__ == '__main__':
    """
    Entry point for the spatial-overlap scan: builds either simulated or
    real (EPICS) motors and centroid readbacks, then runs the alignment
    plan with a RunEngine.
    """
    parser = argparse.ArgumentParser(description='Spatial overlap of timetool')
    parser.add_argument('--sim', action='store_true', default=False, help='Do a simulated scan')
    args = parser.parse_args()
    # Interactive matplotlib mode so plots refresh without blocking
    plt.ion()
    # Create a RunEngine to execute the plan
    RE = RunEngine()
    # Use BestEffortCallback for nice vizualizations during scans
    # NOTE(review): ``bec`` is created but never subscribed to RE — confirm
    # whether that is intentional.
    bec = BestEffortCallback()
    # Install our notebook kicker to have plots update during a scan
    install_nb_kicker()
    if args.sim:
        # Simulated motors and centroids wired through
        # centroid_from_motor_cross (x gets extra noise).
        x_motor = SynAxis(name='x')
        y_motor = SynAxis(name='y')
        x_centroid = SynSignal(func=partial(centroid_from_motor_cross, x_motor,y_motor, noise_scale = 1), name='x_syn')
        y_centroid = SynSignal(func=partial(centroid_from_motor_cross, y_motor,x_motor), name='y_syn')
        print('Running Simulated Scan')
    else:
        # Real hardware: Newport motors and Opal camera centroid PVs.
        x_motor = Newport('XPP:LAS:MMN:13', name = 'real_x')
        y_motor = Newport('XPP:LAS:MMN:14', name = 'real_y')
        x_centroid = EpicsSignalRO('XPP:OPAL1K:01:Stats2:CentroidX_RBV', name = 'x_readback')
        y_centroid = EpicsSignalRO('XPP:OPAL1K:01:Stats2:CentroidY_RBV', name = 'y_readback')
        print('Running Real Scan')
    # Execute the alignment plan
    RE(plan_simultaneously(x_centroid, y_centroid, x_motor, y_motor), md={'plan_name': 'special'})
    print('Spatial Overlap Scan is complete')
    """
    Things to fix/consider:
        Lose ipython dependency
        User can set tolerance(Look at Spatial_Overlap_Scan_Annotated_Dependecoes.py)
        Solve edge case:
            Limits of the motor motion
    """
|
[
"bluesky.utils.install_nb_kicker",
"bluesky.plan_stubs.close_run",
"ophyd.signal.EpicsSignalRO",
"functools.partial",
"argparse.ArgumentParser",
"bluesky.plan_stubs.unsubscribe",
"bluesky.callbacks.LivePlot",
"bluesky.callbacks.best_effort.BestEffortCallback",
"pswalker.plans.walk_to_pixel",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.figure",
"bluesky.RunEngine",
"bluesky.plan_stubs.subscribe",
"pcdsdevices.device_types.Newport",
"numpy.random.normal",
"bluesky.plan_stubs.open_run",
"ophyd.sim.SynAxis"
] |
[((997, 1032), 'numpy.random.normal', 'np.random.normal', ([], {'scale': 'noise_scale'}), '(scale=noise_scale)\n', (1013, 1032), True, 'import numpy as np\n'), ((2125, 2153), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 10)'}), '(figsize=(15, 10))\n', (2135, 2153), True, 'import matplotlib.pyplot as plt\n'), ((2357, 2453), 'bluesky.callbacks.LivePlot', 'LivePlot', (['y_centroid.name', 'x_centroid.name'], {'ax': 'ax1', 'marker': '"""x"""', 'markersize': '(7)', 'color': '"""orange"""'}), "(y_centroid.name, x_centroid.name, ax=ax1, marker='x', markersize=7,\n color='orange')\n", (2365, 2453), False, 'from bluesky.callbacks import LivePlot\n'), ((2783, 2868), 'bluesky.callbacks.LivePlot', 'LivePlot', (['y_centroid.name', 'y.name'], {'ax': 'ax2', 'marker': '"""x"""', 'markersize': '(6)', 'color': '"""red"""'}), "(y_centroid.name, y.name, ax=ax2, marker='x', markersize=6, color='red'\n )\n", (2791, 2868), False, 'from bluesky.callbacks import LivePlot\n'), ((2890, 2976), 'bluesky.callbacks.LivePlot', 'LivePlot', (['x_centroid.name', 'y.name'], {'ax': 'ax3', 'marker': '"""o"""', 'markersize': '(6)', 'color': '"""blue"""'}), "(x_centroid.name, y.name, ax=ax3, marker='o', markersize=6, color=\n 'blue')\n", (2898, 2976), False, 'from bluesky.callbacks import LivePlot\n'), ((3257, 3344), 'bluesky.callbacks.LivePlot', 'LivePlot', (['y_centroid.name', 'x.name'], {'ax': 'ax4', 'marker': '"""x"""', 'markersize': '(6)', 'color': '"""green"""'}), "(y_centroid.name, x.name, ax=ax4, marker='x', markersize=6, color=\n 'green')\n", (3265, 3344), False, 'from bluesky.callbacks import LivePlot\n'), ((3367, 3455), 'bluesky.callbacks.LivePlot', 'LivePlot', (['x_centroid.name', 'x.name'], {'ax': 'ax5', 'marker': '"""o"""', 'markersize': '(6)', 'color': '"""purple"""'}), "(x_centroid.name, x.name, ax=ax5, marker='o', markersize=6, color=\n 'purple')\n", (3375, 3455), False, 'from bluesky.callbacks import LivePlot\n'), ((5710, 5776), 'argparse.ArgumentParser', 
'argparse.ArgumentParser', ([], {'description': '"""Spatial overlap of timetool"""'}), "(description='Spatial overlap of timetool')\n", (5733, 5776), False, 'import argparse\n'), ((5952, 5961), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (5959, 5961), True, 'import matplotlib.pyplot as plt\n'), ((5996, 6007), 'bluesky.RunEngine', 'RunEngine', ([], {}), '()\n', (6005, 6007), False, 'from bluesky import RunEngine\n'), ((6084, 6104), 'bluesky.callbacks.best_effort.BestEffortCallback', 'BestEffortCallback', ([], {}), '()\n', (6102, 6104), False, 'from bluesky.callbacks.best_effort import BestEffortCallback\n'), ((6178, 6197), 'bluesky.utils.install_nb_kicker', 'install_nb_kicker', ([], {}), '()\n', (6195, 6197), False, 'from bluesky.utils import install_nb_kicker\n'), ((3530, 3569), 'bluesky.plan_stubs.subscribe', 'subscribe', (['"""all"""', 'x_centroid_y_centroid'], {}), "('all', x_centroid_y_centroid)\n", (3539, 3569), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((3611, 3646), 'bluesky.plan_stubs.subscribe', 'subscribe', (['"""all"""', 'y_plot_x_centroid'], {}), "('all', y_plot_x_centroid)\n", (3620, 3646), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((3688, 3723), 'bluesky.plan_stubs.subscribe', 'subscribe', (['"""all"""', 'y_plot_y_centroid'], {}), "('all', y_plot_y_centroid)\n", (3697, 3723), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((3765, 3800), 'bluesky.plan_stubs.subscribe', 'subscribe', (['"""all"""', 'x_plot_x_centroid'], {}), "('all', x_plot_x_centroid)\n", (3774, 3800), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((3842, 3877), 'bluesky.plan_stubs.subscribe', 'subscribe', (['"""all"""', 'x_plot_y_centroid'], {}), "('all', x_plot_y_centroid)\n", (3851, 3877), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((3919, 4109), 
'bluesky.plan_stubs.open_run', 'open_run', ([], {'md': "{'detectors': [x_centroid.name, y_centroid.name], 'motors': [x.name, y.name\n ], 'hints': {'dimensions': [(x.hints['fields'], 'primary'), (y.hints[\n 'fields'], 'primary')]}}"}), "(md={'detectors': [x_centroid.name, y_centroid.name], 'motors': [x.\n name, y.name], 'hints': {'dimensions': [(x.hints['fields'], 'primary'),\n (y.hints['fields'], 'primary')]}})\n", (3927, 4109), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((5230, 5241), 'bluesky.plan_stubs.close_run', 'close_run', ([], {}), '()\n', (5239, 5241), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((5284, 5324), 'bluesky.plan_stubs.unsubscribe', 'unsubscribe', (['token_x_centroid_y_centroid'], {}), '(token_x_centroid_y_centroid)\n', (5295, 5324), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((5340, 5376), 'bluesky.plan_stubs.unsubscribe', 'unsubscribe', (['token_y_plot_x_centroid'], {}), '(token_y_plot_x_centroid)\n', (5351, 5376), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((5392, 5428), 'bluesky.plan_stubs.unsubscribe', 'unsubscribe', (['token_y_plot_y_centroid'], {}), '(token_y_plot_y_centroid)\n', (5403, 5428), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((5444, 5480), 'bluesky.plan_stubs.unsubscribe', 'unsubscribe', (['token_x_plot_x_centroid'], {}), '(token_x_plot_x_centroid)\n', (5455, 5480), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((5496, 5532), 'bluesky.plan_stubs.unsubscribe', 'unsubscribe', (['token_x_plot_y_centroid'], {}), '(token_x_plot_y_centroid)\n', (5507, 5532), False, 'from bluesky.plan_stubs import open_run, close_run, subscribe, unsubscribe\n'), ((6262, 6279), 'ophyd.sim.SynAxis', 'SynAxis', ([], {'name': '"""x"""'}), "(name='x')\n", (6269, 6279), False, 'from ophyd.sim 
import SynAxis, SynSignal\n'), ((6298, 6315), 'ophyd.sim.SynAxis', 'SynAxis', ([], {'name': '"""y"""'}), "(name='y')\n", (6305, 6315), False, 'from ophyd.sim import SynAxis, SynSignal\n'), ((6695, 6735), 'pcdsdevices.device_types.Newport', 'Newport', (['"""XPP:LAS:MMN:13"""'], {'name': '"""real_x"""'}), "('XPP:LAS:MMN:13', name='real_x')\n", (6702, 6735), False, 'from pcdsdevices.device_types import Newport\n'), ((6756, 6796), 'pcdsdevices.device_types.Newport', 'Newport', (['"""XPP:LAS:MMN:14"""'], {'name': '"""real_y"""'}), "('XPP:LAS:MMN:14', name='real_y')\n", (6763, 6796), False, 'from pcdsdevices.device_types import Newport\n'), ((6867, 6937), 'ophyd.signal.EpicsSignalRO', 'EpicsSignalRO', (['"""XPP:OPAL1K:01:Stats2:CentroidX_RBV"""'], {'name': '"""x_readback"""'}), "('XPP:OPAL1K:01:Stats2:CentroidX_RBV', name='x_readback')\n", (6880, 6937), False, 'from ophyd.signal import EpicsSignalRO\n'), ((6961, 7031), 'ophyd.signal.EpicsSignalRO', 'EpicsSignalRO', (['"""XPP:OPAL1K:01:Stats2:CentroidY_RBV"""'], {'name': '"""y_readback"""'}), "('XPP:OPAL1K:01:Stats2:CentroidY_RBV', name='y_readback')\n", (6974, 7031), False, 'from ophyd.signal import EpicsSignalRO\n'), ((4602, 4750), 'pswalker.plans.walk_to_pixel', 'walk_to_pixel', (['x_centroid', 'x', 'x_target'], {'first_step': '(0.1)', 'target_fields': '[x_centroid.name, x.name]', 'tolerance': '(3)', 'average': '(5)', 'system': '[y, y_centroid]'}), '(x_centroid, x, x_target, first_step=0.1, target_fields=[\n x_centroid.name, x.name], tolerance=3, average=5, system=[y, y_centroid])\n', (4615, 4750), False, 'from pswalker.plans import walk_to_pixel\n'), ((6412, 6479), 'functools.partial', 'partial', (['centroid_from_motor_cross', 'x_motor', 'y_motor'], {'noise_scale': '(1)'}), '(centroid_from_motor_cross, x_motor, y_motor, noise_scale=1)\n', (6419, 6479), False, 'from functools import partial\n'), ((6532, 6584), 'functools.partial', 'partial', (['centroid_from_motor_cross', 'y_motor', 'x_motor'], {}), 
'(centroid_from_motor_cross, y_motor, x_motor)\n', (6539, 6584), False, 'from functools import partial\n'), ((4910, 5058), 'pswalker.plans.walk_to_pixel', 'walk_to_pixel', (['y_centroid', 'y', 'y_target'], {'first_step': '(0.1)', 'tolerance': '(3)', 'average': '(5)', 'target_fields': '[y_centroid.name, y.name]', 'system': '[x, x_centroid]'}), '(y_centroid, y, y_target, first_step=0.1, tolerance=3, average\n =5, target_fields=[y_centroid.name, y.name], system=[x, x_centroid])\n', (4923, 5058), False, 'from pswalker.plans import walk_to_pixel\n')]
|
""" VariableTree class definition
"""
import copy
# pylint: disable-msg=E0611,F0401
from enthought.traits.has_traits import FunctionType
from openmdao.main.variable import Variable
from openmdao.main.container import Container
from openmdao.main.datatypes.api import Slot, Str
from openmdao.main.rbac import rbac
from openmdao.main.mp_support import is_instance
class VariableTree(Container):
"""A container of variables with an input or output sense."""
_iotype = Str('')
def __init__(self, iotype='', doc=None):
super(VariableTree, self).__init__(doc=doc)
self._iotype = iotype
self.on_trait_change(self._iotype_modified, '_iotype')
# register callbacks for our class traits
for name, trait in self.class_traits().items():
if not name.startswith('_'):
self.on_trait_change(self._trait_modified, name)
@property
def iotype(self):
return self._iotype
@rbac(('owner', 'user'))
def cpath_updated(self):
if self.parent:
if isinstance(self.parent, VariableTree):
self._iotype = self.parent._iotype
else:
t = self.parent.trait(self.name)
if t and t.iotype:
self._iotype = t.iotype
super(VariableTree, self).cpath_updated()
@rbac(('owner', 'user'))
def get_metadata(self, traitpath, metaname=None):
if metaname == 'iotype':
return self._iotype
elif metaname is None:
meta = super(VariableTree, self).get_metadata(traitpath, metaname)
meta['iotype'] = self._iotype
return meta
else:
return super(VariableTree, self).get_metadata(traitpath, metaname)
def copy(self):
"""Returns a deep copy of this VariableTree."""
return copy.deepcopy(self)
def add(self, name, obj):
if isinstance(obj, VariableTree):
if self.trait(name) is None:
self.add_trait(name, Slot(VariableTree, iotype=obj._iotype))
self.on_trait_change(self._trait_modified, name)
elif not isinstance(obj, Variable):
msg = "a VariableTree may only contain Variables or other " + \
"VariableTrees"
self.raise_exception(msg, TypeError)
return super(VariableTree, self).add(name, obj)
def add_trait(self, name, trait):
super(VariableTree, self).add_trait(name, trait)
if not name.startswith('_'):
self.on_trait_change(self._trait_modified, name)
def remove_trait(self, name):
trait = self.get_trait(name)
# remove the callback
if trait:
self.on_trait_change(self._trait_modified, name, remove=True)
super(VariableTree, self).remove_trait(name)
def list_vars(self):
"""Return a list of Variables in this VariableTree."""
return [k for k in self.__dict__.keys() if not k.startswith('_')]
@rbac(('owner', 'user'))
def invalidate_deps(self, varnames=None, force=False):
return None
def _iotype_modified(self, obj, name, old, new):
for k, v in self.__dict__.items():
if isinstance(v, VariableTree) and v is not self.parent:
v._iotype = new
def _trait_modified(self, obj, name, old, new):
# handle weird traits side-effect from hasattr call
if name == 'trait_added':
return
if isinstance(new, VariableTree):
obj = getattr(self, name)
obj.parent = self
obj._iotype = self._iotype
if self._iotype == 'in':
p = self
while isinstance(p, VariableTree):
vt = p
p = p.parent
# notify parent Component that this VariableTree has been modified
if p is not None:
t = p.trait(vt.name)
if t and t.iotype == 'in':
p._input_trait_modified(p, vt.name, vt, vt)
def get_iotype(self, name):
"""Return the iotype of the Variable with the given name"""
if self.get_trait(name) is None:
self.raise_exception("'%s' not found" % name)
return self._iotype
def _items(self, visited, recurse=False, **metadata):
"""Return an iterator that returns a list of tuples of the form
(rel_pathname, obj) for each trait of this VariableTree that matches
the given metadata. If recurse is True, also iterate through all
child Containers of each Container found.
"""
if id(self) not in visited:
visited.add(id(self))
if 'iotype' in metadata:
meta_io = metadata['iotype']
matches_io = False
if type( meta_io ) is FunctionType:
if meta_io(self._iotype):
matches_io = True
elif meta_io == self._iotype:
matches_io = True
if matches_io:
newdict = metadata.copy()
del newdict['iotype']
else:
matches_io = True
newdict = metadata
if matches_io:
for name, trait in self._alltraits(**newdict).items():
if name.startswith('_'):
continue
obj = getattr(self, name)
yield (name, obj)
if recurse and is_instance(obj, VariableTree) and \
id(obj) not in visited:
for chname, child in obj._items(visited, recurse,
**metadata):
yield ('.'.join([name, chname]), child)
def get_attributes(self, io_only=True):
""" get attributes for this variable tree. Variables may also include
slots. Used by the GUI.
io_only: Bool
Set to true if we only want to populate the input and output
fields of the attributes dictionary."""
attrs = {}
attrs['type'] = type(self).__name__
# Connection information found in parent comp's parent assy
if not self.parent or not self.parent._parent or \
isinstance(self.parent, VariableTree):
connected = []
else:
graph = self.parent._parent._depgraph
if self._iotype == 'in':
connected = graph.get_connected_inputs()
else:
connected = graph.get_connected_outputs()
variables = []
slots = []
for name in self.list_vars():
trait = self.get_trait(name)
meta = self.get_metadata(name)
value = getattr(self, name)
ttype = trait.trait_type
# Each variable type provides its own basic attributes
attr, slot_attr = ttype.get_attribute(name, value, trait, meta)
attr['connected'] = ''
if name in connected:
connections = self.parent._depgraph.connections_to(name)
if self._iotype == 'in':
# there can be only one connection to an input
attr['connected'] = str([src for src, dst in \
connections]).replace('@xin.', '')
else:
attr['connected'] = str([dst for src, dst in \
connections]).replace('@xout.', '')
variables.append(attr)
# Process singleton and contained slots.
if not io_only and slot_attr is not None:
# We can hide slots (e.g., the Workflow slot in drivers)
if 'hidden' not in meta or meta['hidden'] == False:
slots.append(slot_attr)
if self._iotype == 'in':
panel = 'Inputs'
else:
panel = 'Outputs'
attrs[panel] = variables
attrs['Slots'] = slots
return attrs
# register a flattener for Cases
from openmdao.main.case import flatteners, flatten_obj
def _flatten_vartree(name, vt):
ret = []
for n, v in vt._items(set()):
ret.extend([('.'.join([name, k]), v) for k, v in flatten_obj(n, v)])
return ret
flatteners[VariableTree] = _flatten_vartree
|
[
"copy.deepcopy",
"openmdao.main.case.flatten_obj",
"openmdao.main.rbac.rbac",
"openmdao.main.datatypes.api.Str",
"openmdao.main.datatypes.api.Slot",
"openmdao.main.mp_support.is_instance"
] |
[((481, 488), 'openmdao.main.datatypes.api.Str', 'Str', (['""""""'], {}), "('')\n", (484, 488), False, 'from openmdao.main.datatypes.api import Slot, Str\n'), ((967, 990), 'openmdao.main.rbac.rbac', 'rbac', (["('owner', 'user')"], {}), "(('owner', 'user'))\n", (971, 990), False, 'from openmdao.main.rbac import rbac\n'), ((1355, 1378), 'openmdao.main.rbac.rbac', 'rbac', (["('owner', 'user')"], {}), "(('owner', 'user'))\n", (1359, 1378), False, 'from openmdao.main.rbac import rbac\n'), ((3050, 3073), 'openmdao.main.rbac.rbac', 'rbac', (["('owner', 'user')"], {}), "(('owner', 'user'))\n", (3054, 3073), False, 'from openmdao.main.rbac import rbac\n'), ((1859, 1878), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (1872, 1878), False, 'import copy\n'), ((2034, 2072), 'openmdao.main.datatypes.api.Slot', 'Slot', (['VariableTree'], {'iotype': 'obj._iotype'}), '(VariableTree, iotype=obj._iotype)\n', (2038, 2072), False, 'from openmdao.main.datatypes.api import Slot, Str\n'), ((8612, 8629), 'openmdao.main.case.flatten_obj', 'flatten_obj', (['n', 'v'], {}), '(n, v)\n', (8623, 8629), False, 'from openmdao.main.case import flatteners, flatten_obj\n'), ((5596, 5626), 'openmdao.main.mp_support.is_instance', 'is_instance', (['obj', 'VariableTree'], {}), '(obj, VariableTree)\n', (5607, 5626), False, 'from openmdao.main.mp_support import is_instance\n')]
|
import json
import re
import subprocess
from collections import OrderedDict
from collections import deque
# 3rd party libraries
import jinja2
# 1st party libraries
from lookuponterra.graph import Graph, Node, Edge
from lookuponterra.util import OrderedSet
class DotGraph(Graph):
def __init__(self, filename, file_contents=None):
self.filename = filename
self.nodes = []
self.edges = []
self.clusters = OrderedDict()
self.clusters['root'] = True # Used like an ordered Set.
if file_contents:
self.contents = file_contents
else:
with open(filename, 'r') as f:
self.contents = f.read()
# pretty naive way to put a parser together, considering graphviz/dot have a
# bnf grammar. but gets the job done, for now.
edge_fmt_re = re.compile(r'\s+\"(?P<src>.*)\"\s+\-\>(?P<dst>.*)\s+\[(?P<fmt>.*)\]')
edge_re = re.compile(r'\s+\"(?P<src>.*)\"\s+\-\>\s+\"(?P<dst>.*)\"')
decl_re = re.compile(r'\s+\"(?P<node>.*)\"\s+\[(?P<fmt>.*)\]')
# read node and edge declarations from an existing graphviz/dot file.
for l in self.contents.splitlines():
for pat in [edge_fmt_re, edge_re, decl_re]:
m = pat.match(l)
if m:
d = m.groupdict()
fmt = Format(d['fmt']) if 'fmt' in d else Format('')
if 'src' in m.groupdict():
e = DotEdge(d['src'], d['dst'], fmt=fmt)
self.edges.append(e)
elif 'node' in m.groupdict():
self.nodes.append(DotNode(d['node'], fmt=fmt))
break
# terraform graph output doesn't always make explicit node declarations;
# sometimes they're a side-effect of edge definitions. Capture them.
for e in self.edges:
if e.source not in [ n.label for n in self.nodes ]:
self.nodes.append(DotNode(e.source))
if e.target not in [ n.label for n in self.nodes ]:
self.nodes.append(DotNode(e.target))
self.stack('var')
self.stack('output')
# leftover nodes belong to the root subgraph.
for n in self.nodes:
n.cluster = 'root' if not n.cluster else n.cluster
def get_node_by_name(self, label):
'''return node by label (if exists) otherwise simple_name'''
for n in self.nodes:
if n.label == label:
return n
for n in self.nodes:
if n.simple_name == label:
return n
return None
#
# Output functions (return strings).
#
def dot(self):
'returns a dot/graphviz representation of the graph (a string)'
return self.dot_template.render({ 'nodes': self.nodes, 'edges': self.edges, 'clusters' : self.clusters, 'EdgeType' : EdgeType })
def json(self):
edges = [ dict(e) for e in self.edges ]
nodes = [ dict(n) for n in self.nodes ]
return json.dumps({ 'nodes' : nodes, 'edges' : edges }, indent=4, sort_keys=True)
#
# A handful of graph manipulations. These are hampered by the decision
# to not de-serialize the graphs (leaving them as lists of nodes and
# edges). This code is garbage, but it mostly works.
#
def stack(self, node_type, threshold=2):
'''if a group of nodes of type 'type' number as many as 'threshold',
and share the same (single) parent and (single) child, then
hide their dependencies, and create a chain of pseudo-dependencies
so that they stack one above the next in the final diagram.'''
new_edges = []
for n in self.nodes:
if n.type != node_type:
continue
parents = [ e for e in self.edges if e.target == n.label ]
children = [ e for e in self.edges if e.source == n.label ]
if len(children) > 1 or len(parents) != 1:
continue
# setup the cluster.
target = children[0].target if len(children) > 0 else ''
n.cluster = 'cluster' + parents[0].source + '_' + node_type + '_' + target
self.clusters[n.cluster] = True # <-- OrderedDict, used for its ordering. Pretend its a Set
for cluster in [ cluster for cluster in self.clusters.keys() if re.match('.*_' + node_type + '_.*', cluster) ]:
nodes = [ n for n in self.nodes if n.cluster == cluster ]
prev = None
last_edge = None
if len(nodes) == 1:
continue
for n in nodes:
# 1st iteration.
if not prev:
for e in self.edges:
if e.source == n.label:
e.edge_type = EdgeType.HIDDEN
# subsequent iterations.
else:
last_edge = None
for e in self.edges:
if e.target == n.label:
e.edge_type = EdgeType.HIDDEN
if e.source == n.label:
e.edge_type = EdgeType.HIDDEN
last_edge = e
new_edges.append(DotEdge(prev.label, n.label, fmt=Format('style=dashed,arrowhead=none'), edge_type=EdgeType.LAYOUT_SHOWN))
# each iteration.
prev = n
if last_edge:
last_edge.edge_type = EdgeType.NORMAL
self.edges = self.edges + new_edges
def set_module_depth(self, depth):
"""
group resources belonging to modules into a single node, to simplify
presentation. No claims made for this code. It's garbage!
"""
depth += 1 # account for [root] module
def is_too_deep(modules):
if len(modules) >= depth and modules[0] != 'root':
return True
def find_edge(edges, e):
for edge in edges:
if e.source == edge.source and e.target == edge.target and e.edge_type == edge.edge_type:
return True
return False
# find DotNodes at too great a depth.
too_deep = [ n for n in self.nodes if is_too_deep(n.modules) ]
# generate ModuleNodes to stand-in for DotNodes at too great a depth.
placeholders = []
for n in too_deep:
match = False
for p in placeholders:
if p.is_standin(n.modules):
match = True
break
if match == False:
placeholders.append(ModuleNode(n.modules[:depth]))
# create replacement edges
new_edges = []
for e in self.edges:
src_mods = DotNode._label_to_modules(e.source)
tgt_mods = DotNode._label_to_modules(e.target)
if is_too_deep(src_mods) and is_too_deep(tgt_mods):
continue
elif is_too_deep(src_mods):
for p in placeholders:
if p.is_standin(src_mods):
replace = True
for ne in new_edges:
if ne.source == p.label and ne.target == e.target:
replace = False
break
if replace:
new_edges.append(DotEdge(p.label, e.target, fmt=Format('')))
break
elif is_too_deep(tgt_mods):
for p in placeholders:
if p.is_standin(tgt_mods):
replace = True
for ne in new_edges:
if ne.source == e.source and ne.target == p.label:
replace = False
break
if replace:
new_edges.append(DotEdge(e.source, p.label, fmt=Format('')))
break
else:
new_edges.append(e)
# make sure we haven't got any duplicate edges.
final_edges = []
for e in new_edges:
if not find_edge(final_edges, e):
final_edges.append(e)
self.edges = final_edges
# add placeholder nodes, remove nodes beyond specified module_depth.
self.nodes = list(OrderedSet(placeholders) | (OrderedSet(self.nodes) - OrderedSet(too_deep)))
def center(self, node):
'''
prunes graph to include only (1) the given node, (2) its
dependencies, and nodes that depend on it.
'''
edges_by_source = {}
for e in self.edges:
if e.source in edges_by_source:
edges_by_source[e.source].append(e)
else:
edges_by_source[e.source] = [ e ]
edges_by_target = {}
for e in self.edges:
if e.target in edges_by_target:
edges_by_target[e.target].append(e)
else:
edges_by_target[e.target] = [ e ]
edges_to_save = OrderedSet() # edge objects
nodes_to_save = OrderedSet() # label strings
q = deque()
if node.label in edges_by_source:
q.append(node.label)
nodes_to_save.add(node.label)
while len(q) > 0:
source = q.pop()
if source in edges_by_source:
for e in edges_by_source[source]:
q.append(e.target)
edges_to_save.add(e)
nodes_to_save.add(e.target)
q = deque()
if node.label in edges_by_target:
q.append(node.label)
nodes_to_save.add(node.label)
while len(q) > 0:
target = q.pop()
if target in edges_by_target:
for e in edges_by_target[target]:
q.append(e.source)
edges_to_save.add(e)
nodes_to_save.add(e.source)
self.edges = list(edges_to_save)
self.nodes = [ n for n in self.nodes if n.label in nodes_to_save ]
def focus(self, node):
'''
prunes graph to include only the given node and its dependencies.
'''
edges_by_source = {}
for e in self.edges:
if e.source in edges_by_source:
edges_by_source[e.source].append(e)
else:
edges_by_source[e.source] = [ e ]
edges_to_save = OrderedSet() # edge objects
nodes_to_save = OrderedSet() # label strings
q = deque()
if node.label in edges_by_source:
q.append(node.label)
nodes_to_save.add(node.label)
while len(q) > 0:
source = q.pop()
if source in edges_by_source:
for e in edges_by_source[source]:
q.append(e.target)
edges_to_save.add(e)
nodes_to_save.add(e.target)
self.edges = list(edges_to_save)
self.nodes = [ n for n in self.nodes if n.label in nodes_to_save ]
#
# Formatting templates.
#
dot_template_str = """
digraph {
compound = "true"
graph [fontname = "courier new",fontsize=8];
node [fontname = "courier new",fontsize=8];
edge [fontname = "courier new",fontsize=8];
{# just the root module #}
{% for cluster in clusters %}
subgraph "{{cluster}}" {
style=invis;
{% for node in nodes %}
{% if node.cluster == cluster and node.module == 'root' %}
{% if node.type %}
"{{node.label}}" [ shape=none, margin=0, id={{node.svg_id}} label=<<TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0">
<TR><TD>{{node.type}}</TD></TR>
<TR><TD>{{node.resource_name}}</TD></TR>
</TABLE>>];
{% else %}
"{{node.label}}" [{{node.fmt}}]
{% endif %}
{% endif %}
{% endfor %}
}
{% endfor %}
{# non-root modules #}
{% for node in nodes %}
{% if node.module != 'root' %}
{% if node.collapsed %}
"{{node.label}}" [ shape=none, margin=0, id={{node.svg_id}} label=<<TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0">
{% for module in node.modules %}<TR><TD>(M) {{module}}</TD></TR>{% endfor %}
<TR><TD>(collapsed)</TD></TR>
<TR><TD>...</TD></TR>
</TABLE>>];
{% else %}
"{{node.label}}" [ shape=none, margin=0, id={{node.svg_id}} label=<<TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0">
{% for module in node.modules %}<TR><TD>(M) {{module}}</TD></TR>{% endfor %}
<TR><TD>{{node.type}}</TD></TR>
<TR><TD>{{node.resource_name}}</TD></TR>
</TABLE>>];
{% endif %}
{% endif %}
{% endfor %}
{% for edge in edges %}
{% if edge.edge_type == EdgeType.NORMAL %}"{{edge.source}}" -> "{{edge.target}}" {% if edge.fmt %} [{{edge.fmt}}] {% endif %}{% endif %}
{% if edge.edge_type == EdgeType.LAYOUT_SHOWN %}"{{edge.source}}" -> "{{edge.target}}" {% if edge.fmt %} [{{edge.fmt}}] {% endif %}{% endif %}
{% if edge.edge_type == EdgeType.LAYOUT_HIDDEN %}"{{edge.source}}" -> "{{edge.target}}" [style="invis"]{% endif %}
{% endfor %}
}
"""
dot_template = jinja2.Environment(loader=jinja2.BaseLoader()).from_string(dot_template_str)
class Format:
"""
Naive parser for graphviz/dot formatting options.
TBD: method to add/replace format options, rather than exposing self.fmt
"""
def __init__(self, s):
self.fmt = {}
if len(s) > 0:
# doesn't handle '=' or ',' within keys/values, and includes quotation
# marks, rather than stripping them... but sufficient for a subset of dotfiles
# produced by terraform, hopefully.
param_re = re.compile(r'\s*(?P<key>.*)\s*\=(?P<val>.*)')
params = s.split(',')
for p in params:
m = param_re.match(p)
if m:
self.fmt[m.groupdict()['key']] = m.groupdict()['val']
else:
print('Error processing format param: ' + 'p', file=sys.stderr)
def add(self, **kwargs):
self.fmt = {**self.fmt, **kwargs}
def remove(self, key):
if key in self.fmt:
del self.fmt[key]
def __str__(self):
return ','.join([ key + '=' + val for key, val in self.fmt.items() ])
class DotNode(Node):
def __init__(self, label, fmt=None):
self.label = DotNode._label_fixup(label)
self.fmt = fmt if fmt else Format('') # graphviz formatting.
self.simple_name = re.sub(r'\[root\]\s+', '', self.label) # strip root module notation.
self.type = DotNode._resource_type(self.label) # e.g. var, aws_instance, output...
self.resource_name = DotNode._resource_name(self.label) #
self.svg_id = 'node_' + str(Node.svg_id_counter()) #
self.definition = {} #
self.group = 20000 # for coloration. placeholder. replaced in javascript.
self.module = DotNode._module(self.label) # for module groupings. 'root' or 'module.foo.module.bar'
self.cluster = None # for stacked resources (usually var/output).
self.collapsed = False
self.fmt.add(id=self.svg_id, shape='box')
self.modules = [ m for m in self.module.split('.') if m != 'module' ]
def __iter__(self):
for key in {'label', 'simple_name', 'type', 'resource_name', 'group', 'svg_id', 'definition', 'cluster', 'module', 'modules'}:
yield (key, getattr(self, key))
#
# static utilities mostly for converting "labels"--which uniquely identify
# DotNodes--to other useful things like a list of parent modules, the isolated
# resource name, the resource type, etc.
#
@staticmethod
def _label_fixup(label):
# fix the resources belonging to removed modules by naming them "removed."
return re.sub(r'\s+\(removed\)', r'.removed (removed)', label)
@staticmethod
def _resource_type(label):
m = re.match(r'(\[root\]\s+)*((?P<modprefix>\S+)\.)*(?P<type>\S+)\.\S+', label)
return m.groupdict()['type'] if m else ''
@staticmethod
def _resource_name(label):
m = re.match(r'(\[root\]\s+)*(?P<type>\S+)\.(?P<name>\S+)', label)
return m.groupdict()['name'] if m else ''
@staticmethod
def _module(label):
try:
if not re.match(r'(\[root\]\s+)*module\..*', label):
return 'root'
m = re.match(r'(\[root\]\s+)*(?P<module>\S+)\.(?P<type>\S+)\.?\S+', label)
return m.groupdict()['module']
except:
raise Exception("None: ", label)
@staticmethod
def _label_to_modules(label):
return [ m for m in DotNode._module(label).split('.') if m != 'module' ]
class ModuleNode(DotNode):
'''
Stands in for multiple DotNodes at the same module depth...
'''
def __init__(self, modules):
self.label = '[root] ' + 'module.' + '.module.'.join(modules) + '.collapsed.etc'
self.fmt = Format('')
self.simple_name = re.sub(r'\[root\]\s+', '', self.label) # strip root module notation.
self.type = DotNode._resource_type(self.label)
self.resource_name = DotNode._resource_name(self.label)
self.svg_id = 'node_' + str(Node.svg_id_counter())
self.definition = {}
self.group = 20000 # for coloration. placeholder. replaced in javascript.
self.module = DotNode._module(self.label) # for module groupings. 'root' or 'module.foo.module.bar'
self.cluster = None # for stacked resources (usually var/output).
self.modules = [ m for m in self.module.split('.') if m != 'module' ]
self.collapsed = True
self.fmt.add(id=self.svg_id, shape='box')
def is_standin(self, modules):
'should this ModuleNode standin for the provided DotNode?'
if len(modules) < len(self.modules):
return False
for i in range(len(self.modules)):
if self.modules[i] != modules[i]:
return False
return True
class EdgeType:
'''
Sometimes we want to hide edges, and sometimes we want to add
edges in order to influence layout.
'''
NORMAL = 1 # what we talk about when we're talking about edges.
HIDDEN = 2 # these are normal edges, but aren't drawn.
LAYOUT_SHOWN = 3 # these edges are drawn, but aren't "real" edges
LAYOUT_HIDDEN = 4 # these edges are not drawn, aren't "real" edges, but inform layout.
def __init__(self):
pass
class DotEdge(Edge):
'''
Distinguished from a Regular Edge, by its Dot language format string.
'''
def __init__(self, source, target, fmt=None, edge_type=EdgeType.NORMAL):
self.source = DotNode._label_fixup(source)
self.target = DotNode._label_fixup(target)
self.svg_id = 'edge_' + str(Edge.svg_id_counter())
self.fmt = fmt
self.edge_type = edge_type
self.fmt.add(id=self.svg_id)
def __iter__(self):
for key in {'source', 'target', 'svg_id', 'edge_type'}:
yield (key, getattr(self, key))
|
[
"lookuponterra.util.OrderedSet",
"json.dumps",
"re.match",
"lookuponterra.graph.Node.svg_id_counter",
"lookuponterra.graph.Edge.svg_id_counter",
"jinja2.BaseLoader",
"collections.OrderedDict",
"re.sub",
"collections.deque",
"re.compile"
] |
[((449, 462), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (460, 462), False, 'from collections import OrderedDict\n'), ((866, 943), 're.compile', 're.compile', (['"""\\\\s+\\\\"(?P<src>.*)\\\\"\\\\s+\\\\-\\\\>(?P<dst>.*)\\\\s+\\\\[(?P<fmt>.*)\\\\]"""'], {}), '(\'\\\\s+\\\\"(?P<src>.*)\\\\"\\\\s+\\\\-\\\\>(?P<dst>.*)\\\\s+\\\\[(?P<fmt>.*)\\\\]\')\n', (876, 943), False, 'import re\n'), ((954, 1020), 're.compile', 're.compile', (['"""\\\\s+\\\\"(?P<src>.*)\\\\"\\\\s+\\\\-\\\\>\\\\s+\\\\"(?P<dst>.*)\\\\\\""""'], {}), '(\'\\\\s+\\\\"(?P<src>.*)\\\\"\\\\s+\\\\-\\\\>\\\\s+\\\\"(?P<dst>.*)\\\\"\')\n', (964, 1020), False, 'import re\n'), ((1031, 1088), 're.compile', 're.compile', (['"""\\\\s+\\\\"(?P<node>.*)\\\\"\\\\s+\\\\[(?P<fmt>.*)\\\\]"""'], {}), '(\'\\\\s+\\\\"(?P<node>.*)\\\\"\\\\s+\\\\[(?P<fmt>.*)\\\\]\')\n', (1041, 1088), False, 'import re\n'), ((3101, 3171), 'json.dumps', 'json.dumps', (["{'nodes': nodes, 'edges': edges}"], {'indent': '(4)', 'sort_keys': '(True)'}), "({'nodes': nodes, 'edges': edges}, indent=4, sort_keys=True)\n", (3111, 3171), False, 'import json\n'), ((9253, 9265), 'lookuponterra.util.OrderedSet', 'OrderedSet', ([], {}), '()\n', (9263, 9265), False, 'from lookuponterra.util import OrderedSet\n'), ((9305, 9317), 'lookuponterra.util.OrderedSet', 'OrderedSet', ([], {}), '()\n', (9315, 9317), False, 'from lookuponterra.util import OrderedSet\n'), ((9347, 9354), 'collections.deque', 'deque', ([], {}), '()\n', (9352, 9354), False, 'from collections import deque\n'), ((9788, 9795), 'collections.deque', 'deque', ([], {}), '()\n', (9793, 9795), False, 'from collections import deque\n'), ((10706, 10718), 'lookuponterra.util.OrderedSet', 'OrderedSet', ([], {}), '()\n', (10716, 10718), False, 'from lookuponterra.util import OrderedSet\n'), ((10758, 10770), 'lookuponterra.util.OrderedSet', 'OrderedSet', ([], {}), '()\n', (10768, 10770), False, 'from lookuponterra.util import OrderedSet\n'), ((10800, 10807), 'collections.deque', 'deque', ([], 
{}), '()\n', (10805, 10807), False, 'from collections import deque\n'), ((15210, 15250), 're.sub', 're.sub', (['"""\\\\[root\\\\]\\\\s+"""', '""""""', 'self.label'], {}), "('\\\\[root\\\\]\\\\s+', '', self.label)\n", (15216, 15250), False, 'import re\n'), ((16576, 16632), 're.sub', 're.sub', (['"""\\\\s+\\\\(removed\\\\)"""', '""".removed (removed)"""', 'label'], {}), "('\\\\s+\\\\(removed\\\\)', '.removed (removed)', label)\n", (16582, 16632), False, 'import re\n'), ((16694, 16780), 're.match', 're.match', (['"""(\\\\[root\\\\]\\\\s+)*((?P<modprefix>\\\\S+)\\\\.)*(?P<type>\\\\S+)\\\\.\\\\S+"""', 'label'], {}), "('(\\\\[root\\\\]\\\\s+)*((?P<modprefix>\\\\S+)\\\\.)*(?P<type>\\\\S+)\\\\.\\\\S+',\n label)\n", (16702, 16780), False, 'import re\n'), ((16882, 16949), 're.match', 're.match', (['"""(\\\\[root\\\\]\\\\s+)*(?P<type>\\\\S+)\\\\.(?P<name>\\\\S+)"""', 'label'], {}), "('(\\\\[root\\\\]\\\\s+)*(?P<type>\\\\S+)\\\\.(?P<name>\\\\S+)', label)\n", (16890, 16949), False, 'import re\n'), ((17762, 17802), 're.sub', 're.sub', (['"""\\\\[root\\\\]\\\\s+"""', '""""""', 'self.label'], {}), "('\\\\[root\\\\]\\\\s+', '', self.label)\n", (17768, 17802), False, 'import re\n'), ((14364, 14411), 're.compile', 're.compile', (['"""\\\\s*(?P<key>.*)\\\\s*\\\\=(?P<val>.*)"""'], {}), "('\\\\s*(?P<key>.*)\\\\s*\\\\=(?P<val>.*)')\n", (14374, 14411), False, 'import re\n'), ((17162, 17239), 're.match', 're.match', (['"""(\\\\[root\\\\]\\\\s+)*(?P<module>\\\\S+)\\\\.(?P<type>\\\\S+)\\\\.?\\\\S+"""', 'label'], {}), "('(\\\\[root\\\\]\\\\s+)*(?P<module>\\\\S+)\\\\.(?P<type>\\\\S+)\\\\.?\\\\S+', label)\n", (17170, 17239), False, 'import re\n'), ((4441, 4485), 're.match', 're.match', (["('.*_' + node_type + '_.*')", 'cluster'], {}), "('.*_' + node_type + '_.*', cluster)\n", (4449, 4485), False, 'import re\n'), ((8531, 8555), 'lookuponterra.util.OrderedSet', 'OrderedSet', (['placeholders'], {}), '(placeholders)\n', (8541, 8555), False, 'from lookuponterra.util import OrderedSet\n'), ((15491, 
15512), 'lookuponterra.graph.Node.svg_id_counter', 'Node.svg_id_counter', ([], {}), '()\n', (15510, 15512), False, 'from lookuponterra.graph import Graph, Node, Edge\n'), ((17070, 17117), 're.match', 're.match', (['"""(\\\\[root\\\\]\\\\s+)*module\\\\..*"""', 'label'], {}), "('(\\\\[root\\\\]\\\\s+)*module\\\\..*', label)\n", (17078, 17117), False, 'import re\n'), ((18005, 18026), 'lookuponterra.graph.Node.svg_id_counter', 'Node.svg_id_counter', ([], {}), '()\n', (18024, 18026), False, 'from lookuponterra.graph import Graph, Node, Edge\n'), ((19641, 19662), 'lookuponterra.graph.Edge.svg_id_counter', 'Edge.svg_id_counter', ([], {}), '()\n', (19660, 19662), False, 'from lookuponterra.graph import Graph, Node, Edge\n'), ((8559, 8581), 'lookuponterra.util.OrderedSet', 'OrderedSet', (['self.nodes'], {}), '(self.nodes)\n', (8569, 8581), False, 'from lookuponterra.util import OrderedSet\n'), ((8584, 8604), 'lookuponterra.util.OrderedSet', 'OrderedSet', (['too_deep'], {}), '(too_deep)\n', (8594, 8604), False, 'from lookuponterra.util import OrderedSet\n'), ((13812, 13831), 'jinja2.BaseLoader', 'jinja2.BaseLoader', ([], {}), '()\n', (13829, 13831), False, 'import jinja2\n')]
|
# tests for things that are not implemented, or have non-compliant behaviour
import array
# array deletion not implemented
try:
a = array.array('b', (1, 2, 3))
del a[1]
except TypeError:
print('TypeError')
# slice with step!=1 not implemented
try:
a = array.array('b', (1, 2, 3))
print(a[3:2:2])
except NotImplementedError:
print('NotImplementedError')
# should raise type error
try:
print(set('12') >= '1')
except TypeError:
print('TypeError')
# should raise type error
try:
print(set('12') <= '123')
except TypeError:
print('TypeError')
# uPy raises TypeError, shold be ValueError
try:
'%c' % b'\x01\x02'
except (TypeError, ValueError):
print('TypeError, ValueError')
# attributes/subscr not implemented
try:
print('{a[0]}'.format(a=[1, 2]))
except NotImplementedError:
print('NotImplementedError')
# str(...) with keywords not implemented
try:
str(b'abc', encoding='utf8')
except NotImplementedError:
print('NotImplementedError')
# str.rsplit(None, n) not implemented
try:
'a a a'.rsplit(None, 1)
except NotImplementedError:
print('NotImplementedError')
# str.endswith(s, start) not implemented
try:
'abc'.endswith('c', 1)
except NotImplementedError:
print('NotImplementedError')
# bytes(...) with keywords not implemented
try:
bytes('abc', encoding='utf8')
except NotImplementedError:
print('NotImplementedError')
# bytes subscr with step!=1 not implemented
try:
b'123'[0:3:2]
except NotImplementedError:
print('NotImplementedError')
mpz = 1 << 70
# mpz and with both args negative
try:
-mpz & -2
except NotImplementedError:
print('NotImplementedError')
# mpz or with args opposite sign
try:
-mpz | 2
except NotImplementedError:
print('NotImplementedError')
# mpz xor with args opposite sign
try:
-mpz ^ 2
except NotImplementedError:
print('NotImplementedError')
|
[
"array.array"
] |
[((138, 165), 'array.array', 'array.array', (['"""b"""', '(1, 2, 3)'], {}), "('b', (1, 2, 3))\n", (149, 165), False, 'import array\n'), ((271, 298), 'array.array', 'array.array', (['"""b"""', '(1, 2, 3)'], {}), "('b', (1, 2, 3))\n", (282, 298), False, 'import array\n')]
|
import emcee
import numpy as np
from astropy.io import fits
from pylinear.utilities import indices,pool
def mp_mcmcUncertainty(A,bi,func,conf):
if A is None or bi is None:
return None,None,None
ndim=1
p0=[]
nwalkers=conf['nwalkers']
for i in range(nwalkers):
p0.append(np.array([func*2.*np.random.randn()]))
cindex=0
sampler=emcee.EnsembleSampler(nwalkers,ndim,lnlike,args=(A,bi))
#sampler=emcee.MHSampler(cov,ndim,lnlike,args=(A,bi))
sampler.run_mcmc(p0,conf['nstep'])
nburn=int(conf['burn']*conf['nstep'])
samples=sampler.chain[:,nburn:,:].reshape((-1,1))
ss=np.std(samples,axis=0)
ll=np.percentile(samples,31.7,axis=0)
aa=np.percentile(samples,50.0,axis=0)
hh=np.percentile(samples,68.3,axis=0)
lo=aa[0]-ll[0]
hi=hh[0]-aa[0]
sig=ss[0]
return lo,hi,sig
def lnlike(x,A,bi):
resid=bi-A.matvec(x)
lnl=-0.5*np.sum(resid*resid)
return lnl
def mcmcStart(data,mat,resid,conf):
return mp_mcmcUncertainty(*mat.residualMatrix(data[0],resid),data[1],conf)
def mcmcUncertainties(conf,mat,result):
if not conf['perform']:
return result
print('[info]Computing MCMC uncertainties')
# compute the residuals
resid=mat.bi-mat.A.matvec(result.x)
# set up the iterates
iters=[(j,f) for j,f in enumerate(result.lo)]
# do the processing
p=pool.Pool(ncpu=conf['cpu']['ncpu'])
unc=p(mcmcStart,iters,mat,resid,conf,prefix='Running MCMC')
# package the outputs
unc=list(zip(*unc))
result.lo=np.array(unc[0])
result.hi=np.array(unc[1])
del unc
return result
|
[
"numpy.sum",
"numpy.random.randn",
"emcee.EnsembleSampler",
"numpy.std",
"numpy.percentile",
"pylinear.utilities.pool.Pool",
"numpy.array"
] |
[((388, 447), 'emcee.EnsembleSampler', 'emcee.EnsembleSampler', (['nwalkers', 'ndim', 'lnlike'], {'args': '(A, bi)'}), '(nwalkers, ndim, lnlike, args=(A, bi))\n', (409, 447), False, 'import emcee\n'), ((662, 685), 'numpy.std', 'np.std', (['samples'], {'axis': '(0)'}), '(samples, axis=0)\n', (668, 685), True, 'import numpy as np\n'), ((692, 728), 'numpy.percentile', 'np.percentile', (['samples', '(31.7)'], {'axis': '(0)'}), '(samples, 31.7, axis=0)\n', (705, 728), True, 'import numpy as np\n'), ((734, 770), 'numpy.percentile', 'np.percentile', (['samples', '(50.0)'], {'axis': '(0)'}), '(samples, 50.0, axis=0)\n', (747, 770), True, 'import numpy as np\n'), ((776, 812), 'numpy.percentile', 'np.percentile', (['samples', '(68.3)'], {'axis': '(0)'}), '(samples, 68.3, axis=0)\n', (789, 812), True, 'import numpy as np\n'), ((1432, 1467), 'pylinear.utilities.pool.Pool', 'pool.Pool', ([], {'ncpu': "conf['cpu']['ncpu']"}), "(ncpu=conf['cpu']['ncpu'])\n", (1441, 1467), False, 'from pylinear.utilities import indices, pool\n'), ((1597, 1613), 'numpy.array', 'np.array', (['unc[0]'], {}), '(unc[0])\n', (1605, 1613), True, 'import numpy as np\n'), ((1628, 1644), 'numpy.array', 'np.array', (['unc[1]'], {}), '(unc[1])\n', (1636, 1644), True, 'import numpy as np\n'), ((949, 970), 'numpy.sum', 'np.sum', (['(resid * resid)'], {}), '(resid * resid)\n', (955, 970), True, 'import numpy as np\n'), ((333, 350), 'numpy.random.randn', 'np.random.randn', ([], {}), '()\n', (348, 350), True, 'import numpy as np\n')]
|
import numpy as np
import heapq
import tensorflow as tf
from sklearn.metrics import roc_auc_score
from layers import Dense, CrossCompressUnit
import metrics
def test_one_user(x, train_items, test_items, item_num, Ks):
rating, u = x[0], x[1]
training_items = train_items[u] if u in train_items else []
user_pos_test = test_items[u]
all_items = set(range(item_num))
test_items = list(all_items - set(training_items))
r, auc = ranklist_by_sorted(user_pos_test, test_items, rating, Ks)
return get_performance(user_pos_test, r, auc, Ks)
def ranklist_by_sorted(user_pos_test, test_items, rating, Ks):
item_score = {}
for i in test_items:
item_score[i] = rating[i]
K_max = max(Ks)
K_max_item_score = heapq.nlargest(K_max, item_score, key=item_score.get)
r = []
for i in K_max_item_score:
if i in user_pos_test:
r.append(1)
else:
r.append(0)
auc = get_auc(item_score, user_pos_test)
return r, auc
def get_performance(user_pos_test, r, auc, Ks):
precision, recall, ndcg, hit_ratio = [], [], [], []
for K in Ks:
precision.append(metrics.precision_at_k(r, K))
recall.append(metrics.recall_at_k(r, K, len(user_pos_test)))
ndcg.append(metrics.ndcg_at_k(r, K))
hit_ratio.append(metrics.hit_at_k(r, K))
return {'recall': np.array(recall), 'precision': np.array(precision),
'ndcg': np.array(ndcg), 'hit_ratio': np.array(hit_ratio), 'auc': auc}
def get_auc(item_score, user_pos_test):
item_score = sorted(item_score.items(), key=lambda kv: kv[1])
item_score.reverse()
item_sort = [x[0] for x in item_score]
posterior = [x[1] for x in item_score]
r = []
for i in item_sort:
if i in user_pos_test:
r.append(1)
else:
r.append(0)
auc = metrics.auc(ground_truth=r, prediction=posterior)
return auc
class MKR(object):
def __init__(self, args, n_users, n_items, n_entities, n_relations):
self._parse_args(n_users, n_items, n_entities, n_relations)
self._build_inputs()
self._build_model(args)
self._build_loss(args)
self._build_train(args)
def _parse_args(self, n_users, n_items, n_entities, n_relations):
self.n_user = n_users
self.n_item = n_items
self.n_entity = n_entities
self.n_relation = n_relations
# for computing l2 loss
self.vars_rs = []
self.vars_kge = []
def _build_inputs(self):
self.user_indices = tf.placeholder(tf.int32, [None], 'user_indices')
self.item_indices = tf.placeholder(tf.int32, [None], 'item_indices')
self.labels = tf.placeholder(tf.float32, [None], 'labels')
def _build_model(self, args):
self._build_low_layers(args)
self._build_high_layers(args)
def _build_low_layers(self, args):
self.user_emb_matrix = tf.get_variable('user_emb_matrix', [self.n_user, args.dim])
self.item_emb_matrix = tf.get_variable('item_emb_matrix', [self.n_item, args.dim])
# [batch_size, dim]
self.user_embeddings = tf.nn.embedding_lookup(self.user_emb_matrix, self.user_indices)
self.item_embeddings = tf.nn.embedding_lookup(self.item_emb_matrix, self.item_indices)
for _ in range(args.L):
user_mlp = Dense(input_dim=args.dim, output_dim=args.dim)
item_mlp = Dense(input_dim=args.dim, output_dim=args.dim)
self.user_embeddings = user_mlp(self.user_embeddings)
self.item_embeddings = item_mlp(self.item_embeddings)
self.vars_rs.extend(user_mlp.vars)
self.vars_rs.extend(item_mlp.vars)
def _build_high_layers(self, args):
# RS
use_inner_product = True
if use_inner_product:
# [batch_size]
self.scores = tf.reduce_sum(self.user_embeddings * self.item_embeddings, axis=1)
else:
# [batch_size, dim * 2]
self.user_item_concat = tf.concat([self.user_embeddings, self.item_embeddings], axis=1)
for _ in range(args.H - 1):
rs_mlp = Dense(input_dim=args.dim * 2, output_dim=args.dim * 2)
# [batch_size, dim * 2]
self.user_item_concat = rs_mlp(self.user_item_concat)
self.vars_rs.extend(rs_mlp.vars)
rs_pred_mlp = Dense(input_dim=args.dim * 2, output_dim=1)
# [batch_size]
self.scores = tf.squeeze(rs_pred_mlp(self.user_item_concat))
self.vars_rs.extend(rs_pred_mlp.vars)
self.scores_normalized = tf.nn.sigmoid(self.scores)
def _build_loss(self, args):
# RS
self.base_loss_rs = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(labels=self.labels, logits=self.scores))
self.l2_loss_rs = tf.nn.l2_loss(self.user_embeddings) + tf.nn.l2_loss(self.item_embeddings)
for var in self.vars_rs:
self.l2_loss_rs += tf.nn.l2_loss(var)
self.loss_rs = self.base_loss_rs + self.l2_loss_rs * args.l2_weight
def _build_train(self, args):
# TODO: better optimizer?
self.optimizer_rs = tf.train.AdamOptimizer(args.lr_rs).minimize(self.loss_rs)
def train_rs(self, sess, feed_dict):
return sess.run([self.optimizer_rs, self.loss_rs], feed_dict)
def eval(self, sess, feed_dict):
labels, scores = sess.run([self.labels, self.scores_normalized], feed_dict)
auc = roc_auc_score(y_true=labels, y_score=scores)
predictions = [1 if i >= 0.5 else 0 for i in scores]
true_positives = sum([1 if p == 1 and l == 1 else 0 for p, l in zip(predictions, labels)])
precision = true_positives / sum(predictions)
recall = true_positives / sum(labels)
f1 = 2 * precision * recall / (precision + recall)
acc = np.mean(np.equal(predictions, labels))
return auc, acc, precision, recall, f1
def calc_ndcg(self, sess, model, train_data, test_data, batch_size):
Ks = [20, 40, 60, 80, 100]
result = {'precision': np.zeros(len(Ks)), 'recall': np.zeros(len(Ks)), 'ndcg': np.zeros(len(Ks)),
'hit_ratio': np.zeros(len(Ks)), 'auc': 0.}
item_num = max(np.max(train_data[:, 1]), np.max(test_data[:, 1]))
test_users = []
train_items, test_items = {}, {}
for uid, iid, label in train_data:
if label == 1:
if uid not in train_items:
train_items[uid] = []
train_items[uid].append(iid)
for uid, iid, label in test_data:
if label == 1:
if uid not in test_items:
test_items[uid] = []
test_items[uid].append(iid)
if uid not in test_users:
test_users.append(uid)
n_test_users = len(test_users)
n_item_batchs = item_num // batch_size + 1
for i, uid in enumerate(test_users):
if (i + 1) % 500 == 0:
print("user:::", i, '/', len(test_users))
item_batch = range(item_num)
feed_dict = {model.user_indices: [uid] * item_num,
model.item_indices: item_batch,
model.labels: [1] * item_num,
model.head_indices: item_batch}
rate_batch = sess.run(self.scores_normalized, feed_dict)
re = test_one_user([rate_batch, uid], train_items, test_items, item_num, Ks)
result['precision'] += re['precision']/n_test_users
result['recall'] += re['recall']/n_test_users
result['ndcg'] += re['ndcg']/n_test_users
result['hit_ratio'] += re['hit_ratio']/n_test_users
result['auc'] += re['auc']/n_test_users
return result
def get_scores(self, sess, feed_dict):
return sess.run([self.item_indices, self.scores_normalized], feed_dict)
|
[
"tensorflow.reduce_sum",
"tensorflow.nn.sigmoid_cross_entropy_with_logits",
"tensorflow.get_variable",
"metrics.ndcg_at_k",
"metrics.auc",
"tensorflow.concat",
"heapq.nlargest",
"numpy.equal",
"tensorflow.placeholder",
"numpy.max",
"tensorflow.nn.embedding_lookup",
"sklearn.metrics.roc_auc_score",
"metrics.precision_at_k",
"layers.Dense",
"metrics.hit_at_k",
"tensorflow.nn.sigmoid",
"numpy.array",
"tensorflow.nn.l2_loss",
"tensorflow.train.AdamOptimizer"
] |
[((749, 802), 'heapq.nlargest', 'heapq.nlargest', (['K_max', 'item_score'], {'key': 'item_score.get'}), '(K_max, item_score, key=item_score.get)\n', (763, 802), False, 'import heapq\n'), ((1857, 1906), 'metrics.auc', 'metrics.auc', ([], {'ground_truth': 'r', 'prediction': 'posterior'}), '(ground_truth=r, prediction=posterior)\n', (1868, 1906), False, 'import metrics\n'), ((1366, 1382), 'numpy.array', 'np.array', (['recall'], {}), '(recall)\n', (1374, 1382), True, 'import numpy as np\n'), ((1397, 1416), 'numpy.array', 'np.array', (['precision'], {}), '(precision)\n', (1405, 1416), True, 'import numpy as np\n'), ((1438, 1452), 'numpy.array', 'np.array', (['ndcg'], {}), '(ndcg)\n', (1446, 1452), True, 'import numpy as np\n'), ((1467, 1486), 'numpy.array', 'np.array', (['hit_ratio'], {}), '(hit_ratio)\n', (1475, 1486), True, 'import numpy as np\n'), ((2555, 2603), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', '[None]', '"""user_indices"""'], {}), "(tf.int32, [None], 'user_indices')\n", (2569, 2603), True, 'import tensorflow as tf\n'), ((2632, 2680), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', '[None]', '"""item_indices"""'], {}), "(tf.int32, [None], 'item_indices')\n", (2646, 2680), True, 'import tensorflow as tf\n'), ((2703, 2747), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None]', '"""labels"""'], {}), "(tf.float32, [None], 'labels')\n", (2717, 2747), True, 'import tensorflow as tf\n'), ((2929, 2988), 'tensorflow.get_variable', 'tf.get_variable', (['"""user_emb_matrix"""', '[self.n_user, args.dim]'], {}), "('user_emb_matrix', [self.n_user, args.dim])\n", (2944, 2988), True, 'import tensorflow as tf\n'), ((3020, 3079), 'tensorflow.get_variable', 'tf.get_variable', (['"""item_emb_matrix"""', '[self.n_item, args.dim]'], {}), "('item_emb_matrix', [self.n_item, args.dim])\n", (3035, 3079), True, 'import tensorflow as tf\n'), ((3140, 3203), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.user_emb_matrix', 
'self.user_indices'], {}), '(self.user_emb_matrix, self.user_indices)\n', (3162, 3203), True, 'import tensorflow as tf\n'), ((3235, 3298), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.item_emb_matrix', 'self.item_indices'], {}), '(self.item_emb_matrix, self.item_indices)\n', (3257, 3298), True, 'import tensorflow as tf\n'), ((4619, 4645), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['self.scores'], {}), '(self.scores)\n', (4632, 4645), True, 'import tensorflow as tf\n'), ((5492, 5536), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', ([], {'y_true': 'labels', 'y_score': 'scores'}), '(y_true=labels, y_score=scores)\n', (5505, 5536), False, 'from sklearn.metrics import roc_auc_score\n'), ((1150, 1178), 'metrics.precision_at_k', 'metrics.precision_at_k', (['r', 'K'], {}), '(r, K)\n', (1172, 1178), False, 'import metrics\n'), ((1269, 1292), 'metrics.ndcg_at_k', 'metrics.ndcg_at_k', (['r', 'K'], {}), '(r, K)\n', (1286, 1292), False, 'import metrics\n'), ((1319, 1341), 'metrics.hit_at_k', 'metrics.hit_at_k', (['r', 'K'], {}), '(r, K)\n', (1335, 1341), False, 'import metrics\n'), ((3355, 3401), 'layers.Dense', 'Dense', ([], {'input_dim': 'args.dim', 'output_dim': 'args.dim'}), '(input_dim=args.dim, output_dim=args.dim)\n', (3360, 3401), False, 'from layers import Dense, CrossCompressUnit\n'), ((3425, 3471), 'layers.Dense', 'Dense', ([], {'input_dim': 'args.dim', 'output_dim': 'args.dim'}), '(input_dim=args.dim, output_dim=args.dim)\n', (3430, 3471), False, 'from layers import Dense, CrossCompressUnit\n'), ((3869, 3935), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(self.user_embeddings * self.item_embeddings)'], {'axis': '(1)'}), '(self.user_embeddings * self.item_embeddings, axis=1)\n', (3882, 3935), True, 'import tensorflow as tf\n'), ((4022, 4085), 'tensorflow.concat', 'tf.concat', (['[self.user_embeddings, self.item_embeddings]'], {'axis': '(1)'}), '([self.user_embeddings, self.item_embeddings], axis=1)\n', (4031, 4085), True, 'import tensorflow 
as tf\n'), ((4392, 4435), 'layers.Dense', 'Dense', ([], {'input_dim': '(args.dim * 2)', 'output_dim': '(1)'}), '(input_dim=args.dim * 2, output_dim=1)\n', (4397, 4435), False, 'from layers import Dense, CrossCompressUnit\n'), ((4749, 4828), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'labels': 'self.labels', 'logits': 'self.scores'}), '(labels=self.labels, logits=self.scores)\n', (4788, 4828), True, 'import tensorflow as tf\n'), ((4856, 4891), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.user_embeddings'], {}), '(self.user_embeddings)\n', (4869, 4891), True, 'import tensorflow as tf\n'), ((4894, 4929), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.item_embeddings'], {}), '(self.item_embeddings)\n', (4907, 4929), True, 'import tensorflow as tf\n'), ((4994, 5012), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['var'], {}), '(var)\n', (5007, 5012), True, 'import tensorflow as tf\n'), ((5878, 5907), 'numpy.equal', 'np.equal', (['predictions', 'labels'], {}), '(predictions, labels)\n', (5886, 5907), True, 'import numpy as np\n'), ((6252, 6276), 'numpy.max', 'np.max', (['train_data[:, 1]'], {}), '(train_data[:, 1])\n', (6258, 6276), True, 'import numpy as np\n'), ((6278, 6301), 'numpy.max', 'np.max', (['test_data[:, 1]'], {}), '(test_data[:, 1])\n', (6284, 6301), True, 'import numpy as np\n'), ((4151, 4205), 'layers.Dense', 'Dense', ([], {'input_dim': '(args.dim * 2)', 'output_dim': '(args.dim * 2)'}), '(input_dim=args.dim * 2, output_dim=args.dim * 2)\n', (4156, 4205), False, 'from layers import Dense, CrossCompressUnit\n'), ((5186, 5220), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['args.lr_rs'], {}), '(args.lr_rs)\n', (5208, 5220), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
from mahjong.constants import CHUN, HAKU, HATSU
from mahjong.hand_calculating.yaku import Yaku
from mahjong.utils import is_pon
class Daisangen(Yaku):
"""
The hand contains three sets of dragons
"""
def __init__(self, yaku_id=None):
super(Daisangen, self).__init__(yaku_id)
def set_attributes(self):
self.tenhou_id = 39
self.name = 'Daisangen'
self.english = 'Big Three Dragons'
self.japanese = '大三元'
self.han_open = 13
self.han_closed = 13
self.is_yakuman = True
def is_condition_met(self, hand, *args):
count_of_dragon_pon_sets = 0
for item in hand:
if is_pon(item) and item[0] in [CHUN, HAKU, HATSU]:
count_of_dragon_pon_sets += 1
return count_of_dragon_pon_sets == 3
|
[
"mahjong.utils.is_pon"
] |
[((703, 715), 'mahjong.utils.is_pon', 'is_pon', (['item'], {}), '(item)\n', (709, 715), False, 'from mahjong.utils import is_pon\n')]
|
import boto3
import time
import subprocess
import argparse
import os
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--queue', dest='queue',default='workshop', help='Amazon SQS')
parser.add_argument('--region', dest='region',default=None, help='Region')
args = parser.parse_args()
QUEUE = args.queue
REGION = args.region
try:
if REGION is None:
REGION = os.getenv("REGION")
except:
raise "Must pass region as environment variable or argument"
sleepTime = 5
# takes inputs, runs simulations and returns CSV output
def runSimulation(stock_symbol,short,longVa,days,iterVa,ukey,s3Bucket):
print('Starting simulation')
# use Subprocess to call code str.find(str, beg=0, end=len(string))
try:
output_value = 0
subprocess.check_output(['python','worker.py','--stock', stock_symbol,'--short_window_days',short, '--long_window_days', longVa, '--trading_days', days, '--id', ukey])
return output_value
except subprocess.CalledProcessError as e:
return e.returncode
# Save the simulation results to an S3 bucket
def saveToS3(bucket_name,filename):
s3 = boto3.client('s3')
s3.upload_file(filename, bucket_name, filename)
# s3 = boto3.resource('s3', region_name=REGION)
# s3.Bucket(s3Bucket).put_object(Key=file_key, Body=simData)
# Checks the queue for new messages,
# send them to the simulation if found and deletes from queue once complete.
def main():
while True:
# Get the SQS service resource
sqs = boto3.resource('sqs', region_name=REGION)
# Get the queue
queue = sqs.get_queue_by_name(QueueName=QUEUE)
MessageCount = 0
# Get message attributes and call simulation
for message in queue.receive_messages(MessageAttributeNames=['All']):
MessageCount = MessageCount + 1
# Get the message attributes
stock_symbol = ''
short = ''
longVa = ''
days = ''
iterVa = ''
ukey = ''
s3Bucket = ''
if message.message_attributes is not None:
stock_symbol = message.message_attributes.get('stock').get('StringValue')
short = message.message_attributes.get('short').get('StringValue')
longVa = message.message_attributes.get('long').get('StringValue')
days = message.message_attributes.get('days').get('StringValue')
iterVa = message.message_attributes.get('iter').get('StringValue')
ukey = message.message_attributes.get('key').get('StringValue')
s3Bucket = message.message_attributes.get('bucket').get('StringValue')
# run simulations and get results
simStatus = runSimulation(stock_symbol,short,longVa,days,iterVa,ukey,s3Bucket)
print('Simulation Returned: \n {0}'.format(simStatus))
print('Simulation ReturnCode = {0}'.format(simStatus))
if simStatus == 0:
mcfile = '{0}_{1}_MonteCarloSimResult.csv'.format(ukey, stock_symbol)
ptfile = '{0}_{1}_portfolio_total.csv'.format(ukey, stock_symbol)
srfile = '{0}_{1}_sim_results.csv'.format(ukey, stock_symbol)
prafile = '{0}_PortfolioRiskAssessment.csv'.format(ukey)
# save results to s3 bucket
print('Saving files to S3')
saveToS3(s3Bucket, mcfile)
saveToS3(s3Bucket, ptfile)
saveToS3(s3Bucket, srfile)
saveToS3(s3Bucket, prafile)
print('Cleaning up local files')
subprocess.check_output(['rm',mcfile])
subprocess.check_output(['rm',ptfile])
subprocess.check_output(['rm',srfile])
subprocess.check_output(['rm',prafile])
# delete the processed message
print('Telling SQS message was successfully processed')
message.delete()
print('Simulation success: stock={0}, short={1}, long={2}, days={3}, iterations={4}, id={5}, bucket={6}'.format(stock_symbol,short,longVa,days,iterVa,ukey,s3Bucket))
print('Messages processed: {0}'.format(MessageCount))
else:
print('Simulation failed: stock={0}, short={1}, long={2}, days={3}, iterations={4}, id={5}, bucket={6}'.format(stock_symbol,short,longVa,days,iterVa,ukey,s3Bucket))
print('pausing for {0} seconds.'.format(sleepTime))
time.sleep(sleepTime)
main()
|
[
"argparse.ArgumentParser",
"boto3.client",
"subprocess.check_output",
"time.sleep",
"boto3.resource",
"os.getenv"
] |
[((79, 140), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process some integers."""'}), "(description='Process some integers.')\n", (102, 140), False, 'import argparse\n'), ((1160, 1178), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1172, 1178), False, 'import boto3\n'), ((412, 431), 'os.getenv', 'os.getenv', (['"""REGION"""'], {}), "('REGION')\n", (421, 431), False, 'import os\n'), ((797, 976), 'subprocess.check_output', 'subprocess.check_output', (["['python', 'worker.py', '--stock', stock_symbol, '--short_window_days',\n short, '--long_window_days', longVa, '--trading_days', days, '--id', ukey]"], {}), "(['python', 'worker.py', '--stock', stock_symbol,\n '--short_window_days', short, '--long_window_days', longVa,\n '--trading_days', days, '--id', ukey])\n", (820, 976), False, 'import subprocess\n'), ((1551, 1592), 'boto3.resource', 'boto3.resource', (['"""sqs"""'], {'region_name': 'REGION'}), "('sqs', region_name=REGION)\n", (1565, 1592), False, 'import boto3\n'), ((4659, 4680), 'time.sleep', 'time.sleep', (['sleepTime'], {}), '(sleepTime)\n', (4669, 4680), False, 'import time\n'), ((3711, 3750), 'subprocess.check_output', 'subprocess.check_output', (["['rm', mcfile]"], {}), "(['rm', mcfile])\n", (3734, 3750), False, 'import subprocess\n'), ((3766, 3805), 'subprocess.check_output', 'subprocess.check_output', (["['rm', ptfile]"], {}), "(['rm', ptfile])\n", (3789, 3805), False, 'import subprocess\n'), ((3821, 3860), 'subprocess.check_output', 'subprocess.check_output', (["['rm', srfile]"], {}), "(['rm', srfile])\n", (3844, 3860), False, 'import subprocess\n'), ((3876, 3916), 'subprocess.check_output', 'subprocess.check_output', (["['rm', prafile]"], {}), "(['rm', prafile])\n", (3899, 3916), False, 'import subprocess\n')]
|
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.shortcuts import render, get_object_or_404,redirect
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.decorators import login_required
from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from neighbor.tokens import account_activation_token
from django.contrib.auth.models import User
from neighbor.models import Neighborhood, UserProfile, Business, Post
from django.core.mail import EmailMessage
from django.views.decorators.http import require_POST
from itertools import chain
'''
Function to return confirm message
'''
def confirm(request):
return render(request, 'registration/confirm.html')
'''
Function to signup
'''
def signup(request):
if request.method == 'POST':
user_form = SignupForm(data=request.POST)
profile_form = UserProfileForm(data=request.POST)
if user_form.is_valid() and profile_form.is_valid():
user = user_form.save(commit=False)
user.is_active = False
user.save()
profile = profile_form.save(commit=False)
profile.user = user
current_site = get_current_site(request)
mail_subject = 'Activate your blog account.'
message = render_to_string('registration/email.html', {
'user': user,
'domain': current_site.domain,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'token': account_activation_token.make_token(user),
})
to_email = user_form.cleaned_data.get('email')
email = EmailMessage(
mail_subject, message, to=[to_email]
)
if 'picture' in request.FILES:
profile.picture = request.FILES['picture']
profile.save()
registered = True
email.send()
return redirect('confirm')
else:
profile.save()
email.send()
registered = True
return redirect('confirm')
else:
user_form = SignupForm()
profile_form = UserProfileForm(data = request.POST)
return render(request, 'registration/signup.html', {'user_form': user_form, 'profile_form': profile_form,})
'''
Function to send activate the account
'''
def activate(request, uidb64, token):
try:
uid = force_text(urlsafe_base64_decode(uidb64))
user = User.objects.get(pk=uid)
except(TypeError, ValueError, OverflowError, User.DoesNotExist):
user = None
if user is not None and account_activation_token.check_token(user, token):
user.is_active = True
user.save()
login(request, user)
return render(request, 'registration/success.html')
else:
return HttpResponse('Activation link is invalid!')
'''
Function to login the User
'''
def user_login(request):
if request.method == 'POST':
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user:
if user:
login(request, user)
next = request.POST.get('next', '/')
return HttpResponseRedirect(next)
else:
return HttpResponse("Your account is disabled.")
else:
print("Invalid login details: {0}, {1}".format(username, password))
return render(request, 'registration/invalid.html')
else:
return render(request, 'registration/login.html')
'''
Function to create neighborhood
'''
@login_required
def create_neighborhood(request):
form = NeighborhoodForm()
if request.method == 'POST':
form = NeighborhoodForm(request.POST, request.FILES)
if form.is_valid():
neighborhood = Neighborhood(image = request.FILES['image'])
neighborhood = form.save(commit=True)
return redirect('neighborhoods')
else:
print(form.errors)
return render(request, 'neighborhood/neighborhood.html', context = {'form':form,})
'''
Function to display neighborhoods
'''
@login_required
def neighborhoods(request):
neighborhoods = Neighborhood.objects.all()
return render(request, 'neighborhood/neighborhood_view.html', context = {'neighborhoods' : neighborhoods,})
'''
Function to edit neighborhoods
'''
@login_required
def show_neighborhood(request, id=None):
neighborhood = get_object_or_404(Neighborhood, id=id)
form = BusinessForm()
all_businesses = Business.objects.all()
businesses = all_businesses.filter(neighborhood_id=id)
print(businesses)
if request.method == 'POST':
form = BusinessForm(request.POST, request.FILES)
if form.is_valid():
business = Business(image = request.FILES.get('image'))
business = form.save(commit=False)
business.neighborhood = neighborhood
business = business.save()
next = request.POST.get('next', '/')
return HttpResponseRedirect(next)
else:
form = BusinessForm()
return render(request, 'neighborhood/neighborhood_dash.html', context={'form' : form, 'neighborhood' : neighborhood, 'businesses':businesses})
'''
Function to display businesses
'''
@login_required
def view_business(request):
business = Business.objects.all()
return render(request, 'business/view_business.html', context = {'business':business})
'''
Function to display one businesses
'''
def one_business(request, id=None):
business = get_object_or_404(Business, id=id)
return render(request, 'business/business_dash.html', context = {'business':business})
'''
Function to add businesses
'''
@login_required
def create_business(request):
form = BusinessForm()
if request.method == 'POST':
form = BusinessForm(request.POST, request.FILES)
if form.is_valid():
business = Business(image=request.FILES.get('image'))
business = form.save(commit=True)
return redirect('view_business')
else:
print(form.errors)
return render(request, 'business/create_business.html', context={'form': form})
'''
Function to display the home page
'''
def index(request):
neighborhoods = Neighborhood.objects.all().order_by('-id')[:4]
if request.method == 'POST':
user_form = SignupForm(data = request.POST)
profile_form = UserProfileForm(data = request.POST)
if user_form.is_valid() and profile_form.is_valid():
user = user_form.save(commit=False)
user.is_active = False
user.save()
profile = profile_form.save(commit=False)
profile.user = user
print('1')
current_site = get_current_site(request)
mail_subject = 'Activate your blog account.'
message = render_to_string('registration/email.html', {
'user': user,
'domain': current_site.domain,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'token': account_activation_token.make_token(user),
})
to_email = user_form.cleaned_data.get('email')
email = EmailMessage(
mail_subject, message, to=[to_email]
)
if 'picture' in request.FILES:
profile.picture = request.FILES['picture']
profile.save()
registered = True
email.send()
else:
email.send()
next = request.POST.get('next', '/')
return HttpResponseRedirect(next)
else:
user_form = SignupForm()
profile_form = UserProfileForm(data = request.POST)
return render(request, 'home/home.html', context = {'neighborhoods' : neighborhoods, 'user_form': user_form, 'profile_form': profile_form,})
'''
Function to logout users
'''
@login_required
def user_logout(request):
logout(request)
return redirect('index')
'''
Function to display posts
'''
@login_required
def posts(request, id=None):
neighborhood = get_object_or_404(Neighborhood, id=id)
form = PostForm()
all_posts = Post.objects.all().order_by('-id')
posts = all_posts.filter(neighborhood_id=id)
if request.method == 'POST':
form = PostForm(request.POST, request.FILES)
if form.is_valid():
post = Post(image = request.FILES.get('image'))
post = form.save(commit=False)
post.author = request.user
post.neighborhood = neighborhood
post = post.save()
next = request.POST.get('next', '/')
return HttpResponseRedirect(next)
else:
form = PostForm()
return render(request, 'posts/posts.html', context = {'form':form, 'posts':posts, 'neighborhood':neighborhood,})
'''
Function to search for neighborhood
'''
def search(request):
if 'contains' in request.GET and request.GET["contains"]:
query = request.GET.get("contains")
businesses = Business.search(query)
neighborhoods = Neighborhood.search(query)
results = list(chain(neighborhoods, businesses))
return render(request, 'registration/search.html',{"output":output, "results":results})
else:
message = "You haven't searched for anything"
return render(request, 'registration/search.html',{"message":message})
return render(request, 'registration/search.html',)
'''
Function to edit neighborhood
'''
@login_required
def edit_neighborhood(request, id = None):
neighborhood = get_object_or_404(Neighborhood, id=id)
n_id = neighborhood.id
if request.method == 'POST':
neighborhood.name = request.POST.get('name')
neighborhood.location = request.POST.get('location')
neighborhood.population = request.POST.get('population')
neighborhood.police = request.POST.get('police')
neighborhood.ambulance = request.POST.get('ambulance')
if request.FILES == True:
neighborhood.image = request.FILES.get('image')
update_neighborhood = neighborhood.save()
return redirect('show_neighborhood', id=n_id)
else:
update_neighborhood = neighborhood.save()
return redirect('show_neighborhood', id=n_id)
else:
return render(request, 'neighborhood/edit_neighborhood.html', {'neighborhood':neighborhood, 'n_id' : n_id,})
return render(request, 'neighborhood/edit_neighborhood.html', {'neighborhood':neighborhood, 'n_id' : n_id,})
'''
Function to delete neighborhood
'''
@login_required
def delete_neighborhood(request, id = None):
neighborhood = get_object_or_404(Neighborhood, id=id)
neighborhood.delete_neighborhood()
return redirect('neighborhoods')
'''
Function to view user
'''
@login_required
def view_user(request, id = None):
user = get_object_or_404(User, id=id)
return render(request, 'home/view_user.html', {'user':user})
|
[
"neighbor.models.Neighborhood.search",
"neighbor.models.Neighborhood.objects.all",
"neighbor.models.Business.objects.all",
"neighbor.forms.NeighborhoodForm",
"django.http.HttpResponseRedirect",
"neighbor.models.Business.search",
"django.contrib.auth.login",
"django.http.HttpResponse",
"django.contrib.auth.models.User.objects.get",
"django.contrib.auth.logout",
"neighbor.tokens.account_activation_token.check_token",
"django.shortcuts.render",
"itertools.chain",
"neighbor.models.Neighborhood",
"django.shortcuts.get_object_or_404",
"django.core.mail.EmailMessage",
"django.utils.http.urlsafe_base64_decode",
"neighbor.forms.BusinessForm",
"neighbor.tokens.account_activation_token.make_token",
"django.utils.encoding.force_bytes",
"django.shortcuts.redirect",
"neighbor.forms.PostForm",
"neighbor.models.Post.objects.all",
"neighbor.forms.UserProfileForm",
"django.contrib.auth.authenticate",
"django.contrib.sites.shortcuts.get_current_site",
"neighbor.forms.SignupForm"
] |
[((998, 1042), 'django.shortcuts.render', 'render', (['request', '"""registration/confirm.html"""'], {}), "(request, 'registration/confirm.html')\n", (1004, 1042), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2574, 2677), 'django.shortcuts.render', 'render', (['request', '"""registration/signup.html"""', "{'user_form': user_form, 'profile_form': profile_form}"], {}), "(request, 'registration/signup.html', {'user_form': user_form,\n 'profile_form': profile_form})\n", (2580, 2677), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4101, 4119), 'neighbor.forms.NeighborhoodForm', 'NeighborhoodForm', ([], {}), '()\n', (4117, 4119), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((4457, 4530), 'django.shortcuts.render', 'render', (['request', '"""neighborhood/neighborhood.html"""'], {'context': "{'form': form}"}), "(request, 'neighborhood/neighborhood.html', context={'form': form})\n", (4463, 4530), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4643, 4669), 'neighbor.models.Neighborhood.objects.all', 'Neighborhood.objects.all', ([], {}), '()\n', (4667, 4669), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((4681, 4782), 'django.shortcuts.render', 'render', (['request', '"""neighborhood/neighborhood_view.html"""'], {'context': "{'neighborhoods': neighborhoods}"}), "(request, 'neighborhood/neighborhood_view.html', context={\n 'neighborhoods': neighborhoods})\n", (4687, 4782), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4901, 4939), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Neighborhood'], {'id': 'id'}), '(Neighborhood, id=id)\n', (4918, 4939), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4951, 4965), 'neighbor.forms.BusinessForm', 'BusinessForm', ([], {}), '()\n', (4963, 4965), False, 
'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((4987, 5009), 'neighbor.models.Business.objects.all', 'Business.objects.all', ([], {}), '()\n', (5007, 5009), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((5558, 5696), 'django.shortcuts.render', 'render', (['request', '"""neighborhood/neighborhood_dash.html"""'], {'context': "{'form': form, 'neighborhood': neighborhood, 'businesses': businesses}"}), "(request, 'neighborhood/neighborhood_dash.html', context={'form':\n form, 'neighborhood': neighborhood, 'businesses': businesses})\n", (5564, 5696), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((5796, 5818), 'neighbor.models.Business.objects.all', 'Business.objects.all', ([], {}), '()\n', (5816, 5818), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((5830, 5908), 'django.shortcuts.render', 'render', (['request', '"""business/view_business.html"""'], {'context': "{'business': business}"}), "(request, 'business/view_business.html', context={'business': business})\n", (5836, 5908), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((6008, 6042), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Business'], {'id': 'id'}), '(Business, id=id)\n', (6025, 6042), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((6054, 6132), 'django.shortcuts.render', 'render', (['request', '"""business/business_dash.html"""'], {'context': "{'business': business}"}), "(request, 'business/business_dash.html', context={'business': business})\n", (6060, 6132), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((6230, 6244), 'neighbor.forms.BusinessForm', 'BusinessForm', ([], {}), '()\n', (6242, 6244), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((6568, 6640), 
'django.shortcuts.render', 'render', (['request', '"""business/create_business.html"""'], {'context': "{'form': form}"}), "(request, 'business/create_business.html', context={'form': form})\n", (6574, 6640), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((8453, 8468), 'django.contrib.auth.logout', 'logout', (['request'], {}), '(request)\n', (8459, 8468), False, 'from django.contrib.auth import login, authenticate, logout\n'), ((8480, 8497), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (8488, 8497), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((8600, 8638), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Neighborhood'], {'id': 'id'}), '(Neighborhood, id=id)\n', (8617, 8638), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((8650, 8660), 'neighbor.forms.PostForm', 'PostForm', ([], {}), '()\n', (8658, 8660), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((9235, 9344), 'django.shortcuts.render', 'render', (['request', '"""posts/posts.html"""'], {'context': "{'form': form, 'posts': posts, 'neighborhood': neighborhood}"}), "(request, 'posts/posts.html', context={'form': form, 'posts': posts,\n 'neighborhood': neighborhood})\n", (9241, 9344), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((9941, 9984), 'django.shortcuts.render', 'render', (['request', '"""registration/search.html"""'], {}), "(request, 'registration/search.html')\n", (9947, 9984), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((10106, 10144), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Neighborhood'], {'id': 'id'}), '(Neighborhood, id=id)\n', (10123, 10144), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((10974, 11078), 'django.shortcuts.render', 'render', (['request', 
'"""neighborhood/edit_neighborhood.html"""', "{'neighborhood': neighborhood, 'n_id': n_id}"], {}), "(request, 'neighborhood/edit_neighborhood.html', {'neighborhood':\n neighborhood, 'n_id': n_id})\n", (10980, 11078), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((11200, 11238), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Neighborhood'], {'id': 'id'}), '(Neighborhood, id=id)\n', (11217, 11238), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((11289, 11314), 'django.shortcuts.redirect', 'redirect', (['"""neighborhoods"""'], {}), "('neighborhoods')\n", (11297, 11314), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((11411, 11441), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'id': 'id'}), '(User, id=id)\n', (11428, 11441), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((11453, 11507), 'django.shortcuts.render', 'render', (['request', '"""home/view_user.html"""', "{'user': user}"], {}), "(request, 'home/view_user.html', {'user': user})\n", (11459, 11507), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1148, 1177), 'neighbor.forms.SignupForm', 'SignupForm', ([], {'data': 'request.POST'}), '(data=request.POST)\n', (1158, 1177), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((1201, 1235), 'neighbor.forms.UserProfileForm', 'UserProfileForm', ([], {'data': 'request.POST'}), '(data=request.POST)\n', (1216, 1235), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((2490, 2502), 'neighbor.forms.SignupForm', 'SignupForm', ([], {}), '()\n', (2500, 2502), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((2526, 2560), 'neighbor.forms.UserProfileForm', 'UserProfileForm', ([], {'data': 
'request.POST'}), '(data=request.POST)\n', (2541, 2560), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((2843, 2867), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'uid'}), '(pk=uid)\n', (2859, 2867), False, 'from django.contrib.auth.models import User\n'), ((2985, 3034), 'neighbor.tokens.account_activation_token.check_token', 'account_activation_token.check_token', (['user', 'token'], {}), '(user, token)\n', (3021, 3034), False, 'from neighbor.tokens import account_activation_token\n'), ((3094, 3114), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (3099, 3114), False, 'from django.contrib.auth import login, authenticate, logout\n'), ((3131, 3175), 'django.shortcuts.render', 'render', (['request', '"""registration/success.html"""'], {}), "(request, 'registration/success.html')\n", (3137, 3175), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3201, 3244), 'django.http.HttpResponse', 'HttpResponse', (['"""Activation link is invalid!"""'], {}), "('Activation link is invalid!')\n", (3213, 3244), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((3453, 3503), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (3465, 3503), False, 'from django.contrib.auth import login, authenticate, logout\n'), ((3953, 3995), 'django.shortcuts.render', 'render', (['request', '"""registration/login.html"""'], {}), "(request, 'registration/login.html')\n", (3959, 3995), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4168, 4213), 'neighbor.forms.NeighborhoodForm', 'NeighborhoodForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (4184, 4213), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, 
BusinessForm\n'), ((5139, 5180), 'neighbor.forms.BusinessForm', 'BusinessForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (5151, 5180), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((5532, 5546), 'neighbor.forms.BusinessForm', 'BusinessForm', ([], {}), '()\n', (5544, 5546), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((6293, 6334), 'neighbor.forms.BusinessForm', 'BusinessForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (6305, 6334), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((6827, 6856), 'neighbor.forms.SignupForm', 'SignupForm', ([], {'data': 'request.POST'}), '(data=request.POST)\n', (6837, 6856), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((6882, 6916), 'neighbor.forms.UserProfileForm', 'UserProfileForm', ([], {'data': 'request.POST'}), '(data=request.POST)\n', (6897, 6916), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((8148, 8160), 'neighbor.forms.SignupForm', 'SignupForm', ([], {}), '()\n', (8158, 8160), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((8184, 8218), 'neighbor.forms.UserProfileForm', 'UserProfileForm', ([], {'data': 'request.POST'}), '(data=request.POST)\n', (8199, 8218), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((8236, 8369), 'django.shortcuts.render', 'render', (['request', '"""home/home.html"""'], {'context': "{'neighborhoods': neighborhoods, 'user_form': user_form, 'profile_form':\n profile_form}"}), "(request, 'home/home.html', context={'neighborhoods': neighborhoods,\n 'user_form': user_form, 'profile_form': 
profile_form})\n", (8242, 8369), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((8809, 8846), 'neighbor.forms.PostForm', 'PostForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (8817, 8846), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((9213, 9223), 'neighbor.forms.PostForm', 'PostForm', ([], {}), '()\n', (9221, 9223), False, 'from neighbor.forms import SignupForm, UserProfileForm, NeighborhoodForm, PostForm, BusinessForm\n'), ((9545, 9567), 'neighbor.models.Business.search', 'Business.search', (['query'], {}), '(query)\n', (9560, 9567), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((9596, 9622), 'neighbor.models.Neighborhood.search', 'Neighborhood.search', (['query'], {}), '(query)\n', (9615, 9622), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((9705, 9792), 'django.shortcuts.render', 'render', (['request', '"""registration/search.html"""', "{'output': output, 'results': results}"], {}), "(request, 'registration/search.html', {'output': output, 'results':\n results})\n", (9711, 9792), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((9866, 9931), 'django.shortcuts.render', 'render', (['request', '"""registration/search.html"""', "{'message': message}"], {}), "(request, 'registration/search.html', {'message': message})\n", (9872, 9931), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((10861, 10965), 'django.shortcuts.render', 'render', (['request', '"""neighborhood/edit_neighborhood.html"""', "{'neighborhood': neighborhood, 'n_id': n_id}"], {}), "(request, 'neighborhood/edit_neighborhood.html', {'neighborhood':\n neighborhood, 'n_id': n_id})\n", (10867, 10965), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1518, 1543), 'django.contrib.sites.shortcuts.get_current_site', 
'get_current_site', (['request'], {}), '(request)\n', (1534, 1543), False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((1976, 2026), 'django.core.mail.EmailMessage', 'EmailMessage', (['mail_subject', 'message'], {'to': '[to_email]'}), '(mail_subject, message, to=[to_email])\n', (1988, 2026), False, 'from django.core.mail import EmailMessage\n'), ((2797, 2826), 'django.utils.http.urlsafe_base64_decode', 'urlsafe_base64_decode', (['uidb64'], {}), '(uidb64)\n', (2818, 2826), False, 'from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode\n'), ((3881, 3925), 'django.shortcuts.render', 'render', (['request', '"""registration/invalid.html"""'], {}), "(request, 'registration/invalid.html')\n", (3887, 3925), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((4269, 4311), 'neighbor.models.Neighborhood', 'Neighborhood', ([], {'image': "request.FILES['image']"}), "(image=request.FILES['image'])\n", (4281, 4311), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((4383, 4408), 'django.shortcuts.redirect', 'redirect', (['"""neighborhoods"""'], {}), "('neighborhoods')\n", (4391, 4408), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((5480, 5506), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['next'], {}), '(next)\n', (5500, 5506), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((6494, 6519), 'django.shortcuts.redirect', 'redirect', (['"""view_business"""'], {}), "('view_business')\n", (6502, 6519), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((7224, 7249), 'django.contrib.sites.shortcuts.get_current_site', 'get_current_site', (['request'], {}), '(request)\n', (7240, 7249), False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((7682, 7732), 'django.core.mail.EmailMessage', 'EmailMessage', (['mail_subject', 'message'], {'to': '[to_email]'}), 
'(mail_subject, message, to=[to_email])\n', (7694, 7732), False, 'from django.core.mail import EmailMessage\n'), ((8677, 8695), 'neighbor.models.Post.objects.all', 'Post.objects.all', ([], {}), '()\n', (8693, 8695), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((9161, 9187), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['next'], {}), '(next)\n', (9181, 9187), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((9650, 9682), 'itertools.chain', 'chain', (['neighborhoods', 'businesses'], {}), '(neighborhoods, businesses)\n', (9655, 9682), False, 'from itertools import chain\n'), ((10671, 10709), 'django.shortcuts.redirect', 'redirect', (['"""show_neighborhood"""'], {'id': 'n_id'}), "('show_neighborhood', id=n_id)\n", (10679, 10709), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((10797, 10835), 'django.shortcuts.redirect', 'redirect', (['"""show_neighborhood"""'], {'id': 'n_id'}), "('show_neighborhood', id=n_id)\n", (10805, 10835), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2284, 2303), 'django.shortcuts.redirect', 'redirect', (['"""confirm"""'], {}), "('confirm')\n", (2292, 2303), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2439, 2458), 'django.shortcuts.redirect', 'redirect', (['"""confirm"""'], {}), "('confirm')\n", (2447, 2458), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3559, 3579), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (3564, 3579), False, 'from django.contrib.auth import login, authenticate, logout\n'), ((3656, 3682), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['next'], {}), '(next)\n', (3676, 3682), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((3725, 3766), 'django.http.HttpResponse', 'HttpResponse', (['"""Your account is disabled."""'], {}), "('Your 
account is disabled.')\n", (3737, 3766), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((6727, 6753), 'neighbor.models.Neighborhood.objects.all', 'Neighborhood.objects.all', ([], {}), '()\n', (6751, 6753), False, 'from neighbor.models import Neighborhood, UserProfile, Business, Post\n'), ((8090, 8116), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['next'], {}), '(next)\n', (8110, 8116), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((1839, 1880), 'neighbor.tokens.account_activation_token.make_token', 'account_activation_token.make_token', (['user'], {}), '(user)\n', (1874, 1880), False, 'from neighbor.tokens import account_activation_token\n'), ((7545, 7586), 'neighbor.tokens.account_activation_token.make_token', 'account_activation_token.make_token', (['user'], {}), '(user)\n', (7580, 7586), False, 'from neighbor.tokens import account_activation_token\n'), ((1791, 1811), 'django.utils.encoding.force_bytes', 'force_bytes', (['user.pk'], {}), '(user.pk)\n', (1802, 1811), False, 'from django.utils.encoding import force_bytes, force_text\n'), ((7497, 7517), 'django.utils.encoding.force_bytes', 'force_bytes', (['user.pk'], {}), '(user.pk)\n', (7508, 7517), False, 'from django.utils.encoding import force_bytes, force_text\n')]
|
import json
import graphene
import pytest
from django.shortcuts import reverse
from saleor.product.models import Category, Product, ProductAttribute
from prices import Money
from .utils import get_graphql_content
def test_category_query(client, product_in_stock):
category = Category.objects.first()
query = '''
query {
category(id: "%(category_pk)s") {
id
name
ancestors {
edges {
node {
name
}
}
}
children {
edges {
node {
name
}
}
}
siblings {
edges {
node {
name
}
}
}
}
}
''' % {'category_pk': graphene.Node.to_global_id('Category', category.pk)}
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
category_data = content['data']['category']
assert category_data is not None
assert category_data['name'] == category.name
assert (
len(category_data['ancestors']['edges']) ==
category.get_ancestors().count())
assert (
len(category_data['children']['edges']) ==
category.get_children().count())
assert (
len(category_data['siblings']['edges']) ==
category.get_siblings().count())
def test_fetch_all_products(client, product_in_stock):
query = '''
query {
products {
totalCount
edges {
node {
id
}
}
}
}
'''
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
num_products = Product.objects.count()
assert content['data']['products']['totalCount'] == num_products
assert len(content['data']['products']['edges']) == num_products
@pytest.mark.djangodb
def test_fetch_unavailable_products(client, product_in_stock):
Product.objects.update(is_published=False)
query = '''
query {
products {
totalCount
edges {
node {
id
}
}
}
}
'''
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
assert content['data']['products']['totalCount'] == 0
assert not content['data']['products']['edges']
def test_product_query(client, product_in_stock):
category = Category.objects.first()
product = category.products.first()
query = '''
query {
category(id: "%(category_id)s") {
products {
edges {
node {
id
name
url
thumbnailUrl
images { url }
variants {
name
stockQuantity
}
availability {
available,
priceRange {
start {
gross {
amount
currency
localized
}
net {
amount
currency
localized
}
currency
}
}
}
}
}
}
}
}
''' % {'category_id': graphene.Node.to_global_id('Category', category.id)}
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
assert content['data']['category'] is not None
product_edges_data = content['data']['category']['products']['edges']
assert len(product_edges_data) == category.products.count()
product_data = product_edges_data[0]['node']
assert product_data['name'] == product.name
assert product_data['url'] == product.get_absolute_url()
gross = product_data['availability']['priceRange']['start']['gross']
assert float(gross['amount']) == float(product.price.amount)
def test_filter_product_by_category(client, product_in_stock):
category = product_in_stock.category
query = '''
query getProducts($categoryId: ID) {
products(category: $categoryId) {
edges {
node {
name
}
}
}
}
'''
response = client.post(
reverse('api'),
{
'query': query,
'variables': json.dumps(
{
'categoryId': graphene.Node.to_global_id(
'Category', category.id)}),
'operationName': 'getProducts'})
content = get_graphql_content(response)
assert 'errors' not in content
product_data = content['data']['products']['edges'][0]['node']
assert product_data['name'] == product_in_stock.name
def test_fetch_product_by_id(client, product_in_stock):
query = '''
query ($productId: ID!) {
node(id: $productId) {
... on Product {
name
}
}
}
'''
response = client.post(
reverse('api'),
{
'query': query,
'variables': json.dumps(
{
'productId': graphene.Node.to_global_id(
'Product', product_in_stock.id)})})
content = get_graphql_content(response)
assert 'errors' not in content
product_data = content['data']['node']
assert product_data['name'] == product_in_stock.name
def test_filter_product_by_attributes(client, product_in_stock):
product_attr = product_in_stock.product_type.product_attributes.first()
category = product_in_stock.category
attr_value = product_attr.values.first()
filter_by = '%s:%s' % (product_attr.slug, attr_value.slug)
query = '''
query {
category(id: "%(category_id)s") {
products(attributes: ["%(filter_by)s"]) {
edges {
node {
name
}
}
}
}
}
''' % {
'category_id': graphene.Node.to_global_id('Category', category.id),
'filter_by': filter_by}
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
product_data = content['data']['category']['products']['edges'][0]['node']
assert product_data['name'] == product_in_stock.name
def test_sort_products(client, product_in_stock):
# set price of the first product
product_in_stock.price = Money('10.00', 'USD')
product_in_stock.save()
# create the second product with higher price
product_in_stock.pk = None
product_in_stock.price = Money('20.00', 'USD')
product_in_stock.save()
query = '''
query {
products(sortBy: "%(sort_by)s") {
edges {
node {
price {
amount
}
}
}
}
}
'''
asc_price_query = query % {'sort_by': 'price'}
response = client.post(reverse('api'), {'query': asc_price_query})
content = get_graphql_content(response)
assert 'errors' not in content
product_data = content['data']['products']['edges'][0]['node']
price_0 = content['data']['products']['edges'][0]['node']['price']['amount']
price_1 = content['data']['products']['edges'][1]['node']['price']['amount']
assert price_0 < price_1
desc_price_query = query % {'sort_by': '-price'}
response = client.post(reverse('api'), {'query': desc_price_query})
content = get_graphql_content(response)
assert 'errors' not in content
product_data = content['data']['products']['edges'][0]['node']
price_0 = content['data']['products']['edges'][0]['node']['price']['amount']
price_1 = content['data']['products']['edges'][1]['node']['price']['amount']
assert price_0 > price_1
def test_attributes_query(client, product_in_stock):
attributes = ProductAttribute.objects.prefetch_related('values')
query = '''
query {
attributes {
edges {
node {
id
name
slug
values {
id
name
slug
}
}
}
}
}
'''
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
attributes_data = content['data']['attributes']['edges']
assert len(attributes_data) == attributes.count()
def test_attributes_in_category_query(client, product_in_stock):
category = Category.objects.first()
query = '''
query {
attributes(inCategory: "%(category_id)s") {
edges {
node {
id
name
slug
values {
id
name
slug
}
}
}
}
}
''' % {'category_id': graphene.Node.to_global_id('Category', category.id)}
response = client.post(reverse('api'), {'query': query})
content = get_graphql_content(response)
assert 'errors' not in content
attributes_data = content['data']['attributes']['edges']
assert len(attributes_data) == ProductAttribute.objects.count()
def test_real_query(client, product_in_stock):
category = product_in_stock.category
product_attr = product_in_stock.product_type.product_attributes.first()
category = product_in_stock.category
attr_value = product_attr.values.first()
filter_by = '%s:%s' % (product_attr.slug, attr_value.slug)
query = '''
query Root($categoryId: ID!, $sortBy: String, $first: Int, $attributesFilter: [AttributeScalar], $minPrice: Float, $maxPrice: Float) {
category(id: $categoryId) {
...CategoryPageFragmentQuery
__typename
}
attributes(inCategory: $categoryId) {
edges {
node {
...ProductFiltersFragmentQuery
__typename
}
}
}
}
fragment CategoryPageFragmentQuery on Category {
id
name
url
ancestors {
edges {
node {
name
id
url
__typename
}
}
}
children {
edges {
node {
name
id
url
slug
__typename
}
}
}
products(first: $first, sortBy: $sortBy, attributes: $attributesFilter, price_Gte: $minPrice, price_Lte: $maxPrice) {
...ProductListFragmentQuery
__typename
}
__typename
}
fragment ProductListFragmentQuery on ProductCountableConnection {
edges {
node {
...ProductFragmentQuery
__typename
}
__typename
}
pageInfo {
hasNextPage
__typename
}
__typename
}
fragment ProductFragmentQuery on Product {
id
name
price {
amount
currency
localized
__typename
}
availability {
...ProductPriceFragmentQuery
__typename
}
thumbnailUrl1x: thumbnailUrl(size: "255x255")
thumbnailUrl2x: thumbnailUrl(size: "510x510")
url
__typename
}
fragment ProductPriceFragmentQuery on ProductAvailability {
available
discount {
gross {
amount
currency
__typename
}
__typename
}
priceRange {
stop {
gross {
amount
currency
localized
__typename
}
currency
__typename
}
start {
gross {
amount
currency
localized
__typename
}
currency
__typename
}
__typename
}
__typename
}
fragment ProductFiltersFragmentQuery on ProductAttribute {
id
name
slug
values {
id
name
slug
color
__typename
}
__typename
}
'''
response = client.post(
'/graphql/', {
'query': query,
'variables': json.dumps(
{
'categoryId': graphene.Node.to_global_id(
'Category', category.id),
'sortBy': 'name',
'first': 1,
'attributesFilter': [filter_by]})})
content = get_graphql_content(response)
assert 'errors' not in content
|
[
"saleor.product.models.Product.objects.count",
"prices.Money",
"graphene.Node.to_global_id",
"saleor.product.models.ProductAttribute.objects.count",
"saleor.product.models.ProductAttribute.objects.prefetch_related",
"saleor.product.models.Product.objects.update",
"django.shortcuts.reverse",
"saleor.product.models.Category.objects.first"
] |
[((283, 307), 'saleor.product.models.Category.objects.first', 'Category.objects.first', ([], {}), '()\n', (305, 307), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((1977, 2000), 'saleor.product.models.Product.objects.count', 'Product.objects.count', ([], {}), '()\n', (1998, 2000), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((2230, 2272), 'saleor.product.models.Product.objects.update', 'Product.objects.update', ([], {'is_published': '(False)'}), '(is_published=False)\n', (2252, 2272), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((2782, 2806), 'saleor.product.models.Category.objects.first', 'Category.objects.first', ([], {}), '()\n', (2804, 2806), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((7393, 7414), 'prices.Money', 'Money', (['"""10.00"""', '"""USD"""'], {}), "('10.00', 'USD')\n", (7398, 7414), False, 'from prices import Money\n'), ((7554, 7575), 'prices.Money', 'Money', (['"""20.00"""', '"""USD"""'], {}), "('20.00', 'USD')\n", (7559, 7575), False, 'from prices import Money\n'), ((8850, 8901), 'saleor.product.models.ProductAttribute.objects.prefetch_related', 'ProductAttribute.objects.prefetch_related', (['"""values"""'], {}), "('values')\n", (8891, 8901), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((9596, 9620), 'saleor.product.models.Category.objects.first', 'Category.objects.first', ([], {}), '()\n', (9618, 9620), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((1004, 1018), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (1011, 1018), False, 'from django.shortcuts import reverse\n'), ((1845, 1859), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (1852, 1859), False, 'from django.shortcuts import reverse\n'), ((2492, 2506), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], 
{}), "('api')\n", (2499, 2506), False, 'from django.shortcuts import reverse\n'), ((4211, 4225), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (4218, 4225), False, 'from django.shortcuts import reverse\n'), ((5174, 5188), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (5181, 5188), False, 'from django.shortcuts import reverse\n'), ((5904, 5918), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (5911, 5918), False, 'from django.shortcuts import reverse\n'), ((7026, 7040), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (7033, 7040), False, 'from django.shortcuts import reverse\n'), ((7934, 7948), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (7941, 7948), False, 'from django.shortcuts import reverse\n'), ((8396, 8410), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (8403, 8410), False, 'from django.shortcuts import reverse\n'), ((9286, 9300), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (9293, 9300), False, 'from django.shortcuts import reverse\n'), ((10107, 10121), 'django.shortcuts.reverse', 'reverse', (['"""api"""'], {}), "('api')\n", (10114, 10121), False, 'from django.shortcuts import reverse\n'), ((10316, 10348), 'saleor.product.models.ProductAttribute.objects.count', 'ProductAttribute.objects.count', ([], {}), '()\n', (10346, 10348), False, 'from saleor.product.models import Category, Product, ProductAttribute\n'), ((924, 975), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Category"""', 'category.pk'], {}), "('Category', category.pk)\n", (950, 975), False, 'import graphene\n'), ((4131, 4182), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Category"""', 'category.id'], {}), "('Category', category.id)\n", (4157, 4182), False, 'import graphene\n'), ((6914, 6965), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Category"""', 
'category.id'], {}), "('Category', category.id)\n", (6940, 6965), False, 'import graphene\n'), ((10027, 10078), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Category"""', 'category.id'], {}), "('Category', category.id)\n", (10053, 10078), False, 'import graphene\n'), ((5317, 5368), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Category"""', 'category.id'], {}), "('Category', category.id)\n", (5343, 5368), False, 'import graphene\n'), ((6046, 6104), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Product"""', 'product_in_stock.id'], {}), "('Product', product_in_stock.id)\n", (6072, 6104), False, 'import graphene\n'), ((13879, 13930), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Category"""', 'category.id'], {}), "('Category', category.id)\n", (13905, 13930), False, 'import graphene\n')]
|
from django.core.management.base import BaseCommand
from django.db.models import Count
from project.pastebin.models import Country
class Command(BaseCommand):
    """Print the five countries whose users authored the most pastes."""
    help = 'countries statistics'

    def handle(self, *args, **kwargs):
        # Annotate each country with the total paste count of its users,
        # keep the top five, and fetch only the fields we display.
        countries = Country.objects.annotate(
            pastes_count=Count('users__pastes')).order_by(
            '-pastes_count')[:5].values('id', 'pastes_count', 'title')
        # A plain for-loop (not a side-effect comprehension) and
        # self.stdout.write, the BaseCommand convention for output.
        for country in countries:
            self.stdout.write(str(country))
|
[
"django.db.models.Count"
] |
[((306, 328), 'django.db.models.Count', 'Count', (['"""users__pastes"""'], {}), "('users__pastes')\n", (311, 328), False, 'from django.db.models import Count\n')]
|
__author__ = "<NAME>"
__copyright__ = "Copyright, 2021, <NAME>"
__license__ = "3-Clause BSD License"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
def polynomial_basis(theta: np.array, degree: int) -> np.array:
    """Calculates polynomial basis for the omnidirectional camera model.

    Parameters
    ----------
    theta : numpy.array
        theta-angles for which the polynomial basis will be calculated for
    degree : int
        number of rows of the basis. E.g. degree = 3, basis = [1.0 theta theta^2]

    Returns
    -------
    numpy.array
        Polynomial basis vector/matrix. If theta = [theta1, theta2, theta3] and degree = 3, then
        the basis will be:
        [1.0, 1.0, 1.0;
        theta1, theta2, theta3;
        theta1^2, theta2^2, theta3^2]

    Raises
    ------
    ValueError
        If degree is smaller than 1.
    """
    # Minimum degree is 1
    if degree < 1:
        raise ValueError("Degree has to be 1 or greater!")
    # np.float was removed in NumPy 1.20+; the builtin float is the same dtype.
    basis = np.empty((degree, theta.size), dtype=float)
    basis[0, :] = 1.0
    # Row k holds theta**k: fill rows with theta, then multiply cumulatively.
    for row in range(1, degree):
        basis[row, :] = theta
    for row in range(2, degree):
        basis[row, :] *= basis[row - 1, :]
    return basis
def perspective_lut(image_shape: tuple, principal_point: np.array, focal_length: float,
                    model_coefficients: np.array) -> tuple:
    """
    Calculates a look-up-table (LUT) for converting images captured with an omnidirectional camera, described by
    the model coefficients, into perspective camera images (i.e. pin-hole camera). The relation between the 3D half-ray
    emanating from the single point and the corresponding pixel, observed in the image plane, is described by a
    polynomial basis and the model coefficients. The look-up-table values can be used for converting images into
    perspective camera images, for example, by using OpenCV's remap function:
        cv2.remap(image, u, v, cv2.INTER_LINEAR)
    For more information, take a look at the paper:
    "A Toolbox for Easily Calibrating Omnidirectional Cameras", <NAME>, <NAME> and <NAME>.

    Parameters
    ----------
    image_shape : tuple of ints
        Shape of the image (rows, cols, channels)
    principal_point : (float, float)
        Principal point (i.e. optical centre of the camera) [px, py]
    focal_length : float
        Focal length
    model_coefficients :
        Coefficients of the omnidirectional lens model (https://sites.google.com/site/scarabotix/ocamcalib-toolbox)

    Returns
    -------
    (u, v)
        A tuple containing the look-up-table values for converting images into perspective camera images.
        u and v both have the same shape as image_shape (for rows and columns)
    """
    focal_length = np.abs(focal_length)

    # Create image coordinate mesh-grids. As the name implies, these are in the image coordinate system
    # with the origin at the top left corner.
    # np.float was removed in NumPy 1.20+; the builtin float is the same dtype.
    u, v = np.meshgrid(
        np.arange(image_shape[1], dtype=float),
        np.arange(image_shape[0], dtype=float)
    )

    # Convert the coordinates into sensor coordinates (origin is at the principal point, and the
    # sensor is a focal length distance away from the lens optical centre)
    u -= principal_point[0]
    v -= principal_point[1]
    sensor_coords = np.vstack((u.flatten(), v.flatten(), np.ones(u.size) * focal_length))

    # Calculate the polynomial basis for the camera/lens model
    # rho is the Euclidean distance of the sensor position from the principal point
    rho = np.sqrt(np.square(sensor_coords[0, :]) + np.square(sensor_coords[1, :]))
    # NOTE(review): rho is 0 for a pixel exactly at the principal point,
    # which makes r /= rho produce NaN there — confirm callers avoid that case.
    theta = np.arctan(np.divide(-sensor_coords[2, :], rho))

    # calculate the polynomial basis, based on the angle
    basis = polynomial_basis(theta, model_coefficients.size)
    r = np.multiply(model_coefficients.reshape((model_coefficients.size, -1)), basis)
    r = np.sum(r, axis=0)
    r /= rho

    x_result = principal_point[0] + sensor_coords[0, :] * r
    y_result = principal_point[1] + sensor_coords[1, :] * r
    x_result = x_result.reshape((image_shape[0], image_shape[1]))
    y_result = y_result.reshape((image_shape[0], image_shape[1]))

    return x_result.astype(np.float32), y_result.astype(np.float32)
|
[
"numpy.divide",
"numpy.sum",
"numpy.abs",
"numpy.empty",
"numpy.square",
"numpy.ones",
"numpy.arange"
] |
[((1709, 1755), 'numpy.empty', 'np.empty', (['(degree, theta.size)'], {'dtype': 'np.float'}), '((degree, theta.size), dtype=np.float)\n', (1717, 1755), True, 'import numpy as np\n'), ((1772, 1796), 'numpy.ones', 'np.ones', (['(1, theta.size)'], {}), '((1, theta.size))\n', (1779, 1796), True, 'import numpy as np\n'), ((3493, 3513), 'numpy.abs', 'np.abs', (['focal_length'], {}), '(focal_length)\n', (3499, 3513), True, 'import numpy as np\n'), ((4616, 4633), 'numpy.sum', 'np.sum', (['r'], {'axis': '(0)'}), '(r, axis=0)\n', (4622, 4633), True, 'import numpy as np\n'), ((3696, 3737), 'numpy.arange', 'np.arange', (['image_shape[1]'], {'dtype': 'np.float'}), '(image_shape[1], dtype=np.float)\n', (3705, 3737), True, 'import numpy as np\n'), ((3747, 3788), 'numpy.arange', 'np.arange', (['image_shape[0]'], {'dtype': 'np.float'}), '(image_shape[0], dtype=np.float)\n', (3756, 3788), True, 'import numpy as np\n'), ((4367, 4401), 'numpy.divide', 'np.divide', (['(-sensor_coords[2,])', 'rho'], {}), '(-sensor_coords[2,], rho)\n', (4376, 4401), True, 'import numpy as np\n'), ((4280, 4310), 'numpy.square', 'np.square', (['sensor_coords[0, :]'], {}), '(sensor_coords[0, :])\n', (4289, 4310), True, 'import numpy as np\n'), ((4313, 4343), 'numpy.square', 'np.square', (['sensor_coords[1, :]'], {}), '(sensor_coords[1, :])\n', (4322, 4343), True, 'import numpy as np\n'), ((4081, 4096), 'numpy.ones', 'np.ones', (['u.size'], {}), '(u.size)\n', (4088, 4096), True, 'import numpy as np\n')]
|
import tensorflow as tf
class TFrecordCreator:
    """
    Creates TFRecord Dataset to store the memorization metric
    """

    def __init__(self, path):
        self.path = path
        self.writer = tf.io.TFRecordWriter(path)

    def _int64_feature(self, value):
        """Returns an int64_list from a bool / enum / int / uint."""
        int64_list = tf.train.Int64List(value=[value])
        return tf.train.Feature(int64_list=int64_list)

    def _float_feature(self, value):
        """Returns a float_list from a float / double."""
        float_list = tf.train.FloatList(value=[value])
        return tf.train.Feature(float_list=float_list)

    def _serialize_example(self, result, index):
        """
        Creates a tf.train.Example message ready to be written to a file.
        """
        features = tf.train.Features(feature={
            'result': self._float_feature(result),
            'index': self._int64_feature(index),
        })
        return tf.train.Example(features=features).SerializeToString()

    def write(self, result, index):
        # Serialize the pair and append it to the open record stream.
        self.writer.write(self._serialize_example(result, index))

    def close(self):
        """
        Closes the tfrecord stream
        For some reason, using __del__ doesn't work
        """
        self.writer.close()
class TFRecordLoader:
    """Reads (result, index) records back from a TFRecord file."""

    def __init__(self, path):
        self.path = path
        dataset = tf.data.TFRecordDataset([path])
        self.reader = dataset.map(self._parse_fn)

    def _parse_fn(self, example_proto):
        # Schema mirrors what TFrecordCreator serializes.
        schema = {
            'result': tf.io.FixedLenFeature([], tf.float32, default_value=0.0),
            'index': tf.io.FixedLenFeature([], tf.int64, default_value=0),
        }
        parsed = tf.io.parse_single_example(example_proto, schema)
        return parsed['result'], parsed['index']

    def __iter__(self):
        return iter(self.reader)
if __name__ == '__main__':
    import os

    # Round-trip smoke test: write ten records, read them back, clean up.
    writer = TFrecordCreator('temp.tfrecord')
    for value in range(10):
        writer.write(float(value), value)
    writer.close()

    for result, index in TFRecordLoader('temp.tfrecord'):
        print(result.numpy(), index.numpy())

    os.remove('temp.tfrecord')
|
[
"os.remove",
"tensorflow.data.TFRecordDataset",
"tensorflow.train.Int64List",
"tensorflow.train.Features",
"tensorflow.io.parse_single_example",
"tensorflow.train.FloatList",
"tensorflow.io.FixedLenFeature",
"tensorflow.io.TFRecordWriter"
] |
[((2204, 2230), 'os.remove', 'os.remove', (['"""temp.tfrecord"""'], {}), "('temp.tfrecord')\n", (2213, 2230), False, 'import os\n'), ((201, 227), 'tensorflow.io.TFRecordWriter', 'tf.io.TFRecordWriter', (['path'], {}), '(path)\n', (221, 227), True, 'import tensorflow as tf\n'), ((1407, 1438), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['[path]'], {}), '([path])\n', (1430, 1438), True, 'import tensorflow as tf\n'), ((1747, 1809), 'tensorflow.io.parse_single_example', 'tf.io.parse_single_example', (['example_proto', 'feature_description'], {}), '(example_proto, feature_description)\n', (1773, 1809), True, 'import tensorflow as tf\n'), ((1591, 1647), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.float32'], {'default_value': '(0.0)'}), '([], tf.float32, default_value=0.0)\n', (1612, 1647), True, 'import tensorflow as tf\n'), ((1669, 1721), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.int64'], {'default_value': '(0)'}), '([], tf.int64, default_value=0)\n', (1690, 1721), True, 'import tensorflow as tf\n'), ((381, 414), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': '[value]'}), '(value=[value])\n', (399, 414), True, 'import tensorflow as tf\n'), ((558, 591), 'tensorflow.train.FloatList', 'tf.train.FloatList', ([], {'value': '[value]'}), '(value=[value])\n', (576, 591), True, 'import tensorflow as tf\n'), ((929, 963), 'tensorflow.train.Features', 'tf.train.Features', ([], {'feature': 'feature'}), '(feature=feature)\n', (946, 963), True, 'import tensorflow as tf\n')]
|
import os
import logging
import sys
class WebAppModuleLoader:
    """Discovers and loads web-app scanner scripts from a folder.

    Each loaded script must live under ``helios.webapp`` and expose a
    ``Scanner`` class; instances are collected in ``self.modules`` and
    driven via :meth:`run_scripts`.
    """
    # Class-level defaults; __init__ makes per-instance copies of the lists
    # so instances never share (and mutate) the same objects.
    folder = ""
    blacklist = ['base_app.py', '__init__.py']
    modules = []
    logger = None
    is_aggressive = False
    module = None

    def __init__(self, folder='../webapp', blacklist=None, is_aggressive=False, log_level=logging.INFO):
        # BUGFIX: the old code extended the *class-level* blacklist, so every
        # instantiation leaked entries into all other instances; it also used
        # a mutable default argument (blacklist=[]).
        self.blacklist = list(WebAppModuleLoader.blacklist)
        if blacklist:
            self.blacklist.extend(blacklist)
        self.modules = []
        # BUGFIX: `folder` was previously overwritten with a hard-coded
        # '../webapp' path, silently ignoring the parameter; the default
        # still resolves to the same directory.
        self.folder = os.path.join(os.path.dirname(__file__), folder)
        self.logger = logging.getLogger("Web App Scanner")
        self.logger.setLevel(log_level)
        if not self.logger.handlers:
            # Attach the stream handler only once: re-instantiating the
            # loader used to stack handlers and duplicate every log line.
            ch = logging.StreamHandler(sys.stdout)
            ch.setLevel(log_level)
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            ch.setFormatter(formatter)
            self.logger.addHandler(ch)
        self.logger.debug("Loading WebApp scripts from %s" % self.folder)
        self.is_aggressive = is_aggressive

    def load(self, script):
        """Import one script from helios.webapp and register its Scanner."""
        sys.path.insert(0, os.path.dirname(__file__))
        base = script.replace('.py', '')
        try:
            command_module = __import__("helios.webapp.%s" % base, fromlist=["helios.webapp"])
            module = command_module.Scanner()
            module.logger = self.logger
            module.logger.setLevel(self.logger.getEffectiveLevel())
            self.modules.append(module)
        except ImportError as e:
            self.logger.warning("Error importing script:%s %s" % (base, str(e)))
        except Exception as e:
            self.logger.warning("Error loading script:%s %s" % (base, str(e)))

    def load_modules(self):
        """Load every non-blacklisted .py file from self.folder."""
        for f in os.listdir(self.folder):
            if not f.endswith('.py'):
                continue
            if f in self.blacklist:
                continue
            self.load(f)

    def run_scripts(self, base, headers={}, cookies={}, scope=None):
        """Run every loaded scanner against `base`; returns {name: results}.

        NOTE(review): `headers` is currently never forwarded to the
        scanners — confirm whether they are supposed to receive it.
        """
        results = {}
        for module in self.modules:
            module.cookies = cookies
            module.scope = scope
            module.run(base)
            results[module.name] = module.results
        return results
|
[
"os.path.dirname",
"logging.StreamHandler",
"logging.Formatter",
"os.listdir",
"logging.getLogger"
] |
[((516, 552), 'logging.getLogger', 'logging.getLogger', (['"""Web App Scanner"""'], {}), "('Web App Scanner')\n", (533, 552), False, 'import logging\n'), ((606, 639), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (627, 639), False, 'import logging\n'), ((691, 764), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (708, 764), False, 'import logging\n'), ((1650, 1673), 'os.listdir', 'os.listdir', (['self.folder'], {}), '(self.folder)\n', (1660, 1673), False, 'import os\n'), ((340, 365), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (355, 365), False, 'import os\n'), ((459, 484), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (474, 484), False, 'import os\n'), ((1009, 1034), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1024, 1034), False, 'import os\n')]
|
from pydatanet.server import Server, ServerType
def onPacketSend(sender, data):
    """Receive callback: print the sender's IP together with the payload."""
    print(f"{sender.getIp()} sent me this!: {data}")


# Listen over UDP on localhost:5050 and report every incoming packet.
s = Server("localhost", 5050, ServerType.UDP)
s.on_recv_bind(onPacketSend)
s.connect()
|
[
"pydatanet.server.Server"
] |
[((139, 180), 'pydatanet.server.Server', 'Server', (['"""localhost"""', '(5050)', 'ServerType.UDP'], {}), "('localhost', 5050, ServerType.UDP)\n", (145, 180), False, 'from pydatanet.server import Server, ServerType\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import unittest
import copy
import torch
# import modules to to register feature extractors
from fcos_core.modeling.backbone import build_backbone # NoQA
from fcos_core.modeling.roi_heads.roi_heads import build_roi_heads # NoQA
from fcos_core.modeling import registry
from fcos_core.structures.bounding_box import BoxList
from fcos_core.config import cfg as g_cfg
# from utils import load_config
from tests.utils import load_config
# overwrite configs if specified, otherwise default config is used
FEATURE_EXTRACTORS_CFGS = {}
# overwrite configs if specified, otherwise default config is used
FEATURE_EXTRACTORS_INPUT_CHANNELS = {
# in_channels was not used, load through config
"ResNet50Conv5ROIFeatureExtractor": 1024,
}
def _test_feature_extractors(
    self, extractors, overwrite_cfgs, overwrite_in_channels
):
    ''' Make sure roi box feature extractors run '''

    self.assertGreater(len(extractors), 0)

    default_in_channels = 64
    for name, builder in extractors.items():
        print('Testing {}...'.format(name))
        # Use the per-extractor config when one is given, otherwise the default.
        if name in overwrite_cfgs:
            cfg = load_config(overwrite_cfgs[name])
        else:
            cfg = copy.deepcopy(g_cfg)
        in_channels = overwrite_in_channels.get(name, default_in_channels)

        extractor = builder(cfg, in_channels)
        self.assertIsNotNone(
            getattr(extractor, 'out_channels', None),
            'Need to provide out_channels for feature extractor {}'.format(name)
        )

        batch, height, width = 2, 24, 32
        feature_map = torch.rand([batch, in_channels, height, width], dtype=torch.float32)
        bboxes = [[1, 1, 10, 10], [5, 5, 8, 8], [2, 2, 3, 4]]
        img_size = [384, 512]
        box_list = BoxList(bboxes, img_size, "xyxy")
        output = extractor([feature_map], [box_list] * batch)
        self.assertEqual(
            output.shape[:2],
            torch.Size([batch * len(bboxes), extractor.out_channels])
        )
class TestFeatureExtractors(unittest.TestCase):
    # Smoke tests: every registered ROI feature extractor must build and run
    # through the shared _test_feature_extractors helper.
    def test_roi_box_feature_extractors(self):
        ''' Make sure roi box feature extractors run '''
        _test_feature_extractors(
            self,
            registry.ROI_BOX_FEATURE_EXTRACTORS,
            FEATURE_EXTRACTORS_CFGS,
            FEATURE_EXTRACTORS_INPUT_CHANNELS,
        )
    def test_roi_keypoints_feature_extractors(self):
        ''' Make sure roi keypoints feature extractors run '''
        _test_feature_extractors(
            self,
            registry.ROI_KEYPOINT_FEATURE_EXTRACTORS,
            FEATURE_EXTRACTORS_CFGS,
            FEATURE_EXTRACTORS_INPUT_CHANNELS,
        )
    def test_roi_mask_feature_extractors(self):
        ''' Make sure roi mask feature extractors run '''
        _test_feature_extractors(
            self,
            registry.ROI_MASK_FEATURE_EXTRACTORS,
            FEATURE_EXTRACTORS_CFGS,
            FEATURE_EXTRACTORS_INPUT_CHANNELS,
        )
if __name__ == "__main__":
    unittest.main()
|
[
"unittest.main",
"copy.deepcopy",
"tests.utils.load_config",
"fcos_core.structures.bounding_box.BoxList",
"torch.rand"
] |
[((3045, 3060), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3058, 3060), False, 'import unittest\n'), ((1685, 1733), 'torch.rand', 'torch.rand', (['[N, C_in, H, W]'], {'dtype': 'torch.float32'}), '([N, C_in, H, W], dtype=torch.float32)\n', (1695, 1733), False, 'import torch\n'), ((1845, 1878), 'fcos_core.structures.bounding_box.BoxList', 'BoxList', (['bboxes', 'img_size', '"""xyxy"""'], {}), "(bboxes, img_size, 'xyxy')\n", (1852, 1878), False, 'from fcos_core.structures.bounding_box import BoxList\n'), ((1172, 1205), 'tests.utils.load_config', 'load_config', (['overwrite_cfgs[name]'], {}), '(overwrite_cfgs[name])\n', (1183, 1205), False, 'from tests.utils import load_config\n'), ((1303, 1323), 'copy.deepcopy', 'copy.deepcopy', (['g_cfg'], {}), '(g_cfg)\n', (1316, 1323), False, 'import copy\n')]
|
"""
File system utils.
"""
import os
import sys
import errno
import shutil
import glob
import pwd
import codecs
import hashlib
import tarfile
import fnmatch
from datetime import datetime
from socket import gethostname
# Short aliases for common os.path helpers.
f_ext = os.path.splitext
f_expand = os.path.expanduser
f_size = os.path.getsize
is_file = os.path.isfile
is_dir = os.path.isdir
get_dir = os.path.dirname
def owner_name(filepath):
    """
    Returns: file owner name, unix only
    """
    # stat().st_uid gives the numeric uid, resolved via the passwd database.
    return pwd.getpwuid(os.stat(filepath).st_uid).pw_name
def host_name():
    "Get host name, alias with ``socket.gethostname()``"
    return gethostname()
def host_id():
    """
    Returns: first part of hostname up to '.'
    """
    return host_name().split('.')[0]
def utf_open(fname, mode):
    """
    Wrapper for codecs.open
    """
    return codecs.open(fname, mode=mode, encoding='utf-8')
def is_txt(fpath):
    "Test if file path is a text file"
    _, ext = f_ext(fpath)
    return ext == '.txt'
def f_exists(path):
    # Expand `~` before testing existence.
    return os.path.exists(f_expand(path))
def f_join(*fpaths):
    """
    join file paths and expand special symbols like `~` for home dir
    """
    return f_expand(os.path.join(*fpaths))
def f_mkdir(fpath):
    """
    Recursively creates all the subdirs
    If exist, do nothing.
    """
    os.makedirs(f_expand(fpath), exist_ok=True)
def f_mkdir_in_path(fpath):
    """
    fpath is a file,
    recursively creates all the parent dirs that lead to the file
    If exist, do nothing.
    """
    os.makedirs(get_dir(f_expand(fpath)), exist_ok=True)
def f_last_part_in_path(fpath):
    """
    https://stackoverflow.com/questions/3925096/how-to-get-only-the-last-part-of-a-path-in-python
    """
    # normpath strips any trailing slash so basename is never empty.
    return os.path.basename(os.path.normpath(f_expand(fpath)))
def f_time(fpath):
    "File modification time"
    # NOTE(review): getctime is creation/metadata-change time, not mtime —
    # the docstring may be inaccurate; confirm whether getmtime was intended.
    return str(os.path.getctime(fpath))
def f_append_before_ext(fpath, suffix):
    """
    Append a suffix to file name and retain its extension
    """
    root, ext = os.path.splitext(fpath)
    return '{}{}{}'.format(root, suffix, ext)
def f_add_ext(fpath, ext):
    """
    Append an extension if not already there
    Args:
        ext: will add a preceding `.` if doesn't exist
    """
    # Normalize the extension to a dotted form, then append only if missing.
    dotted = ext if ext.startswith('.') else '.' + ext
    return fpath if fpath.endswith(dotted) else fpath + dotted
def f_remove(fpath):
    """
    If exist, remove. Supports both dir and file. Supports glob wildcard.
    """
    fpath = f_expand(fpath)
    for f in glob.glob(fpath):
        try:
            shutil.rmtree(f)
        except OSError as e:
            if e.errno == errno.ENOTDIR:
                # Not a directory: fall back to removing it as a plain file.
                try:
                    os.remove(f)
                except: # final resort safeguard
                    pass
def f_copy(fsrc, fdst, exists_ok=False):
    """
    Supports both dir and file. Supports glob wildcard.
    """
    fsrc, fdst = f_expand(fsrc), f_expand(fdst)
    for f in glob.glob(fsrc):
        try:
            f_copytree(f, fdst, exist_ok=exists_ok)
        except OSError as e:
            if e.errno == errno.ENOTDIR:
                # Source is a plain file, not a directory.
                shutil.copy(f, fdst)
            else:
                raise
def _f_copytree(src, dst, symlinks=False,
                ignore=None, exist_ok=True, copy_function=shutil.copy2,
                ignore_dangling_symlinks=False):
    """Copied from python standard lib shutil.copytree
    except that we allow exist_ok
    Use f_copytree as entry
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()
    # Unlike stdlib copytree, exist_ok is forwarded so an existing dst is OK.
    os.makedirs(dst, exist_ok=exist_ok)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    # We can't just leave it to `copy_function` because legacy
                    # code with a custom `copy_function` may rely on copytree
                    # doing the right thing.
                    os.symlink(linkto, dstname)
                    shutil.copystat(srcname, dstname, follow_symlinks=not symlinks)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occurs. copy2 will raise an error
                    if os.path.isdir(srcname):
                        _f_copytree(srcname, dstname, symlinks, ignore,
                                    exist_ok, copy_function)
                    else:
                        copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                _f_copytree(srcname, dstname, symlinks, ignore,
                            exist_ok, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except shutil.Error as err:
            errors.extend(err.args[0])
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        shutil.copystat(src, dst)
    except OSError as why:
        # Copying file access times may fail on Windows
        if getattr(why, 'winerror', None) is None:
            errors.append((src, dst, str(why)))
    # All per-entry failures are aggregated and raised together at the end.
    if errors:
        raise shutil.Error(errors)
    return dst
def _include_patterns(*patterns):
"""Factory function that can be used with copytree() ignore parameter.
Arguments define a sequence of glob-style patterns
that are used to specify what files to NOT ignore.
Creates and returns a function that determines this for each directory
in the file hierarchy rooted at the source directory when used with
shutil.copytree().
"""
def _ignore_patterns(path, names):
keep = set(name for pattern in patterns
for name in fnmatch.filter(names, pattern))
ignore = set(name for name in names
if name not in keep and
not os.path.isdir(os.path.join(path, name)))
return ignore
return _ignore_patterns
def f_copytree(src, dst, symlinks=False, ignore=None, include=None, exist_ok=False):
    # Entry point for the recursive copy; `ignore` and `include` are
    # glob-pattern filters and cannot be combined.
    assert (ignore is None) or (include is None), \
        'ignore= and include= are mutually exclusive'
    if ignore:
        ignore = shutil.ignore_patterns(*ignore)
    elif include:
        ignore = _include_patterns(*include)
    _f_copytree(src, dst, ignore=ignore, symlinks=symlinks, exist_ok=exist_ok)
def f_move(fsrc, fdst):
    # Move every path matching the (tilde-expanded) glob pattern into fdst.
    fsrc, fdst = f_expand(fsrc), f_expand(fdst)
    for f in glob.glob(fsrc):
        shutil.move(f, fdst)
def f_split_path(fpath, normpath=True):
    """
    Splits path into a list of its component folders

    Args:
      normpath: call os.path.normpath to remove redundant '/' and
        up-level references like ".."
    """
    if normpath:
        fpath = os.path.normpath(fpath)
    components = []
    while True:
        head, tail = os.path.split(fpath)
        if head == fpath:
            # Absolute root reached (e.g. '/')
            components.insert(0, head)
            break
        if tail == fpath:
            # Single relative component left
            components.insert(0, tail)
            break
        components.insert(0, tail)
        fpath = head
    return components
def script_dir():
    """
    Returns: the dir of current script
    """
    script_path = os.path.realpath(sys.argv[0])
    return os.path.dirname(script_path)
def parent_dir(location, abspath=False):
    """
    Args:
        location: current directory or file

    Returns:
        parent directory absolute or relative path
    """
    # f_join inlined: join with os.pardir ('..'), then expand `~`.
    parent = os.path.expanduser(os.path.join(location, os.pardir))
    if abspath:
        return os.path.abspath(parent)
    return os.path.relpath(parent)
def md5_checksum(fpath):
    """
    File md5 signature
    """
    digest = hashlib.md5()
    # Read in 64 KiB chunks so large files don't need to fit in memory.
    with open(os.path.expanduser(fpath), "rb") as f:
        while True:
            chunk = f.read(65536)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def make_tar(source_file, output_tarball, compress_mode='gz'):
    """
    Args:
        source_file: source file or folder
        output_tarball: output tar file name
        compress_mode: "gz", "bz2", "xz" or "" (empty for uncompressed write)
    """
    source_file, output_tarball = f_expand(source_file), f_expand(output_tarball)
    assert compress_mode in ['gz', 'bz2', 'xz', '']
    with tarfile.open(output_tarball, 'w:'+compress_mode) as tar:
        # arcname drops the leading directories so the archive root is clean.
        tar.add(source_file, arcname=os.path.basename(source_file))
def extract_tar(source_tarball, output_dir='.', members=None):
    """
    Args:
        source_tarball: extract members from archive
        output_dir: default to current working dir
        members: must be a subset of the list returned by getmembers()
    """
    source_tarball, output_dir = f_expand(source_tarball), f_expand(output_dir)
    with tarfile.open(source_tarball, 'r:*') as tar:
        # SECURITY NOTE(review): extractall performs no member-path sanitization;
        # a malicious archive with '../' members can escape output_dir.
        tar.extractall(output_dir, members=members)
def move_with_backup(path, suffix='.bak'):
    """
    Ensures that a path is not occupied. If there is a file, rename it by
    adding @suffix. Resursively backs up everything.

    Args:
        path: file path to clear
        suffix: Add to backed up files (default: {'.bak'})
    """
    path = str(path)
    if not os.path.exists(path):
        return
    # Push any existing backup out of the way first, then back this one up.
    move_with_backup(path + suffix)
    shutil.move(path, path + suffix)
def insert_before_ext(name, insert):
    """
    log.txt -> log.ep50.txt
    """
    root, ext = os.path.splitext(name)
    return '{}{}{}'.format(root, insert, ext)
def timestamp_file_name(fname):
    """Tag fname with the current time, e.g. log.txt -> log_13-05-07_06-30-21.txt"""
    stamp = datetime.now().strftime('_%H-%M-%S_%m-%d-%y')
    return insert_before_ext(fname, stamp)
|
[
"shutil.ignore_patterns",
"os.remove",
"shutil.copystat",
"os.path.islink",
"glob.glob",
"shutil.rmtree",
"os.path.join",
"shutil.copy",
"codecs.open",
"os.path.exists",
"socket.gethostname",
"os.path.normpath",
"tarfile.open",
"os.path.getctime",
"datetime.datetime.now",
"hashlib.md5",
"shutil.Error",
"os.stat",
"os.path.basename",
"os.path.realpath",
"os.listdir",
"fnmatch.filter",
"os.makedirs",
"os.readlink",
"os.path.isdir",
"os.path.splitext",
"shutil.move",
"os.symlink",
"os.path.split"
] |
[((607, 620), 'socket.gethostname', 'gethostname', ([], {}), '()\n', (618, 620), False, 'from socket import gethostname\n'), ((821, 868), 'codecs.open', 'codecs.open', (['fname'], {'mode': 'mode', 'encoding': '"""utf-8"""'}), "(fname, mode=mode, encoding='utf-8')\n", (832, 868), False, 'import codecs\n'), ((2489, 2505), 'glob.glob', 'glob.glob', (['fpath'], {}), '(fpath)\n', (2498, 2505), False, 'import glob\n'), ((2923, 2938), 'glob.glob', 'glob.glob', (['fsrc'], {}), '(fsrc)\n', (2932, 2938), False, 'import glob\n'), ((3452, 3467), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (3462, 3467), False, 'import os\n'), ((3583, 3618), 'os.makedirs', 'os.makedirs', (['dst'], {'exist_ok': 'exist_ok'}), '(dst, exist_ok=exist_ok)\n', (3594, 3618), False, 'import os\n'), ((6895, 6910), 'glob.glob', 'glob.glob', (['fsrc'], {}), '(fsrc)\n', (6904, 6910), False, 'import glob\n'), ((8140, 8153), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (8151, 8153), False, 'import hashlib\n'), ((9615, 9635), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (9629, 9635), False, 'import os\n'), ((9817, 9839), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (9833, 9839), False, 'import os\n'), ((1172, 1193), 'os.path.join', 'os.path.join', (['*fpaths'], {}), '(*fpaths)\n', (1184, 1193), False, 'import os\n'), ((1839, 1862), 'os.path.getctime', 'os.path.getctime', (['fpath'], {}), '(fpath)\n', (1855, 1862), False, 'import os\n'), ((3731, 3754), 'os.path.join', 'os.path.join', (['src', 'name'], {}), '(src, name)\n', (3743, 3754), False, 'import os\n'), ((3773, 3796), 'os.path.join', 'os.path.join', (['dst', 'name'], {}), '(dst, name)\n', (3785, 3796), False, 'import os\n'), ((5380, 5405), 'shutil.copystat', 'shutil.copystat', (['src', 'dst'], {}), '(src, dst)\n', (5395, 5405), False, 'import shutil\n'), ((5617, 5637), 'shutil.Error', 'shutil.Error', (['errors'], {}), '(errors)\n', (5629, 5637), False, 'import shutil\n'), ((6634, 6665), 
'shutil.ignore_patterns', 'shutil.ignore_patterns', (['*ignore'], {}), '(*ignore)\n', (6656, 6665), False, 'import shutil\n'), ((6920, 6940), 'shutil.move', 'shutil.move', (['f', 'fdst'], {}), '(f, fdst)\n', (6931, 6940), False, 'import shutil\n'), ((7206, 7229), 'os.path.normpath', 'os.path.normpath', (['fpath'], {}), '(fpath)\n', (7222, 7229), False, 'import os\n'), ((7277, 7297), 'os.path.split', 'os.path.split', (['fpath'], {}), '(fpath)\n', (7290, 7297), False, 'import os\n'), ((7746, 7775), 'os.path.realpath', 'os.path.realpath', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (7762, 7775), False, 'import os\n'), ((8719, 8769), 'tarfile.open', 'tarfile.open', (['output_tarball', "('w:' + compress_mode)"], {}), "(output_tarball, 'w:' + compress_mode)\n", (8731, 8769), False, 'import tarfile\n'), ((9199, 9234), 'tarfile.open', 'tarfile.open', (['source_tarball', '"""r:*"""'], {}), "(source_tarball, 'r:*')\n", (9211, 9234), False, 'import tarfile\n'), ((9685, 9717), 'shutil.move', 'shutil.move', (['path', '(path + suffix)'], {}), '(path, path + suffix)\n', (9696, 9717), False, 'import shutil\n'), ((2532, 2548), 'shutil.rmtree', 'shutil.rmtree', (['f'], {}), '(f)\n', (2545, 2548), False, 'import shutil\n'), ((3825, 3848), 'os.path.islink', 'os.path.islink', (['srcname'], {}), '(srcname)\n', (3839, 3848), False, 'import os\n'), ((9919, 9933), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9931, 9933), False, 'from datetime import datetime\n'), ((486, 503), 'os.stat', 'os.stat', (['filepath'], {}), '(filepath)\n', (493, 503), False, 'import os\n'), ((3875, 3895), 'os.readlink', 'os.readlink', (['srcname'], {}), '(srcname)\n', (3886, 3895), False, 'import os\n'), ((4815, 4837), 'os.path.isdir', 'os.path.isdir', (['srcname'], {}), '(srcname)\n', (4828, 4837), False, 'import os\n'), ((8813, 8842), 'os.path.basename', 'os.path.basename', (['source_file'], {}), '(source_file)\n', (8829, 8842), False, 'import os\n'), ((3091, 3111), 'shutil.copy', 'shutil.copy', 
(['f', 'fdst'], {}), '(f, fdst)\n', (3102, 3111), False, 'import shutil\n'), ((4147, 4174), 'os.symlink', 'os.symlink', (['linkto', 'dstname'], {}), '(linkto, dstname)\n', (4157, 4174), False, 'import os\n'), ((4195, 4258), 'shutil.copystat', 'shutil.copystat', (['srcname', 'dstname'], {'follow_symlinks': '(not symlinks)'}), '(srcname, dstname, follow_symlinks=not symlinks)\n', (4210, 4258), False, 'import shutil\n'), ((4560, 4582), 'os.path.isdir', 'os.path.isdir', (['srcname'], {}), '(srcname)\n', (4573, 4582), False, 'import os\n'), ((6171, 6201), 'fnmatch.filter', 'fnmatch.filter', (['names', 'pattern'], {}), '(names, pattern)\n', (6185, 6201), False, 'import fnmatch\n'), ((2660, 2672), 'os.remove', 'os.remove', (['f'], {}), '(f)\n', (2669, 2672), False, 'import os\n'), ((4372, 4394), 'os.path.exists', 'os.path.exists', (['linkto'], {}), '(linkto)\n', (4386, 4394), False, 'import os\n'), ((6331, 6355), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (6343, 6355), False, 'import os\n')]
|
#!/usr/bin/env python3
import math
from sys import path
path.append("..")
from utils.convert import numparse
import pygal
from pint import UnitRegistry
if __name__ == "__main__":
    ureg = UnitRegistry()
    # --- Linear goal error: measured distances (inches) vs. the 1 m goal ---
    length_datapoints = []
    with open("../data/distances.txt", "r") as f:
        for line in f.readlines():
            dist_in = ureg.Quantity(numparse(line), ureg.inches)
            length_datapoints.append(dist_in.to(ureg.meters))
    # NOTE(review): `expected` is recorded but never used below.
    expected = ureg.Quantity(1, ureg.meters)
    err_m = [x.magnitude for x in length_datapoints]
    avg_m = sum(err_m) / len(err_m)
    # Center the samples on their mean so the box plot shows spread only.
    offset_meter_data = [x - avg_m for x in err_m]
    boxplot = pygal.Box(legend_at_bottom=True)
    boxplot.add("linear goal error (meters, shifted to average)", offset_meter_data)
    boxplot.render_to_file("distances.svg")
    # --- Angular goal error: chord lengths converted to rotation angles ---
    angular_datapoints = []
    with open("../data/rotation_distances.txt", "r") as f:
        for line in f.readlines():
            dist_in = numparse(line)
            # wheel distance from robot center is 20cm
            # so angle of rotation, given these measurements as distance between two wheel positions,
            # becomes 2 * arcsin((l/2)/20)
            angular_datapoints.append(
                ureg.Quantity(2 * math.asin((dist_in / 2) / 20), ureg.radians).to(
                    ureg.degrees
                )
            )
    expected = ureg.Quantity(20, ureg.degrees)
    err_d = [x.magnitude for x in angular_datapoints]
    avg_d = sum(err_d) / len(err_d)
    offset_angle_data = [x - avg_d for x in err_d]
    boxplot = pygal.Box(legend_at_bottom=True)
    boxplot.add("angular goal error (degrees, shifted to average)", offset_angle_data)
    boxplot.render_to_file("rotation_distances.svg")
|
[
"sys.path.append",
"math.asin",
"pint.UnitRegistry",
"utils.convert.numparse",
"pygal.Box"
] |
[((57, 74), 'sys.path.append', 'path.append', (['""".."""'], {}), "('..')\n", (68, 74), False, 'from sys import path\n'), ((193, 207), 'pint.UnitRegistry', 'UnitRegistry', ([], {}), '()\n', (205, 207), False, 'from pint import UnitRegistry\n'), ((646, 678), 'pygal.Box', 'pygal.Box', ([], {'legend_at_bottom': '(True)'}), '(legend_at_bottom=True)\n', (655, 678), False, 'import pygal\n'), ((1557, 1589), 'pygal.Box', 'pygal.Box', ([], {'legend_at_bottom': '(True)'}), '(legend_at_bottom=True)\n', (1566, 1589), False, 'import pygal\n'), ((953, 967), 'utils.convert.numparse', 'numparse', (['line'], {}), '(line)\n', (961, 967), False, 'from utils.convert import numparse\n'), ((356, 370), 'utils.convert.numparse', 'numparse', (['line'], {}), '(line)\n', (364, 370), False, 'from utils.convert import numparse\n'), ((1241, 1268), 'math.asin', 'math.asin', (['(dist_in / 2 / 20)'], {}), '(dist_in / 2 / 20)\n', (1250, 1268), False, 'import math\n')]
|
#!/bin/python3
from __future__ import print_function
from functools import wraps
import os, sys, imp, random, string, re, errno, threading, time, queue
GOAL = 100 # goal score for Hog
STRATEGY_FUNC_ATTR = 'final_strategy' # attribute of student's module that contains the strategy function
TEAM_NAME_ATTRS = ['PLAYER_NAME', 'TEAM_NAME'] # allowed attributes of student's module that contains the team name
TEAM_NAME_MAX_LEN = 100 # max length for team names (set to 0 to remove limit)
DEF_EMPTY_TEAM_NAME = "<no name given, email starts with {}>" # name for teams with an empty team name
MIN_ROLLS, MAX_ROLLS = 0, 10 # min, max roll numbers
ERROR_DEFAULT_ROLL = 5 # default roll in case of invalid strategy function (will report error)
TIMEOUT_SECONDS = 45 # max time a student's submission should run
count, out_dir, out_sw = 0, '', False
# dict of names, used to check for duplicate team names
output_names = {}
empty_name_teams = []
# print to stderr
def eprint(*args, **kwargs):
""" print to stderr """
print(*args, file=sys.stderr, **kwargs)
class Worker(threading.Thread):
def __init__ (self, func, args, kwargs, q):
threading.Thread.__init__ (self)
self.func = func
self.args = args
self.kwargs = kwargs
self.q = q
self.setDaemon (True)
def run (self):
self.q.put((True, self.func(*self.args, **self.kwargs)))
class Timer(threading.Thread):
def __init__ (self, timeout, error_message, worker, q):
threading.Thread.__init__ (self)
self.timeout = timeout
self.error_message = error_message
self.worker = worker
self.q = q
self.setDaemon (True)
def run (self):
time.sleep(self.timeout)
self.q.put((False, None))
def timeout(seconds=10, error_message=os.strerror(errno.ETIME)):
""" makes function error if ran for too long """
def decorator(func):
def wrapper(*args, **kwargs):
q = queue.Queue()
worker = Worker(func, args, kwargs, q)
timer = Timer(seconds, error_message, worker, q)
worker.start()
timer.start()
code, result = q.get()
if worker.isAlive():
del worker
if timer.isAlive():
del timer
if code:
return result
else:
print("ERROR: Conversion timed out (> {} s) for: ".format(TIMEOUT_SECONDS) + args[0])
return wraps(func)(wrapper)
return decorator
@timeout(TIMEOUT_SECONDS)
def convert(file):
""" convert a single file """
import shutil, os, sys
sys.setrecursionlimit(200000)
module_path = file[:-3] # cut off .py
module_dir, module_name = os.path.split(module_path)
# make sure module's dependencies work
sys.path[-1] = module_dir
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
# import module
try:
module = imp.load_source(module_name, file)
# try to prevent use of dangerous libraries, although not going to be possible really
module.subprocess = module.shutil = "trolled"
if hasattr(module, "os"):
module.os.rmdir = module.os.remove = "trolled"
except Exception as e:
# report errors while importing
eprint ("\nERROR: error occurred while loading " + file + ":")
eprint (type(e).__name__ + ': ' + str(e))
eprint ("skipping...\n")
return
if hasattr(module, STRATEGY_FUNC_ATTR):
strat = getattr(module, STRATEGY_FUNC_ATTR)
else:
eprint ("ERROR: " + file + " has no attribute " + STRATEGY_FUNC_ATTR + " , skipping...")
del module
return
output_name = ""
for attr in TEAM_NAME_ATTRS:
if hasattr(module, attr):
val = str(getattr(module, attr))
if val:
output_name = getattr(module, attr)
setattr(module, attr, "")
if not output_name:
eprint ("WARNING: submission " + file + " has no team name. Using default name...")
module_dir_name = ""
if '/' in module_dir:
module_dir_name = module_dir[module_dir.index('/')+1:]
elif '\\' in module_dir:
module_dir_name = module_dir[module_dir.index('\\')+1:]
if not module_dir_name: module_dir_name = module_name
output_name = DEF_EMPTY_TEAM_NAME.format(module_dir_name[0])
empty_name_teams.append(module_dir_name)
# check for team names that are too long
if len(output_name) > TEAM_NAME_MAX_LEN and TEAM_NAME_MAX_LEN > 0:
eprint ("WARNING: " + file + " has a team name longer than " + str(TEAM_NAME_MAX_LEN) +
" chars. Truncating...")
output_name = output_name[:TEAM_NAME_MAX_LEN-3] + "..."
# check for duplicate team names
strat_name = re.sub(r"[\r\n]", "", output_name)
try:
output_name = output_name.encode('ascii','ignore').decode('ascii')
output_name = re.sub(r"[\\/:*?""<>|+=,\r\n]", "", output_name)
except:
output_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12))
if output_name in output_names:
full_output_name = output_name + "_" + str(output_names[output_name]) + '.strat'
strat_name += "_" + str(output_names[output_name])
output_names[output_name] += 1
eprint("WARNING: found multiple teams with name", output_name + ". Writing to output file",
full_output_name, "instead to disambiguate...")
else:
output_names[output_name] = 1
full_output_name = output_name + '.strat'
# make sure output directories exist
if out_dir:
try:
os.makedirs(out_dir)
except:
pass
full_output_name = full_output_name.replace('\"', '')
out = open(os.path.join(out_dir, full_output_name), 'w', encoding='utf-8')
# write out new strategy
out.write('strategy ' + strat_name + '\n')
nerror = 0
errname = ""
for i in range(GOAL):
for j in range(GOAL):
if j: out.write(' ')
try:
rolls = strat(i, j)
# check if output valid
if type(rolls) != int or rolls < MIN_ROLLS or rolls > MAX_ROLLS:
if type(rolls) != int:
errname = "WARNING: team " + strat_name + "'s strategy function outputted something other than a number!"
else:
errname = "WARNING: team " + strat_name + "'s strategy function outputted an invalid number of rolls:" + str(rolls) + "!"
nerror+=1
rolls = ERROR_DEFAULT_ROLL
out.write(str(rolls))
except Exception as e:
# report errors while running strategy
nerror += 1
errname = type(e).__name__ + " " + str(e)
out.write(str(ERROR_DEFAULT_ROLL))
out.write('\n')
if nerror:
eprint ("\nERROR: " + str(nerror) + " error(s) occurred while running " + STRATEGY_FUNC_ATTR + ' for ' + strat_name + '(' + file + "):")
eprint (errname)
out.flush()
out.close()
print (">> converted: " + strat_name + " (" + file + ")")
del module
global count
count += 1 # counting how many converted
def convert_dir(dir = os.path.dirname(__file__)):
""" convert all files in a directory (does not recurse) """
for file in os.listdir(dir or None):
path = os.path.join(dir, file)
if os.path.isdir(path):
convert_dir(path)
elif file == '__init__.py' or file == __file__ or file[-3:] != '.py': continue
else: convert(path)
# add an empty entry to sys.path so that we can add dependencies for each student module
sys.path.append('')
print ('')
for i in range(1, len(sys.argv)):
path = sys.argv[i]
if out_sw:
out_sw = False
out_dir = path
continue
if path == '-o':
out_sw = True
continue
if os.path.exists(path):
if os.path.isdir(path):
convert_dir(path)
else:
convert(path)
else:
eprint ("ERROR: can't access " + path + ", skipping...")
if len(sys.argv) <= 1:
print ("""usage: python3 hogconv.py [-o output_dir] [file1] [file2] ...\n
Converts each Python Hog strategy to a .strat (space-separated matrix) file that may then be imported into Bacon.
Saves resulting files to the current directory by default. Use -o to specify a different directory.\n""")
else:
print ("\nconverted a total of " + str(count) + (" strategies." if count != 1 else " strategy."))
print ("\ntips: run 'bacon -i -f " + (out_dir + "/" if out_dir else "") + "*.strat' in bash to import the converted strategies into hog.")
print ("in powershell: 'bacon -i -f (ls " + (out_dir + "\\" if out_dir else "") + "*.strat | % FullName)'\n")
print ("after strategies have been imported, run 'bacon -t [num_threads] [-f output_path]' to run tournament.")
print ("to clear all imported strategies, use 'bacon -rm all'.")
if empty_name_teams:
print("WARNING: some teams " + str(empty_name_teams) + " did not specify team names in their submissions!")
|
[
"sys.path.append",
"queue.Queue",
"threading.Thread.__init__",
"os.path.join",
"os.makedirs",
"os.path.isdir",
"os.path.dirname",
"os.path.realpath",
"os.path.exists",
"random.choice",
"time.sleep",
"imp.load_source",
"os.strerror",
"functools.wraps",
"sys.setrecursionlimit",
"re.sub",
"os.listdir",
"os.path.split"
] |
[((7965, 7984), 'sys.path.append', 'sys.path.append', (['""""""'], {}), "('')\n", (7980, 7984), False, 'import shutil, os, sys\n'), ((1821, 1845), 'os.strerror', 'os.strerror', (['errno.ETIME'], {}), '(errno.ETIME)\n', (1832, 1845), False, 'import shutil, os, sys\n'), ((2655, 2684), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(200000)'], {}), '(200000)\n', (2676, 2684), False, 'import shutil, os, sys\n'), ((2762, 2788), 'os.path.split', 'os.path.split', (['module_path'], {}), '(module_path)\n', (2775, 2788), False, 'import shutil, os, sys\n'), ((4906, 4941), 're.sub', 're.sub', (['"""[\\\\r\\\\n]"""', '""""""', 'output_name'], {}), "('[\\\\r\\\\n]', '', output_name)\n", (4912, 4941), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((7518, 7543), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (7533, 7543), False, 'import shutil, os, sys\n'), ((7626, 7649), 'os.listdir', 'os.listdir', (['(dir or None)'], {}), '(dir or None)\n', (7636, 7649), False, 'import shutil, os, sys\n'), ((8223, 8243), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (8237, 8243), False, 'import shutil, os, sys\n'), ((1153, 1184), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1178, 1184), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((1503, 1534), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1528, 1534), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((1716, 1740), 'time.sleep', 'time.sleep', (['self.timeout'], {}), '(self.timeout)\n', (1726, 1740), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((2983, 3017), 'imp.load_source', 'imp.load_source', (['module_name', 'file'], {}), '(module_name, file)\n', (2998, 3017), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((5047, 5094), 
're.sub', 're.sub', (["'[\\\\\\\\/:*?<>|+=,\\r\\n]'", '""""""', 'output_name'], {}), "('[\\\\\\\\/:*?<>|+=,\\r\\n]', '', output_name)\n", (5053, 5094), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((5922, 5961), 'os.path.join', 'os.path.join', (['out_dir', 'full_output_name'], {}), '(out_dir, full_output_name)\n', (5934, 5961), False, 'import shutil, os, sys\n'), ((7666, 7689), 'os.path.join', 'os.path.join', (['dir', 'file'], {}), '(dir, file)\n', (7678, 7689), False, 'import shutil, os, sys\n'), ((7701, 7720), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (7714, 7720), False, 'import shutil, os, sys\n'), ((8256, 8275), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (8269, 8275), False, 'import shutil, os, sys\n'), ((1980, 1993), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (1991, 1993), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n'), ((2499, 2510), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2504, 2510), False, 'from functools import wraps\n'), ((2903, 2929), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2919, 2929), False, 'import shutil, os, sys\n'), ((5790, 5810), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (5801, 5810), False, 'import shutil, os, sys\n'), ((5138, 5191), 'random.choice', 'random.choice', (['(string.ascii_uppercase + string.digits)'], {}), '(string.ascii_uppercase + string.digits)\n', (5151, 5191), False, 'import os, sys, imp, random, string, re, errno, threading, time, queue\n')]
|
import time
def time_it(func):
# courtesy of <NAME>!
def wrapper(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
end = time.time()
print(func.__name__ + ' took ' + str((end - start) * 1000) + ' ms')
return result
return wrapper
|
[
"time.time"
] |
[((108, 119), 'time.time', 'time.time', ([], {}), '()\n', (117, 119), False, 'import time\n'), ((173, 184), 'time.time', 'time.time', ([], {}), '()\n', (182, 184), False, 'import time\n')]
|
import time
start = time.perf_counter()
import io
import os
import sys
from karl import KarlSDK
from datetime import datetime
karl = KarlSDK()
import torch, torchvision
from torchvision import transforms
from PIL import Image, ImageDraw
sys.stderr.write('imports\t\t%.3fs\n' % (time.perf_counter() - start))
torch.hub.set_dir('torch')
model = torchvision.models.detection.maskrcnn_resnet50_fpn(pretrained=True)
model.eval()
sys.stderr.write('init model \t%.3fs\n' % (time.perf_counter() - start))
img_path = 'tmp.png'
with open(img_path, 'wb') as f:
img_bytes = karl.get_triggered()
f.write(img_bytes)
sys.stderr.write('read img \t%.3fs (%s)\n' % (time.perf_counter() - start, img_path))
img = Image.open(img_path).convert("RGB")
img_tensor = transforms.ToTensor()(img)
sys.stderr.write('init img \t%.3fs (%s)\n' % (time.perf_counter() - start, img_path))
with torch.no_grad():
output = model([img_tensor])[0]
sys.stderr.write('inference \t%.3fs\n' % (time.perf_counter() - start))
# sys.stderr.write('{}\n'.format(output['boxes']))
# sys.stderr.write('{}\n'.format(output['labels']))
# sys.stderr.write('{}\n'.format(output['scores']))
# sys.stderr.write('Filtering by confidence > 0.6 and label == 1 (person)\n')
boxes = output['boxes'].tolist()
boxes_filtered = []
for i in range(len(output['labels'])):
if output['scores'][i] > 0.6 and output['labels'][i] == 1:
boxes_filtered.append(boxes[i])
draw = ImageDraw.Draw(img)
for box in boxes_filtered:
print(box)
draw.rectangle(box, outline="yellow", width=4)
img_byte_arr = io.BytesIO()
img.save(img_byte_arr, format='PNG')
img_byte_arr = img_byte_arr.getvalue()
sys.stderr.write('prepare results \t%.3fs\n' % (time.perf_counter() - start))
karl.push("box", img_byte_arr)
sys.stderr.write('send box \t%.3fs\n' % (time.perf_counter() - start))
karl.push("all_count", bytes([len(boxes)]))
karl.push("count", bytes([len(boxes_filtered)]))
sys.stderr.write('log output \t%.3fs\n' % (time.perf_counter() - start))
|
[
"io.BytesIO",
"karl.KarlSDK",
"time.perf_counter",
"PIL.Image.open",
"torchvision.models.detection.maskrcnn_resnet50_fpn",
"PIL.ImageDraw.Draw",
"torch.hub.set_dir",
"torch.no_grad",
"torchvision.transforms.ToTensor"
] |
[((20, 39), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (37, 39), False, 'import time\n'), ((134, 143), 'karl.KarlSDK', 'KarlSDK', ([], {}), '()\n', (141, 143), False, 'from karl import KarlSDK\n'), ((311, 337), 'torch.hub.set_dir', 'torch.hub.set_dir', (['"""torch"""'], {}), "('torch')\n", (328, 337), False, 'import torch, torchvision\n'), ((346, 413), 'torchvision.models.detection.maskrcnn_resnet50_fpn', 'torchvision.models.detection.maskrcnn_resnet50_fpn', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (396, 413), False, 'import torch, torchvision\n'), ((1437, 1456), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (1451, 1456), False, 'from PIL import Image, ImageDraw\n'), ((1565, 1577), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1575, 1577), False, 'import io\n'), ((755, 776), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (774, 776), False, 'from torchvision import transforms\n'), ((874, 889), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (887, 889), False, 'import torch, torchvision\n'), ((706, 726), 'PIL.Image.open', 'Image.open', (['img_path'], {}), '(img_path)\n', (716, 726), False, 'from PIL import Image, ImageDraw\n'), ((280, 299), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (297, 299), False, 'import time\n'), ((470, 489), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (487, 489), False, 'import time\n'), ((969, 988), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (986, 988), False, 'import time\n'), ((1702, 1721), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1719, 1721), False, 'import time\n'), ((1805, 1824), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1822, 1824), False, 'import time\n'), ((1971, 1990), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1988, 1990), False, 'import time\n'), ((660, 679), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (677, 679), False, 
'import time\n'), ((828, 847), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (845, 847), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from nailgun.errors import errors
@six.add_metaclass(abc.ABCMeta)
class MergePolicy(object):
"""Policy to merge attributes of plugins."""
@abc.abstractmethod
def apply_patch(self, target, patch):
"""Applies patch to the target.
:param target: the origin object, the target can be modified.
:param patch: the modifications for merging with original
:return: the patched object.
"""
class NetworkRoleMergePolicy(MergePolicy):
"""Policy for merging network roles."""
def __init__(self):
self.rules = {'vip': NetworkRoleMergePolicy._patch_vips}
@staticmethod
def _patch_vips(target, patch):
"""Patches VIP attribute for network role.
:param: target: the origin VIPs.
:type target: list
:param patch: the VIPs, that will be added to origin
:type patch: list
:return: the patched VIPs
"""
seen = dict((vip['name'], vip) for vip in target)
# This is a workaround for 8.0 release to allow
# moving controllers to different racks. Will be
# replaced by a different solution in 9.0
if len(patch) == 0:
return []
# END OF WORKAROUND
for vip in patch:
if vip['name'] in seen:
if vip != seen[vip['name']]:
raise ValueError(
"VIP '{0}' conflicts with existing one"
.format(vip['name'])
)
else:
seen[vip['name']] = vip
target.append(vip)
return target
def apply_patch(self, target, patch):
"""Tries to apply patch to the target.
Conflicts will be resolved according to the
predefined rules.
:param target: the origin network role
:type target: dict
:param patch: the modifications for merging with origin
:type patch: dict
:raises: errors.UnresolvableConflict
"""
target_props = target['properties']
patch_props = patch['properties']
conflict = set(target_props) & set(patch_props)
mergeable = set(self.rules)
# Exclude fields that has same value
for name in (conflict - mergeable):
if target_props[name] != patch_props[name]:
raise errors.UnresolvableConflict(
"Cannot apply patch for attribute {0}: conflict"
.format(name)
)
conflict.remove(name)
for name in conflict:
try:
target_props[name] = self.rules[name](
target_props[name],
patch_props[name]
)
except Exception as e:
raise errors.UnresolvableConflict(
"Cannot apply patch for attribute {0}: {1}"
.format(name, e)
)
|
[
"six.add_metaclass"
] |
[((695, 725), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (712, 725), False, 'import six\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest.mock import MagicMock
from sqlalchemy import column
from superset.db_engine_specs.elasticsearch import (
ElasticSearchEngineSpec,
OpenDistroEngineSpec,
)
from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec
class TestElasticSearchDbEngineSpec(TestDbEngineSpec):
def test_convert_dttm(self):
dttm = self.get_dttm()
self.assertEqual(
ElasticSearchEngineSpec.convert_dttm("DATETIME", dttm),
"CAST('2019-01-02T03:04:05' AS DATETIME)",
)
def test_opendistro_convert_dttm(self):
"""
DB Eng Specs (opendistro): Test convert_dttm
"""
dttm = self.get_dttm()
self.assertEqual(
OpenDistroEngineSpec.convert_dttm("DATETIME", dttm),
"'2019-01-02T03:04:05'",
)
def test_opendistro_sqla_column_label(self):
"""
DB Eng Specs (opendistro): Test column label
"""
test_cases = {
"Col": "Col",
"Col.keyword": "Col_keyword",
}
for original, expected in test_cases.items():
actual = OpenDistroEngineSpec.make_label_compatible(column(original).name)
self.assertEqual(actual, expected)
def test_opendistro_strip_comments(self):
"""
DB Eng Specs (opendistro): Test execute sql strip comments
"""
mock_cursor = MagicMock()
mock_cursor.execute.return_value = []
OpenDistroEngineSpec.execute(
mock_cursor, "-- some comment \nSELECT 1\n --other comment"
)
mock_cursor.execute.assert_called_once_with("SELECT 1\n")
|
[
"superset.db_engine_specs.elasticsearch.ElasticSearchEngineSpec.convert_dttm",
"unittest.mock.MagicMock",
"superset.db_engine_specs.elasticsearch.OpenDistroEngineSpec.convert_dttm",
"superset.db_engine_specs.elasticsearch.OpenDistroEngineSpec.execute",
"sqlalchemy.column"
] |
[((2192, 2203), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2201, 2203), False, 'from unittest.mock import MagicMock\n'), ((2259, 2354), 'superset.db_engine_specs.elasticsearch.OpenDistroEngineSpec.execute', 'OpenDistroEngineSpec.execute', (['mock_cursor', '"""-- some comment \nSELECT 1\n --other comment"""'], {}), '(mock_cursor,\n """-- some comment \nSELECT 1\n --other comment""")\n', (2287, 2354), False, 'from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec, OpenDistroEngineSpec\n'), ((1203, 1257), 'superset.db_engine_specs.elasticsearch.ElasticSearchEngineSpec.convert_dttm', 'ElasticSearchEngineSpec.convert_dttm', (['"""DATETIME"""', 'dttm'], {}), "('DATETIME', dttm)\n", (1239, 1257), False, 'from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec, OpenDistroEngineSpec\n'), ((1516, 1567), 'superset.db_engine_specs.elasticsearch.OpenDistroEngineSpec.convert_dttm', 'OpenDistroEngineSpec.convert_dttm', (['"""DATETIME"""', 'dttm'], {}), "('DATETIME', dttm)\n", (1549, 1567), False, 'from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec, OpenDistroEngineSpec\n'), ((1962, 1978), 'sqlalchemy.column', 'column', (['original'], {}), '(original)\n', (1968, 1978), False, 'from sqlalchemy import column\n')]
|
"""
Lines 5 and 6 were adapted from SO code:
http://stackoverflow.com/questions/4383571/importing-files-from-different-folder-in-python
"""
import sys
sys.path.insert(0, '..')
""" END """
import main as program
import pytest
def test_for_i_2():
assert program.span_without_basis(2) == program.binary_matrix([
[1,1]
])
def test_for_i_3():
assert program.span_without_basis(3) == program.binary_matrix([
[1,1,0],
[1,0,1],
[0,1,1],
[1,1,1]
])
|
[
"main.span_without_basis",
"main.binary_matrix",
"sys.path.insert"
] |
[((151, 175), 'sys.path.insert', 'sys.path.insert', (['(0)', '""".."""'], {}), "(0, '..')\n", (166, 175), False, 'import sys\n'), ((258, 287), 'main.span_without_basis', 'program.span_without_basis', (['(2)'], {}), '(2)\n', (284, 287), True, 'import main as program\n'), ((291, 322), 'main.binary_matrix', 'program.binary_matrix', (['[[1, 1]]'], {}), '([[1, 1]])\n', (312, 322), True, 'import main as program\n'), ((484, 513), 'main.span_without_basis', 'program.span_without_basis', (['(3)'], {}), '(3)\n', (510, 513), True, 'import main as program\n'), ((517, 584), 'main.binary_matrix', 'program.binary_matrix', (['[[1, 1, 0], [1, 0, 1], [0, 1, 1], [1, 1, 1]]'], {}), '([[1, 1, 0], [1, 0, 1], [0, 1, 1], [1, 1, 1]])\n', (538, 584), True, 'import main as program\n')]
|
#!/usr/bin/env python3
# The MIT License
# Copyright (c) 2016 Estonian Information System Authority (RIA), Population Register Centre (VRK)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Test case for verifying that the operational monitoring related data of
# X-Road requests and responses that contain attachments are stored by the
# operational monitoring daemon.
import os
import sys
sys.path.append('..')
import python_common as common
def _expected_keys_and_values_of_one_attachment_query_rec(
xroad_message_id, security_server_type):
return [
("clientMemberClass", "GOV"),
("clientMemberCode", "00000001"),
("clientSecurityServerAddress", "xtee9.ci.kit"),
("clientSubsystemCode", "System1"),
("clientXRoadInstance", "XTEE-CI-XM"),
("messageId", xroad_message_id),
("messageIssue", "attachmentsPlease"),
("messageProtocolVersion", "4.0"),
("requestAttachmentCount", 1),
("requestMimeSize", 1430),
("requestSoapSize", 1413),
("responseAttachmentCount", 3),
("responseMimeSize", 1648),
("responseSoapSize", 1600),
("securityServerType", security_server_type),
("serviceCode", "xroadGetRandom"),
("serviceMemberClass", "GOV"),
("serviceMemberCode", "00000000"),
("serviceSecurityServerAddress", "xtee8.ci.kit"),
("serviceSubsystemCode", "Center"),
("serviceVersion", "v1"),
("serviceXRoadInstance", "XTEE-CI-XM"),
("succeeded", True),
]
def _expected_keys_and_values_of_five_attachments_query_rec(
xroad_message_id, security_server_type):
return [
("clientMemberClass", "GOV"),
("clientMemberCode", "00000001"),
("clientSecurityServerAddress", "xtee9.ci.kit"),
("clientSubsystemCode", "System1"),
("clientXRoadInstance", "XTEE-CI-XM"),
("messageId", xroad_message_id),
("messageIssue", "453465"),
("messageProtocolVersion", "4.0"),
("messageUserId", "EE37702211230"),
("representedPartyClass", "COM"),
("representedPartyCode", "UNKNOWN_MEMBER"),
("requestAttachmentCount", 5),
("requestMimeSize", 1714),
("requestSoapSize", 1629),
("responseAttachmentCount", 0),
("responseSoapSize", 1519),
("securityServerType", security_server_type),
("serviceCode", "xroadGetRandom"),
("serviceMemberClass", "GOV"),
("serviceMemberCode", "00000000"),
("serviceSecurityServerAddress", "xtee8.ci.kit"),
("serviceSubsystemCode", "Center"),
("serviceVersion", "v1"),
("serviceXRoadInstance", "XTEE-CI-XM"),
("succeeded", True),
]
def run(client_security_server_address, producer_security_server_address,
ssh_user, request_template_dir):
xroad_request_template_filename = os.path.join(
request_template_dir, "simple_xroad_query_template.xml")
xroad_request_attachments_template_filename = os.path.join(
request_template_dir, "xroad_query_for_attachments_template.xml")
query_data_client_template_filename = os.path.join(
request_template_dir, "query_operational_data_client_template.xml")
query_data_producer_template_filename = os.path.join(
request_template_dir, "query_operational_data_producer_template.xml")
client_timestamp_before_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
producer_timestamp_before_requests = common.get_remote_timestamp(
producer_security_server_address, ssh_user)
message_id_one_attachment = common.generate_message_id()
print("\nGenerated message ID %s for X-Road request with one " \
"attachment" % (message_id_one_attachment, ))
### Regular and operational data requests and the relevant checks
print("\n---- Sending an X-Road request with one attachment to the " \
"service that will respond with three attachments ----\n")
request_contents = common.format_xroad_request_template(
xroad_request_attachments_template_filename, message_id_one_attachment)
response = common.post_multipart_request(
client_security_server_address, request_contents,
attachment_count=1, get_raw_stream=True)
# Expecting a multipart response with attachments.
mime_parts, raw_response = common.parse_multipart_response(response)
print("Received the following X-Road response: \n")
print(raw_response.decode("utf-8"))
if not mime_parts:
common.parse_and_check_soap_response(raw_response)
message_id_five_attachments = common.generate_message_id()
print("\nGenerated message ID %s for X-Road request with five " \
"attachments" % (message_id_five_attachments, ))
print("\n---- Sending an X-Road request with five attachments to the " \
"client's security server ----\n")
request_contents = common.format_xroad_request_template(
xroad_request_template_filename, message_id_five_attachments)
# Expecting a simple SOAP response.
response = common.post_multipart_request(
client_security_server_address, request_contents, attachment_count=5)
print("Received the following X-Road response: \n")
xml = common.parse_and_clean_xml(response.text)
print(xml.toprettyxml())
common.check_soap_fault(xml)
common.wait_for_operational_data()
client_timestamp_after_request = common.get_remote_timestamp(
client_security_server_address, ssh_user)
producer_timestamp_after_request = common.get_remote_timestamp(
producer_security_server_address, ssh_user)
# Now make operational data requests to both security servers and check the
# response payloads.
print("\n---- Sending an operational data request to the client's security server ----\n")
message_id = common.generate_message_id()
print("Generated message ID %s for query data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_client_template_filename, message_id,
client_timestamp_before_requests, client_timestamp_after_request)
print("Generated the following query data request for the client's security server: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count)
json_payload = common.get_multipart_json_payload(mime_parts[1])
# Check the presence of all the required fields in at least one JSON structure.
# The record describing the query with one attachment
common.assert_present_in_json(
json_payload, _expected_keys_and_values_of_one_attachment_query_rec(
message_id_one_attachment, "Client"))
# The record describing the query with five attachments
common.assert_present_in_json(
json_payload, _expected_keys_and_values_of_five_attachments_query_rec(
message_id_five_attachments, "Client"))
# As operational data is queried by regular client, the field
# 'securityServerInternalIp' is not expected to be included
# in the response payload.
common.assert_missing_in_json(json_payload, "securityServerInternalIp")
# Check if the timestamps in the response are in the expected range.
common.assert_expected_timestamp_values(
json_payload, client_timestamp_before_requests, client_timestamp_after_request)
common.print_multipart_query_data_response(json_payload)
else:
common.parse_and_check_soap_response(raw_response)
print("\n---- Sending an operational data request to the producer's " \
"security server ----\n")
message_id = common.generate_message_id()
print("\nGenerated message ID %s for query data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_producer_template_filename, message_id,
producer_timestamp_before_requests, producer_timestamp_after_request)
print("Generated the following query data request for the producer's " \
"security server: \n")
print(request_contents)
response = common.post_xml_request(
producer_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count, is_client=False)
json_payload = common.get_multipart_json_payload(mime_parts[1])
# Check the presence of all the required fields in at least one JSON structure.
# The record describing the query with one attachment
common.assert_present_in_json(
json_payload, _expected_keys_and_values_of_one_attachment_query_rec(
message_id_one_attachment, "Producer"))
# The record describing the query with five attachments
common.assert_present_in_json(
json_payload, _expected_keys_and_values_of_five_attachments_query_rec(
message_id_five_attachments, "Producer"))
# As operational data is queried by regular client, the field
# 'securityServerInternalIp' is not expected to be included
# in the response payload.
common.assert_missing_in_json(json_payload, "securityServerInternalIp")
# Check if the timestamps in the response are in the expected range.
common.assert_expected_timestamp_values(
json_payload,
producer_timestamp_before_requests, producer_timestamp_after_request)
common.print_multipart_query_data_response(json_payload)
else:
common.parse_and_check_soap_response(raw_response)
|
[
"python_common.format_query_operational_data_request_template",
"python_common.parse_and_clean_xml",
"os.path.join",
"python_common.generate_message_id",
"sys.path.append",
"python_common.get_multipart_json_payload",
"python_common.parse_multipart_response",
"python_common.parse_and_check_soap_response",
"python_common.post_xml_request",
"python_common.assert_missing_in_json",
"python_common.print_multipart_soap_and_record_count",
"python_common.get_remote_timestamp",
"python_common.post_multipart_request",
"python_common.get_multipart_soap_and_record_count",
"python_common.check_soap_fault",
"python_common.format_xroad_request_template",
"python_common.print_multipart_query_data_response",
"python_common.wait_for_operational_data",
"python_common.assert_expected_timestamp_values"
] |
[((1404, 1425), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (1419, 1425), False, 'import sys\n'), ((3904, 3973), 'os.path.join', 'os.path.join', (['request_template_dir', '"""simple_xroad_query_template.xml"""'], {}), "(request_template_dir, 'simple_xroad_query_template.xml')\n", (3916, 3973), False, 'import os\n'), ((4037, 4115), 'os.path.join', 'os.path.join', (['request_template_dir', '"""xroad_query_for_attachments_template.xml"""'], {}), "(request_template_dir, 'xroad_query_for_attachments_template.xml')\n", (4049, 4115), False, 'import os\n'), ((4171, 4256), 'os.path.join', 'os.path.join', (['request_template_dir', '"""query_operational_data_client_template.xml"""'], {}), "(request_template_dir, 'query_operational_data_client_template.xml'\n )\n", (4183, 4256), False, 'import os\n'), ((4309, 4395), 'os.path.join', 'os.path.join', (['request_template_dir', '"""query_operational_data_producer_template.xml"""'], {}), "(request_template_dir,\n 'query_operational_data_producer_template.xml')\n", (4321, 4395), False, 'import os\n'), ((4445, 4514), 'python_common.get_remote_timestamp', 'common.get_remote_timestamp', (['client_security_server_address', 'ssh_user'], {}), '(client_security_server_address, ssh_user)\n', (4472, 4514), True, 'import python_common as common\n'), ((4569, 4640), 'python_common.get_remote_timestamp', 'common.get_remote_timestamp', (['producer_security_server_address', 'ssh_user'], {}), '(producer_security_server_address, ssh_user)\n', (4596, 4640), True, 'import python_common as common\n'), ((4687, 4715), 'python_common.generate_message_id', 'common.generate_message_id', ([], {}), '()\n', (4713, 4715), True, 'import python_common as common\n'), ((5081, 5194), 'python_common.format_xroad_request_template', 'common.format_xroad_request_template', (['xroad_request_attachments_template_filename', 'message_id_one_attachment'], {}), '(\n xroad_request_attachments_template_filename, message_id_one_attachment)\n', (5117, 
5194), True, 'import python_common as common\n'), ((5219, 5343), 'python_common.post_multipart_request', 'common.post_multipart_request', (['client_security_server_address', 'request_contents'], {'attachment_count': '(1)', 'get_raw_stream': '(True)'}), '(client_security_server_address,\n request_contents, attachment_count=1, get_raw_stream=True)\n', (5248, 5343), True, 'import python_common as common\n'), ((5444, 5485), 'python_common.parse_multipart_response', 'common.parse_multipart_response', (['response'], {}), '(response)\n', (5475, 5485), True, 'import python_common as common\n'), ((5701, 5729), 'python_common.generate_message_id', 'common.generate_message_id', ([], {}), '()\n', (5727, 5729), True, 'import python_common as common\n'), ((6006, 6108), 'python_common.format_xroad_request_template', 'common.format_xroad_request_template', (['xroad_request_template_filename', 'message_id_five_attachments'], {}), '(xroad_request_template_filename,\n message_id_five_attachments)\n', (6042, 6108), True, 'import python_common as common\n'), ((6174, 6277), 'python_common.post_multipart_request', 'common.post_multipart_request', (['client_security_server_address', 'request_contents'], {'attachment_count': '(5)'}), '(client_security_server_address,\n request_contents, attachment_count=5)\n', (6203, 6277), True, 'import python_common as common\n'), ((6354, 6395), 'python_common.parse_and_clean_xml', 'common.parse_and_clean_xml', (['response.text'], {}), '(response.text)\n', (6380, 6395), True, 'import python_common as common\n'), ((6430, 6458), 'python_common.check_soap_fault', 'common.check_soap_fault', (['xml'], {}), '(xml)\n', (6453, 6458), True, 'import python_common as common\n'), ((6464, 6498), 'python_common.wait_for_operational_data', 'common.wait_for_operational_data', ([], {}), '()\n', (6496, 6498), True, 'import python_common as common\n'), ((6537, 6606), 'python_common.get_remote_timestamp', 'common.get_remote_timestamp', (['client_security_server_address', 
'ssh_user'], {}), '(client_security_server_address, ssh_user)\n', (6564, 6606), True, 'import python_common as common\n'), ((6659, 6730), 'python_common.get_remote_timestamp', 'common.get_remote_timestamp', (['producer_security_server_address', 'ssh_user'], {}), '(producer_security_server_address, ssh_user)\n', (6686, 6730), True, 'import python_common as common\n'), ((6964, 6992), 'python_common.generate_message_id', 'common.generate_message_id', ([], {}), '()\n', (6990, 6992), True, 'import python_common as common\n'), ((7095, 7272), 'python_common.format_query_operational_data_request_template', 'common.format_query_operational_data_request_template', (['query_data_client_template_filename', 'message_id', 'client_timestamp_before_requests', 'client_timestamp_after_request'], {}), '(\n query_data_client_template_filename, message_id,\n client_timestamp_before_requests, client_timestamp_after_request)\n', (7148, 7272), True, 'import python_common as common\n'), ((7427, 7525), 'python_common.post_xml_request', 'common.post_xml_request', (['client_security_server_address', 'request_contents'], {'get_raw_stream': '(True)'}), '(client_security_server_address, request_contents,\n get_raw_stream=True)\n', (7450, 7525), True, 'import python_common as common\n'), ((7578, 7619), 'python_common.parse_multipart_response', 'common.parse_multipart_response', (['response'], {}), '(response)\n', (7609, 7619), True, 'import python_common as common\n'), ((9219, 9247), 'python_common.generate_message_id', 'common.generate_message_id', ([], {}), '()\n', (9245, 9247), True, 'import python_common as common\n'), ((9351, 9534), 'python_common.format_query_operational_data_request_template', 'common.format_query_operational_data_request_template', (['query_data_producer_template_filename', 'message_id', 'producer_timestamp_before_requests', 'producer_timestamp_after_request'], {}), '(\n query_data_producer_template_filename, message_id,\n producer_timestamp_before_requests, 
producer_timestamp_after_request)\n', (9404, 9534), True, 'import python_common as common\n'), ((9707, 9807), 'python_common.post_xml_request', 'common.post_xml_request', (['producer_security_server_address', 'request_contents'], {'get_raw_stream': '(True)'}), '(producer_security_server_address, request_contents,\n get_raw_stream=True)\n', (9730, 9807), True, 'import python_common as common\n'), ((9860, 9901), 'python_common.parse_multipart_response', 'common.parse_multipart_response', (['response'], {}), '(response)\n', (9891, 9901), True, 'import python_common as common\n'), ((5615, 5665), 'python_common.parse_and_check_soap_response', 'common.parse_and_check_soap_response', (['raw_response'], {}), '(raw_response)\n', (5651, 5665), True, 'import python_common as common\n'), ((7674, 7731), 'python_common.get_multipart_soap_and_record_count', 'common.get_multipart_soap_and_record_count', (['mime_parts[0]'], {}), '(mime_parts[0])\n', (7716, 7731), True, 'import python_common as common\n'), ((7740, 7809), 'python_common.print_multipart_soap_and_record_count', 'common.print_multipart_soap_and_record_count', (['soap_part', 'record_count'], {}), '(soap_part, record_count)\n', (7784, 7809), True, 'import python_common as common\n'), ((7834, 7882), 'python_common.get_multipart_json_payload', 'common.get_multipart_json_payload', (['mime_parts[1]'], {}), '(mime_parts[1])\n', (7867, 7882), True, 'import python_common as common\n'), ((8651, 8722), 'python_common.assert_missing_in_json', 'common.assert_missing_in_json', (['json_payload', '"""securityServerInternalIp"""'], {}), "(json_payload, 'securityServerInternalIp')\n", (8680, 8722), True, 'import python_common as common\n'), ((8809, 8932), 'python_common.assert_expected_timestamp_values', 'common.assert_expected_timestamp_values', (['json_payload', 'client_timestamp_before_requests', 'client_timestamp_after_request'], {}), '(json_payload,\n client_timestamp_before_requests, client_timestamp_after_request)\n', (8848, 
8932), True, 'import python_common as common\n'), ((8955, 9011), 'python_common.print_multipart_query_data_response', 'common.print_multipart_query_data_response', (['json_payload'], {}), '(json_payload)\n', (8997, 9011), True, 'import python_common as common\n'), ((9031, 9081), 'python_common.parse_and_check_soap_response', 'common.parse_and_check_soap_response', (['raw_response'], {}), '(raw_response)\n', (9067, 9081), True, 'import python_common as common\n'), ((9956, 10013), 'python_common.get_multipart_soap_and_record_count', 'common.get_multipart_soap_and_record_count', (['mime_parts[0]'], {}), '(mime_parts[0])\n', (9998, 10013), True, 'import python_common as common\n'), ((10022, 10112), 'python_common.print_multipart_soap_and_record_count', 'common.print_multipart_soap_and_record_count', (['soap_part', 'record_count'], {'is_client': '(False)'}), '(soap_part, record_count,\n is_client=False)\n', (10066, 10112), True, 'import python_common as common\n'), ((10133, 10181), 'python_common.get_multipart_json_payload', 'common.get_multipart_json_payload', (['mime_parts[1]'], {}), '(mime_parts[1])\n', (10166, 10181), True, 'import python_common as common\n'), ((10953, 11024), 'python_common.assert_missing_in_json', 'common.assert_missing_in_json', (['json_payload', '"""securityServerInternalIp"""'], {}), "(json_payload, 'securityServerInternalIp')\n", (10982, 11024), True, 'import python_common as common\n'), ((11111, 11238), 'python_common.assert_expected_timestamp_values', 'common.assert_expected_timestamp_values', (['json_payload', 'producer_timestamp_before_requests', 'producer_timestamp_after_request'], {}), '(json_payload,\n producer_timestamp_before_requests, producer_timestamp_after_request)\n', (11150, 11238), True, 'import python_common as common\n'), ((11277, 11333), 'python_common.print_multipart_query_data_response', 'common.print_multipart_query_data_response', (['json_payload'], {}), '(json_payload)\n', (11319, 11333), True, 'import python_common as 
common\n'), ((11353, 11403), 'python_common.parse_and_check_soap_response', 'common.parse_and_check_soap_response', (['raw_response'], {}), '(raw_response)\n', (11389, 11403), True, 'import python_common as common\n')]
|
import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid
from retrying import retry
import yaml, pynvml
###############################################################################
# GPU
###############################################################################
# Flag recording whether NVML (NVIDIA GPU management) initialised successfully.
HAS_CUDA = False
try:
    pynvml.nvmlInit()
    HAS_CUDA = True
# BUG FIX: a bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
# catch Exception instead (NVML missing / no driver -> run without GPU metrics).
except Exception:
    HAS_CUDA = False
def gpu_count():
    """Return the number of GPUs visible to NVML, or 0 when CUDA is unavailable."""
    return pynvml.nvmlDeviceGetCount() if HAS_CUDA else 0
def gpu_mem_used_pct():
    '''
    Return a list with the fraction of memory currently in use on each GPU
    (empty list when no GPU is available).
    '''
    fractions = []
    for idx in range(gpu_count()):
        handle = pynvml.nvmlDeviceGetHandleByIndex(idx)
        mem = pynvml.nvmlDeviceGetMemoryInfo(handle)
        fractions.append(mem.used / mem.total)
    return fractions
###############################################################################
# Machine Info
###############################################################################
def get_machine_data(verbose = False):
    '''
    Collect details about this machine and return them as a
    JSON-serialisable dict.

    Gathers hostname, CPU, memory, disk, network, platform, uptime and
    (when NVML is available) GPU memory statistics.

    :param verbose: when True, pretty-print the collected data to stdout.
    :return: dict of machine statistics.
    '''
    # Hostname Info
    hostname = socket.gethostname()
    # CPU Info (cpu_percent blocks for 1s to sample utilisation)
    cpu_count = psutil.cpu_count()
    cpu_usage = psutil.cpu_percent(interval=1)
    # Memory Info
    memory_stats = psutil.virtual_memory()
    memory_total = memory_stats.total
    memory_used = memory_stats.used
    memory_used_percent = memory_stats.percent
    # Disk Info
    disks = []
    for part in psutil.disk_partitions():
        # Some mounts (CD-ROM drives, phones, ...) cannot be queried; skip them.
        try:
            usage = psutil.disk_usage(part.mountpoint)  # query once, not three times
        except Exception:  # was a bare except that also printed a stray blank line
            continue
        disks.append({
            "name": part.device,
            "mount_point": part.mountpoint,
            "type": part.fstype,
            "total_size": usage.total,
            "used_size": usage.used,
            "percent_used": usage.percent
        })
    # Bandwidth Info (samples traffic over a 1-second window)
    network_stats = get_bandwidth()
    # Network Info
    nics = []
    for name, snic_array in psutil.net_if_addrs().items():
        # Create NIC object
        nic = {
            "name": name,
            "mac": "",
            "address": "",
            "address6": "",
            "netmask": ""
        }
        # BUG FIX: the original compared against hard-coded family values
        # (-1, 2, 23), which are Windows-specific; use the symbolic
        # constants so MAC/IPv6 detection also works on Linux/macOS.
        for snic in snic_array:
            if snic.family == psutil.AF_LINK:
                nic["mac"] = snic.address
            elif snic.family == socket.AF_INET:
                nic["address"] = snic.address
                nic["netmask"] = snic.netmask
            elif snic.family == socket.AF_INET6:
                nic["address6"] = snic.address
        nics.append(nic)
    # Platform Info
    system = {
        "name" : platform.system(),
        "version" : platform.release()
    }
    # Time Info (UTC, ISO-8601)
    timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S+00:00")
    uptime = int(time.time() - psutil.boot_time())
    # System UUID (derived from a MAC address; stable per machine)
    sys_uuid = uuid.getnode()
    # Set Machine Info
    machine = {
        "hostname" : hostname,
        "uuid" : sys_uuid,
        "system" : system,
        "uptime" : uptime,
        "cpu_count" : cpu_count,
        "cpu_usage" : cpu_usage,
        "memory_total" : memory_total,
        "memory_used" : memory_used,
        "memory_used_percent" : memory_used_percent,
        "drives" : disks,
        "root_drive_used_percent": psutil.disk_usage("/").percent,
        "network_up" : network_stats["traffic_out"],
        "network_down" : network_stats["traffic_in"],
        "network_cards": nics,
        "timestamp" : timestamp
    }
    if HAS_CUDA:
        # NOTE(review): max() of an empty list raises if NVML initialised but
        # no GPU is present -- unchanged from the original behaviour.
        machine["gpu_memory_max_used_percent"] = round(max(gpu_mem_used_pct()) * 100, 2)
    if verbose:
        print("\nData:")
        pprint.pprint(machine, indent=4)
    return machine
def get_bandwidth():
    '''
    Sample network traffic over a one-second window.

    :return: dict {"traffic_in": bytes received, "traffic_out": bytes sent}
             during the window (0 when the counters went backwards).
    '''
    before = psutil.net_io_counters()
    time.sleep(1)
    after = psutil.net_io_counters()
    # Counters can wrap or reset; report 0 instead of a negative rate.
    current_in = max(after.bytes_recv - before.bytes_recv, 0)
    current_out = max(after.bytes_sent - before.bytes_sent, 0)
    return {"traffic_in" : current_in, "traffic_out" : current_out}
def format_machine_data(machine_data, l_keys = None):
    '''
    Render a machine-info dict as a Slack-friendly string, one
    "*key* : `value`" line per entry.

    :param machine_data: dict of machine statistics.
    :param l_keys: optional list of keys; when non-empty, only those keys
                   are rendered.
    :return: formatted multi-line string.
    '''
    pairs = machine_data.items()
    if l_keys:
        pairs = [(k, v) for k, v in pairs if k in l_keys]
    return "".join(f"*{k}* : `{v}`\n" for k, v in pairs)
###############################################################################
# Sending Alerts
###############################################################################
def send_data(data, endpoint = None, attempts = 30, timeout = 60):
    '''
    [DEPRECATED]
    POST ``data`` to ``endpoint``, retrying up to ``attempts`` times with
    ``timeout`` seconds between retries; kills the process when every
    attempt fails.

    :param data: payload for the POST body.
    :param endpoint: monitoring-server URL (None disables sending).
    :param attempts: maximum number of send attempts.
    :param timeout: seconds to sleep between failed attempts.
    :return: None (immediately when no endpoint is given).
    '''
    if not endpoint:
        return None
    # BUG FIX: the original wrote ``for attempt in attempts`` which raises
    # TypeError because ``attempts`` is an int; iterate over range() instead.
    for attempt in range(attempts):
        try:
            response = requests.post(url = endpoint, data = data)
            print("\nPOST:")
            print("Response:", response.status_code)
            print("Headers:")
            pprint.pprint(response.headers)
            print("Content:", response.content)
            # Attempt printing response in JSON if possible
            try:
                print("JSON Content:")
                pprint.pprint(response.json())
            except Exception:  # was a bare except
                print("No JSON content")
            break
        except requests.exceptions.RequestException as e:
            print("\nPOST Error:\n", e)
            # Sleep before retrying
            time.sleep(timeout)
    else:
        # No attempt succeeded within attempts*timeout: kill the script.
        exit(0)
def retry_if_result_none(result):
    """
    Predicate for the ``retry`` decorator: retry exactly when *result* is None.
    """
    if result is None:
        return True
    return False
@retry(stop_max_attempt_number = 3, retry_on_result = retry_if_result_none)
def send_slack_data(endpoint, token, channel, str_msg, title = ''):
    '''
    Post a message to a Slack channel.

    Returns True on success and None on failure; a None result makes the
    ``retry`` decorator re-attempt the call (up to 3 times in total).
    '''
    headers = {"authorization": f"Bearer {token}", "content-type": "application/json"}
    if title:
        text = f"*{title}*\n---------------------\n{str_msg}"
    else:
        text = str_msg
    payload = {"channel": channel, "text": text}
    try:
        response = requests.post(url = endpoint, data = json.dumps(payload), headers = headers)
    except requests.exceptions.RequestException as e:
        print(f'send_slack_data: POST error: {e}')
        return None
    # The status check cannot raise RequestException, so it lives outside the try.
    if response.status_code != 200:
        print(f'send_slack_data: POST response has status code {response.status_code}')
        return None
    return True
###############################################################################
# App Entry Point
###############################################################################
def main(endpoint = None, machine_ulimit = None, slack_token = None, slack_channel = None, verbose = True):
    '''
    Collect machine stats and, when an endpoint is configured, report them
    to Slack: a full update on every call, or -- when ``machine_ulimit`` is
    given -- only an alert when at least one metric exceeds its limit.

    :param endpoint: Slack webhook/API endpoint (None disables sending).
    :param machine_ulimit: optional {metric_name: upper_limit} mapping.
    :param slack_token: Slack bearer token (required with endpoint).
    :param slack_channel: Slack channel (required with endpoint).
    :param verbose: forwarded to get_machine_data.
    :raises RuntimeError: when endpoint is set without token/channel.
    '''
    if endpoint and (not slack_token or not slack_channel):
        raise RuntimeError(f'server_monitor.main: args slack_token and slack_channel must be provided with endpoint={endpoint}')
    machine = get_machine_data(verbose = verbose)
    if not machine_ulimit:
        # No limits configured: send everything on every call.
        if endpoint:
            send_slack_data(endpoint = endpoint, token = slack_token,
                            channel = slack_channel, str_msg = format_machine_data(machine),
                            title = 'Server Monitor Update')
        return
    # Limits configured: alert only when a metric exceeds its limit.
    exceeded = {k: machine[k] > float(v) for k, v in machine_ulimit.items()}
    if any(exceeded.values()) and endpoint:
        l_limit_reached = [f"`{k}`" for k, hit in exceeded.items() if hit]
        send_slack_data(endpoint = endpoint, token = slack_token,
                        channel = slack_channel,
                        str_msg = format_machine_data(machine,
                            l_keys= ["hostname", "system", "uptime", "cpu_count", "cpu_usage",
                            "memory_used_percent", "root_drive_used_percent", "gpu_memory_max_used_percent",
                            "timestamp"]
                        ),
                        title = f'Server Monitor LIMIT ({", ".join(l_limit_reached)}) REACHED')
if __name__ == '__main__':
    # Command-line entry point: parse options, then loop forever collecting
    # stats and reporting them (interval between iterations is configurable).
    parser = argparse.ArgumentParser(description='Monitoring script to send system info to Slack')
    parser.add_argument('-d', '--dest', default= None, help='API Endpoint for Monitoring Data (Defaults to None)')
    parser.add_argument('-i', '--interval', default=5, type=int, help='Interval between checks (Seconds. Defaults to 5 seconds)')
    parser.add_argument('-a', '--attempts', default=30, type=int, help='Attempts to send data when sending failes (Defaults to 30)')
    parser.add_argument('-t', '--timeout', default=60, type=int, help='Timeout between resend attempts (Seconds. Defaults to 60. If attempts is reached script will die)')
    parser.add_argument('-c', '--config', default = None, help = 'optional config file path')
    parser.add_argument('--slack_token', default = None, help = 'optional slack token')
    parser.add_argument('--slack_channel', default = None, help = 'optional slack channel')
    args = parser.parse_args()
    # Factor in sleep for bandwidth checking
    # (get_machine_data itself blocks ~2s: cpu_percent + get_bandwidth).
    if args.interval >= 2:
        args.interval -= 2
    while True:
        # read config file
        # NOTE: the config is re-read on every iteration, so edits to the
        # YAML file take effect without restarting the script.
        if args.config:
            with open(args.config, 'r') as ymlfile:
                cfg = yaml.load(ymlfile, Loader = yaml.BaseLoader)
            cfg = cfg['dev']['environment']
            args.interval = int(cfg['interval'])
            # --- DEPRECATED ---
            # args.attempts = int(cfg['attempts'])
            # args.timeout = int(cfg['timeout'])
            main(endpoint = cfg['dest'], machine_ulimit=cfg['machine_ulimit'],
                 slack_token = cfg['slack']['token'],
                 slack_channel = cfg['slack']['channel'], verbose=True)
        else:
            # No config file: fall back to the command-line arguments.
            main(verbose = True, endpoint = args.dest, slack_token = args.slack_token,
                 slack_channel = args.slack_channel)
        print(f"--------------------- Server Monitor {args.interval}s Update ----------------------")
        time.sleep(args.interval)
|
[
"psutil.virtual_memory",
"yaml.load",
"argparse.ArgumentParser",
"json.dumps",
"pynvml.nvmlDeviceGetHandleByIndex",
"datetime.datetime.utcnow",
"pprint.pprint",
"platform.release",
"requests.post",
"psutil.cpu_count",
"pynvml.nvmlInit",
"psutil.net_io_counters",
"psutil.disk_usage",
"socket.gethostname",
"retrying.retry",
"psutil.disk_partitions",
"time.sleep",
"uuid.getnode",
"pynvml.nvmlDeviceGetMemoryInfo",
"platform.system",
"psutil.net_if_addrs",
"psutil.boot_time",
"time.time",
"pynvml.nvmlDeviceGetCount",
"psutil.cpu_percent"
] |
[((6256, 6326), 'retrying.retry', 'retry', ([], {'stop_max_attempt_number': '(3)', 'retry_on_result': 'retry_if_result_none'}), '(stop_max_attempt_number=3, retry_on_result=retry_if_result_none)\n', (6261, 6326), False, 'from retrying import retry\n'), ((328, 345), 'pynvml.nvmlInit', 'pynvml.nvmlInit', ([], {}), '()\n', (343, 345), False, 'import yaml, pynvml\n'), ((1185, 1205), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1203, 1205), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((1238, 1256), 'psutil.cpu_count', 'psutil.cpu_count', ([], {}), '()\n', (1254, 1256), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((1273, 1303), 'psutil.cpu_percent', 'psutil.cpu_percent', ([], {'interval': '(1)'}), '(interval=1)\n', (1291, 1303), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((1342, 1365), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (1363, 1365), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((1520, 1544), 'psutil.disk_partitions', 'psutil.disk_partitions', ([], {}), '()\n', (1542, 1544), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((3109, 3123), 'uuid.getnode', 'uuid.getnode', ([], {}), '()\n', (3121, 3123), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((4076, 4089), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4086, 4089), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((8790, 8880), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Monitoring script to send system info to Slack"""'}), "(description=\n 'Monitoring script to send system info to Slack')\n", (8813, 8880), False, 'import argparse, socket, time, 
json, datetime, platform, psutil, requests, pprint, uuid\n'), ((476, 503), 'pynvml.nvmlDeviceGetCount', 'pynvml.nvmlDeviceGetCount', ([], {}), '()\n', (501, 503), False, 'import yaml, pynvml\n'), ((721, 757), 'pynvml.nvmlDeviceGetHandleByIndex', 'pynvml.nvmlDeviceGetHandleByIndex', (['i'], {}), '(i)\n', (754, 757), False, 'import yaml, pynvml\n'), ((773, 806), 'pynvml.nvmlDeviceGetMemoryInfo', 'pynvml.nvmlDeviceGetMemoryInfo', (['h'], {}), '(h)\n', (803, 806), False, 'import yaml, pynvml\n'), ((2864, 2881), 'platform.system', 'platform.system', ([], {}), '()\n', (2879, 2881), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((2903, 2921), 'platform.release', 'platform.release', ([], {}), '()\n', (2919, 2921), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((3861, 3893), 'pprint.pprint', 'pprint.pprint', (['machine'], {'indent': '(4)'}), '(machine, indent=4)\n', (3874, 3893), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((3985, 4009), 'psutil.net_io_counters', 'psutil.net_io_counters', ([], {}), '()\n', (4007, 4009), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((4035, 4059), 'psutil.net_io_counters', 'psutil.net_io_counters', ([], {}), '()\n', (4057, 4059), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((4131, 4155), 'psutil.net_io_counters', 'psutil.net_io_counters', ([], {}), '()\n', (4153, 4155), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((4181, 4205), 'psutil.net_io_counters', 'psutil.net_io_counters', ([], {}), '()\n', (4203, 4205), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((10718, 10743), 'time.sleep', 'time.sleep', (['args.interval'], {}), '(args.interval)\n', (10728, 
10743), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((2228, 2249), 'psutil.net_if_addrs', 'psutil.net_if_addrs', ([], {}), '()\n', (2247, 2249), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((2961, 2987), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2985, 2987), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((3041, 3052), 'time.time', 'time.time', ([], {}), '()\n', (3050, 3052), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((3055, 3073), 'psutil.boot_time', 'psutil.boot_time', ([], {}), '()\n', (3071, 3073), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((3505, 3527), 'psutil.disk_usage', 'psutil.disk_usage', (['"""/"""'], {}), "('/')\n", (3522, 3527), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((5334, 5372), 'requests.post', 'requests.post', ([], {'url': 'endpoint', 'data': 'data'}), '(url=endpoint, data=data)\n', (5347, 5372), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((5501, 5532), 'pprint.pprint', 'pprint.pprint', (['response.headers'], {}), '(response.headers)\n', (5514, 5532), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((5977, 5996), 'time.sleep', 'time.sleep', (['timeout'], {}), '(timeout)\n', (5987, 5996), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((6752, 6771), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (6762, 6771), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((9972, 10014), 'yaml.load', 'yaml.load', (['ymlfile'], {'Loader': 'yaml.BaseLoader'}), 
'(ymlfile, Loader=yaml.BaseLoader)\n', (9981, 10014), False, 'import yaml, pynvml\n'), ((1844, 1875), 'psutil.disk_usage', 'psutil.disk_usage', (['x.mountpoint'], {}), '(x.mountpoint)\n', (1861, 1875), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((1913, 1944), 'psutil.disk_usage', 'psutil.disk_usage', (['x.mountpoint'], {}), '(x.mountpoint)\n', (1930, 1944), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n'), ((1984, 2015), 'psutil.disk_usage', 'psutil.disk_usage', (['x.mountpoint'], {}), '(x.mountpoint)\n', (2001, 2015), False, 'import argparse, socket, time, json, datetime, platform, psutil, requests, pprint, uuid\n')]
|
from qtpy import QtWidgets, QtCore
from qtpy.QtCore import Qt
class AnnotationScene(QtWidgets.QGraphicsScene):
    """Graphics scene for interactive polygon annotation.

    Emits ``clickRequest(x, y, is_left_button)`` for plain clicks that occur
    while no polygon is being created and nothing is hovered.
    """

    clickRequest = QtCore.Signal(int, int, bool)

    def __init__(self, parent=None):
        super(AnnotationScene, self).__init__(parent)
        self.creating = False        # True while a new polygon is being drawn
        self.polygon_items = []      # all polygon items shown in the scene

    def updatePolygonSize(self):
        # Refresh the geometry of every grip point and edge (e.g. after zoom).
        for polygon in self.polygon_items:
            for grip in polygon.m_items:
                grip.updateSize()
            for edge in polygon.m_lines:
                edge.updateWidth()

    def setCreating(self, creating=True):
        self.creating = creating

    def mousePressEvent(self, ev):
        pos = ev.scenePos()
        if self.creating:
            # Fix the floating preview point in place, then append a fresh
            # movable preview point that will follow the cursor.
            self.polygon_item.removeLastPoint()
            self.polygon_item.addPointLast(ev.scenePos())
            # movable element
            self.polygon_item.addPointLast(ev.scenePos())
        elif not self.hovering:
            if ev.buttons() in [Qt.LeftButton, Qt.RightButton]:
                self.clickRequest.emit(
                    int(pos.x()), int(pos.y()), ev.buttons() == Qt.LeftButton
                )
        super(AnnotationScene, self).mousePressEvent(ev)

    def mouseMoveEvent(self, ev):
        if self.creating:
            # Keep the trailing (preview) vertex glued to the cursor.
            last_index = len(self.polygon_item) - 1
            self.polygon_item.movePoint(last_index, ev.scenePos())
        super(AnnotationScene, self).mouseMoveEvent(ev)

    @property
    def item_hovering(self):
        # True when the cursor is over any vertex grip.
        return any(poly.item_hovering for poly in self.polygon_items)

    @property
    def polygon_hovering(self):
        # True when the cursor is inside any polygon body.
        return any(poly.polygon_hovering for poly in self.polygon_items)

    @property
    def line_hovering(self):
        # True when the cursor is over any polygon edge.
        return any(poly.line_hovering for poly in self.polygon_items)

    @property
    def hovering(self):
        return self.item_hovering or self.polygon_hovering or self.line_hovering
|
[
"qtpy.QtCore.Signal"
] |
[((132, 161), 'qtpy.QtCore.Signal', 'QtCore.Signal', (['int', 'int', 'bool'], {}), '(int, int, bool)\n', (145, 161), False, 'from qtpy import QtWidgets, QtCore\n')]
|
from __future__ import absolute_import, print_function, division
import os
import shutil
import unittest
from tempfile import mkdtemp
import numpy as np
import theano
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.misc.pkl_utils import dump, load, StripPickler
class T_dump_load(unittest.TestCase):
    """Round-trip tests for theano.misc.pkl_utils.dump/load."""

    def setUp(self):
        # Run inside a scratch directory so test artefacts never land in the repo.
        self.origdir = os.getcwd()
        self.tmpdir = mkdtemp()
        os.chdir(self.tmpdir)

    def tearDown(self):
        # Restore the working directory, then remove the scratch directory.
        os.chdir(self.origdir)
        if self.tmpdir is not None:
            shutil.rmtree(self.tmpdir)

    def test_dump_load_mrg(self):
        # A dumped MRG_RandomStreams must load back as the same type.
        stream = MRG_RandomStreams()
        with open('test', 'wb') as out_file:
            dump(stream, out_file)
        with open('test', 'rb') as in_file:
            stream = load(in_file)
        assert type(stream) == MRG_RandomStreams

    def test_dump_zip_names(self):
        # Shared variables with clashing names must be disambiguated
        # ('foo', 'foo_2', 'foo_3') inside the produced zip archive.
        shared_a = theano.shared(0, name='foo')
        shared_b = theano.shared(1, name='foo')
        shared_c = theano.shared(2, name='foo')
        with open('model.zip', 'wb') as out_file:
            dump((shared_a, shared_b, shared_c, np.array(3)), out_file)
        keys = list(np.load('model.zip').keys())
        assert keys == ['foo', 'foo_2', 'foo_3', 'array_0', 'pkl']
        loaded_c = np.load('model.zip')['foo_3']
        assert loaded_c == np.array(2)
        with open('model.zip', 'rb') as in_file:
            shared_a, shared_b, loaded_c, array = load(in_file)
        assert array == np.array(3)
class TestStripPickler(unittest.TestCase):
def setUp(self):
# Work in a temporary directory to avoid cluttering the repository
self.origdir = os.getcwd()
self.tmpdir = mkdtemp()
os.chdir(self.tmpdir)
def tearDown(self):
# Get back to the original dir, and delete the temporary one
os.chdir(self.origdir)
if self.tmpdir is not None:
shutil.rmtree(self.tmpdir)
def test0(self):
with open('test.pkl', 'wb') as f:
m = theano.tensor.matrix()
dest_pkl = 'my_test.pkl'
f = open(dest_pkl, 'wb')
strip_pickler = StripPickler(f, protocol=-1)
strip_pickler.dump(m)
|
[
"numpy.load",
"os.getcwd",
"theano.misc.pkl_utils.dump",
"theano.misc.pkl_utils.load",
"theano.sandbox.rng_mrg.MRG_RandomStreams",
"tempfile.mkdtemp",
"theano.shared",
"numpy.array",
"theano.misc.pkl_utils.StripPickler",
"shutil.rmtree",
"os.chdir",
"theano.tensor.matrix"
] |
[((441, 452), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (450, 452), False, 'import os\n'), ((475, 484), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (482, 484), False, 'from tempfile import mkdtemp\n'), ((493, 514), 'os.chdir', 'os.chdir', (['self.tmpdir'], {}), '(self.tmpdir)\n', (501, 514), False, 'import os\n'), ((617, 639), 'os.chdir', 'os.chdir', (['self.origdir'], {}), '(self.origdir)\n', (625, 639), False, 'import os\n'), ((764, 783), 'theano.sandbox.rng_mrg.MRG_RandomStreams', 'MRG_RandomStreams', ([], {}), '()\n', (781, 783), False, 'from theano.sandbox.rng_mrg import MRG_RandomStreams\n'), ((1012, 1040), 'theano.shared', 'theano.shared', (['(0)'], {'name': '"""foo"""'}), "(0, name='foo')\n", (1025, 1040), False, 'import theano\n'), ((1057, 1085), 'theano.shared', 'theano.shared', (['(1)'], {'name': '"""foo"""'}), "(1, name='foo')\n", (1070, 1085), False, 'import theano\n'), ((1102, 1130), 'theano.shared', 'theano.shared', (['(2)'], {'name': '"""foo"""'}), "(2, name='foo')\n", (1115, 1130), False, 'import theano\n'), ((1720, 1731), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1729, 1731), False, 'import os\n'), ((1754, 1763), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (1761, 1763), False, 'from tempfile import mkdtemp\n'), ((1772, 1793), 'os.chdir', 'os.chdir', (['self.tmpdir'], {}), '(self.tmpdir)\n', (1780, 1793), False, 'import os\n'), ((1896, 1918), 'os.chdir', 'os.chdir', (['self.origdir'], {}), '(self.origdir)\n', (1904, 1918), False, 'import os\n'), ((688, 714), 'shutil.rmtree', 'shutil.rmtree', (['self.tmpdir'], {}), '(self.tmpdir)\n', (701, 714), False, 'import shutil\n'), ((835, 847), 'theano.misc.pkl_utils.dump', 'dump', (['rng', 'f'], {}), '(rng, f)\n', (839, 847), False, 'from theano.misc.pkl_utils import dump, load, StripPickler\n'), ((905, 912), 'theano.misc.pkl_utils.load', 'load', (['f'], {}), '(f)\n', (909, 912), False, 'from theano.misc.pkl_utils import dump, load, StripPickler\n'), ((1362, 1382), 'numpy.load', 'np.load', 
(['"""model.zip"""'], {}), "('model.zip')\n", (1369, 1382), True, 'import numpy as np\n'), ((1416, 1427), 'numpy.array', 'np.array', (['(2)'], {}), '(2)\n', (1424, 1427), True, 'import numpy as np\n'), ((1512, 1519), 'theano.misc.pkl_utils.load', 'load', (['f'], {}), '(f)\n', (1516, 1519), False, 'from theano.misc.pkl_utils import dump, load, StripPickler\n'), ((1544, 1555), 'numpy.array', 'np.array', (['(3)'], {}), '(3)\n', (1552, 1555), True, 'import numpy as np\n'), ((1967, 1993), 'shutil.rmtree', 'shutil.rmtree', (['self.tmpdir'], {}), '(self.tmpdir)\n', (1980, 1993), False, 'import shutil\n'), ((2074, 2096), 'theano.tensor.matrix', 'theano.tensor.matrix', ([], {}), '()\n', (2094, 2096), False, 'import theano\n'), ((2199, 2227), 'theano.misc.pkl_utils.StripPickler', 'StripPickler', (['f'], {'protocol': '(-1)'}), '(f, protocol=-1)\n', (2211, 2227), False, 'from theano.misc.pkl_utils import dump, load, StripPickler\n'), ((1213, 1224), 'numpy.array', 'np.array', (['(3)'], {}), '(3)\n', (1221, 1224), True, 'import numpy as np\n'), ((1250, 1270), 'numpy.load', 'np.load', (['"""model.zip"""'], {}), "('model.zip')\n", (1257, 1270), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
'''
Configure the WiFi Preferred Networks settings on macOS
=======================================================
Support for configuring a WiFi Preferred Networks settings on macOS.
Requires the PyObjC Library, bundled with the macOS installer as of 2029.2.0.
.. note::
This state is primarily for changing and removing existing networks, if
you would like to add a network, please consider using a macOS profile.
To move a SSID to the top of the preferred networks list.
.. code-block:: yaml
move_PiedPiper_to_top:
wifi.top:
- name: PiedPiper
To move a SSID to the bottom of the preferred networks list.
.. code-block:: yaml
move_Hooli-Guest_to_the_bottom:
wifi.bottom:
- name: Hooli-Guest
To remove a SSID from the list.
.. code-block:: yaml
remove_Hooli-Guest_network:
wifi.remove:
- name: Hooli-Guest
You can also remove a SSID from the list but only when another SSID is present.
Remove Hooli but only when PiedPiper is already in the list.
.. code-block:: yaml
remove_Hooli_network_if_PiedPiper:
wifi.remove:
- name: Hooli
- required_ssid: PiedPiper
If you don't want to remove a SSID but you preffer it to not AutoJoin.
.. code-block:: yaml
disable_autojoin_for_Hooli-Guest:
wifi.disable_autojoin:
- name: Hooli-Guest
Or make sure that it is set to AutoJoin
.. code-block:: yaml
enable_autojoin_for_PiedPiper:
wifi.enable_autojoin:
- name: PiedPiper
'''
import salt.utils
import salt.utils.platform
import logging
import sys
import os.path
import collections
log = logging.getLogger(__name__)
try:
import objc, ctypes.util
from Foundation import NSOrderedSet
PYOBJC = True
except ImportError:
PYOBJC = False
__virtualname__ = 'wifi'
def __virtual__():
"""
Check if macOS and PyObjC is available
"""
if not salt.utils.platform.is_darwin():
return (False, 'module: mac_wifi only available on macOS.')
if not PYOBJC:
return (False, 'PyObjC not available.')
return __virtualname__
def top(name):
'''
Will place the provided SSID name to the top of the WiFi preferred network
list.
name:
The name of the SSID to move to the top of the list.
.. code-block:: yaml
move_PiedPiper_to_top:
wifi.top:
- name: PiedPiper
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
# get the current wifi position
current_position = __salt__['wifi.ssid_index'](name)
# check the position of current ssid.
if current_position is 0:
ret['comment'] = 'SSID [{}] is already at the top'.format(name)
return ret
# if not found in the list we can return okay.
if current_position is None:
ret['comment'] = 'SSID [{}] was not found in the list.'.format(name)
# testing... check... check....
if __opts__['test'] == True:
ret['comment'] = ('SSID [{}] will be removed'.format(name))
ret['result'] = None
return ret
# need to change to top
move_to_top = __salt__['wifi.top'](name)
if not move_to_top:
ret['result'] = False
ret['comment'] = 'Failed to move SSID [{}] to the top'.format(name)
return ret
new_position = __salt__['wifi.ssid_index'](name)
if new_position is not 0:
# we failed
ret['result'] = False
ret['comment'] = ('Failed to change the position '
'of SSID [{}].'.format(name))
return ret
#sweet sweet success
ret['comment'] = 'Successfully moved SSID [{}] to the top '\
'of the Preferred Network\'s order list.'.format(name)
ret['changes'].update(
{name: {'old': 'Was [{}] position(s) from the top of '\
'the Preferred Network\'s order list.'.format(
current_position),
'new': 'At the top of the '\
'Preferred Network\'s order list.'}})
return ret
def bottom(name):
'''
Will move the provided SSID name to the bottom of the WiFi preferred network
list.
name:
The name of the SSID to move to the bottom.
.. code-block:: yaml
# To move an SSID to the bottom of the preferred networks list.
move_Hooli-Guest_to_the_bottom:
wifi.bottom:
- name: Hooli-Guest
.. note::
This state will return successful if the SSID name is not in the
preferred networks order list.
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
# get the current wifi position
current_position = __salt__['wifi.ssid_index'](name, reverse=True)
# check the position of current ssid.
if current_position is 0:
ret['comment'] = 'SSID [{}] is already at the bottom'.format(name)
return ret
# if not found in the list we can return okay.
if current_position is None:
ret['comment'] = 'SSID [{}] was not found in the list.'.format(name)
# testing 1... 2... 3...
if __opts__['test'] == True:
ret['comment'] = ('SSID [{}] will be moved to the bottom'.format(name))
ret['result'] = None
return ret
# need to move to the bottom
move_to_bottom = __salt__['wifi.bottom'](name)
if not move_to_bottom:
ret['result'] = False
ret['comment'] = 'Failed to move SSID "{}" to the bottom'.format(name)
return ret
new_position = __salt__['wifi.ssid_index'](name, reverse=True)
if new_position is not 0:
# we failed to change the position.
ret['result'] = False
ret['comment'] = 'Failed to change the position'\
' of SSID "{}"'.format(name)
return ret
# success
ret['comment'] = 'Successfully moved SSID [{}] to the bottom '\
'of the Preferred Network\'s order list.'.format(name)
ret['changes'].update(
{name: {'old': 'Was [{}] position(s) from the bottom of '\
'the Preferred Network\'s order list.'.format(
current_position),
'new': 'At the bottom of the '\
'Preferred Network\'s order list.'}})
return ret
def remove(name, required_ssid=None):
'''
Make sure an SSID is in the network list is removed.
name
The name of the SSID to remove.
required_ssid
Name of the SSID that should be present in the network list before
removing the SSID. This should be set to the name of another SSID.
If this SSID is not in the list salt will not remove the given SSID.
.. code-block:: yaml
# remove a SSID from the list.
remove_Hooli-Guest_network:
wifi.remove:
- name: Hooli-Guest
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
# check if our SSID is present
name_available = __salt__['wifi.exists'](name)
if required_ssid and not __salt__['wifi.exists'](required_ssid):
ret['comment'] = ('Could not find required SSID [{}] in order to make '
'changes to [{}].'.format(required_ssid, name))
return ret
if not name_available:
ret['comment'] = ('SSID [{}] is already removed.'.format(name))
return ret
if __opts__['test'] == True:
ret['comment'] = ('SSID [{}] will be removed'.format(name))
ret['result'] = None
return ret
# need to remove the SSID
remove = __salt__['wifi.remove'](name)
if not remove:
ret['result'] = False
ret['comment'] = 'Failed to remove SSID [{}] from list.'.format(name)
return ret
# we removed the SSID Successfully
ret['comment'] = ('Successfully removed SSID [{}] from '
'the Preferred Network\'s order list.'.format(name))
ret['changes'].update({name: {'old': 'Available.',
'new': 'Removed.'}})
return ret
def disable_autojoin(name, required_ssid=None, ignore_missing=True):
'''
Will turn off AutoJoin for the provided SSID.
name
The name of the SSID to disable autojoin.
required_ssid
Name of the SSID that should be present in the network list before
changing the SSID. This should be set to the name of another SSID.
If this SSID is not in the list salt will not change the given SSID.
ignore_missing : True
Salt will ignore return a success by default if the SSID is not
available, set this to `False` to have salt return as failed if the
given SSID is missing.
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
# see if our SSID is present
name_available = __salt__['wifi.exists'](name)
if not name_available and ignore_missing:
ret['comment'] = ('SSID [{}] is not available, '
'no changes needed.'.format(name))
return ret
if not name_available and not ignore_missing:
ret['comment'] = ('SSID [{}] is not available and '
'ignore_missing is False.'.format(name))
ret['result'] = False
return ret
if required_ssid and not __salt__['wifi.exists'](required_ssid):
ret['comment'] = ('Could not find required SSID [{}] in order to make '
'changes to [{}].'.format(required_ssid, name))
return ret
# check the current state of autojoin.
autojoin_disabled = __salt__['wifi.autojoin_disabled'](name)
if autojoin_disabled:
ret['comment'] = ('AutoJoin for SSID [{}] is already '
'disabled.'.format(name))
return ret
if __opts__['test'] == True:
ret['comment'] = ('AutoJoin for SSID [{}] will be disabled'.format(name))
ret['result'] = None
return ret
# we need to disable autojoin
disable_autojoin = __salt__['wifi.disable_autojoin'](name)
if not disable_autojoin:
ret['result'] = False
ret['comment'] = ('Failed to disable AutoJoin on the'
' SSID [{}]'.format(name))
return ret
# all good
ret['comment'] = 'Successfully disabled autojoin for SSID [{}]'.format(name)
ret['changes'].update({name: {'old': 'AutoJoin Enabled.',
'new': 'AutoJoin Disabled.'}})
return ret
def enable_autojoin(name, required_ssid=None, ignore_missing=True):
'''
Will turn on AutoJoin for the provided SSID.
name
The name of the SSID to enable autojoin.
required_ssid
Name of the SSID that should be present in the network list before
changing the SSID. This should be set to the name of another SSID.
If this SSID is not in the list salt will not change the given SSID.
ignore_missing : True
Salt will return as successful by default if the SSID is not
available, set this to `False` to have salt return as failed if the
given SSID is missing.
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
# see if our SSID is present
name_available = __salt__['wifi.exists'](name)
if not name_available and ignore_missing:
ret['comment'] = ('SSID [{}] is not available, '
'no changes needed.'.format(name))
return ret
if not name_available and not ignore_missing:
ret['comment'] = ('SSID [{}] is not available and '
'ignore_missing is False.'.format(name))
ret['result'] = False
return ret
if required_ssid and not __salt__['wifi.exists'](required_ssid):
ret['comment'] = ('Could not find required SSID [{}] in order to make '
'changes to [{}].'.format(required_ssid, name))
return ret
# check the current state of autojoin.
autojoin_enabled = __salt__['wifi.autojoin_enabled'](name)
if autojoin_enabled:
ret['comment'] = ('AutoJoin for SSID [{}] is already '
'enabled'.format(name))
return ret
if __opts__['test'] == True:
ret['comment'] = ('AutoJoin for SSID [{}] will be enabled'.format(name))
ret['result'] = None
return ret
# we need to disable autojoin
enable_autojoin = __salt__['wifi.enable_autojoin'](name)
if not enable_autojoin:
# we failed to enable autojoin
ret['result'] = False
ret['comment'] = ('Failed to enable AutoJoin on the'
' SSID [{}]'.format(name))
return ret
# all good
ret['comment'] = 'Successfully enabled autojoin for SSID [{}]'.format(name)
ret['changes'].update({name: {'old': 'AutoJoin Disabled.',
'new': 'AutoJoin Enabled.'}})
return ret
|
[
"logging.getLogger"
] |
[((1648, 1675), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1665, 1675), False, 'import logging\n')]
|
# Import configurations
import FWCore.ParameterSet.Config as cms
process = cms.Process("test")
process.load("CalibTracker.SiStripDCS.MessLogger_cfi")
process.SiStripConfigDb = cms.Service("SiStripConfigDb",
ConfDb = cms.untracked.string('username/password@cms_omds_nolb'),
TNS_ADMIN = cms.untracked.string('.'),
UsingDb = cms.untracked.bool(True),
Partitions = cms.untracked.PSet(
TPDD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TP_08-AUG-2008_1'),
ForceVersions = cms.untracked.bool(True),
FecVersion = cms.untracked.vuint32(430,2),
DcuDetIdsVersion = cms.untracked.vuint32(9,0)
),
TMDD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TM_08-AUG-2008_1'),
ForceVersions = cms.untracked.bool(True),
FecVersion = cms.untracked.vuint32(428,1),
DcuDetIdsVersion = cms.untracked.vuint32(9,0)
),
TIDD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_08-AUG-2008_1'),
ForceVersions = cms.untracked.bool(True),
FecVersion = cms.untracked.vuint32(427,1),
DcuDetIdsVersion = cms.untracked.vuint32(9,0)
),
TODD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TO_08-AUG-2008_1'),
ForceVersions = cms.untracked.bool(True),
FecVersion = cms.untracked.vuint32(415,3),
DcuDetIdsVersion = cms.untracked.vuint32(9,0)
),
TEPD2 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TE_27-JUN-2008_2'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(211, 2)
),
TMPD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TE_17-JUN-2008_12'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(163, 1)
),
TEPD1 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TE_24-JUN-2008_2'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(204, 1)
),
TEPD4 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TE_30-JUN-2008_1'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(229, 1)
),
TEPD3 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TE_27-JUN-2008_4'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(214, 1)
),
TPPD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TE_17-JUN-2008_11'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(162, 1)
),
TIPD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_17-JUN-2008_2'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(157, 1)
),
TIPD2 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_18-JUN-2008_1'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(165, 1)
),
TIPD3 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_18-JUN-2008_10'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(179, 1)
),
TIPD4 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_20-JUN-2008_1'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(192, 1)
),
TIPD5 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_27-JUN-2008_1'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(212, 1)
),
TIPD6 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TI_27-JUN-2008_3'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(218, 1)
),
TOPD = cms.untracked.PSet(
PartitionName = cms.untracked.string('TO_18-JUN-2008_1_TEST_1'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(177, 1)
),
TOPD2 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TO_18-JUN-2008_2'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(178, 1)
),
TOPD3 = cms.untracked.PSet(
PartitionName = cms.untracked.string('TO_30-JUN-2008_1'),
ForceVersions = cms.untracked.bool(True),
DcuPsuMapVersion = cms.untracked.vuint32(228, 1)
)
)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.source = cms.Source("EmptySource",
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.load("CondCore.DBCommon.CondDBCommon_cfi")
process.CondDBCommon.connect = cms.string('oracle://cms_omds_nolb/username')
process.SiStripModuleHVBuilder = cms.Service("SiStripModuleHVBuilder",
onlineDB = cms.untracked.string('oracle://cms_omds_nolb/username'),
authPath = cms.untracked.string('.'),
# Format for date/time vector: year, month, day, hour, minute, second, nanosecond
Tmin = cms.untracked.vint32(2008, 10, 13, 1, 0, 0, 0),
Tmax = cms.untracked.vint32(2008, 10, 13, 12, 0, 0, 0),
# Do NOT change this unless you know what you are doing!
TSetMin = cms.untracked.vint32(2007, 11, 26, 0, 0, 0, 0),
# queryType can be either STATUSCHANGE or LASTVALUE
queryType = cms.untracked.string('LASTVALUE'),
# if reading lastValue from file put insert file name here
lastValueFile = cms.untracked.string(''),
# flag to show if you are reading from file for lastValue or not
lastValueFromFile = cms.untracked.bool(False),
#
debugModeOn = cms.untracked.bool(False)
)
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
BlobStreamerName = cms.untracked.string('TBufferBlobStreamingService'),
DBParameters = cms.PSet(
messageLevel = cms.untracked.int32(2),
authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
),
timetype = cms.untracked.string('timestamp'),
connect = cms.string('sqlite_file:dbfile.db'),
toPut = cms.VPSet(cms.PSet(
record = cms.string('SiStripDetVOffRcd'),
tag = cms.string('SiStripDetVOff_Fake_31X')
)),
logconnect = cms.untracked.string('sqlite_file:logfile.db')
)
process.siStripPopConModuleHV = cms.EDAnalyzer("SiStripPopConModuleHV",
record = cms.string('SiStripDetVOffRcd'),
loggingOn= cms.untracked.bool(True),
SinceAppendMode=cms.bool(True),
Source = cms.PSet(
name = cms.untracked.string('default')
)
)
process.p = cms.Path(process.siStripPopConModuleHV)
|
[
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.vuint32",
"FWCore.ParameterSet.Config.untracked.vint32",
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.bool",
"FWCore.ParameterSet.Config.untracked.uint32",
"FWCore.ParameterSet.Config.Path"
] |
[((76, 95), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""test"""'], {}), "('test')\n", (87, 95), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5331, 5376), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""oracle://cms_omds_nolb/username"""'], {}), "('oracle://cms_omds_nolb/username')\n", (5341, 5376), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7362, 7401), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.siStripPopConModuleHV'], {}), '(process.siStripPopConModuleHV)\n', (7370, 7401), True, 'import FWCore.ParameterSet.Config as cms\n'), ((223, 278), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""username/password@cms_omds_nolb"""'], {}), "('username/password@cms_omds_nolb')\n", (243, 278), True, 'import FWCore.ParameterSet.Config as cms\n'), ((296, 321), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""."""'], {}), "('.')\n", (316, 321), True, 'import FWCore.ParameterSet.Config as cms\n'), ((337, 361), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (355, 361), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5090, 5112), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1)'], {}), '(1)\n', (5109, 5112), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5182, 5205), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(1)'], {}), '(1)\n', (5202, 5205), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5222, 5245), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(1)'], {}), '(1)\n', (5242, 5245), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5464, 5519), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""oracle://cms_omds_nolb/username"""'], {}), "('oracle://cms_omds_nolb/username')\n", (5484, 5519), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5536, 5561), 
'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""."""'], {}), "('.')\n", (5556, 5561), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5687, 5733), 'FWCore.ParameterSet.Config.untracked.vint32', 'cms.untracked.vint32', (['(2008)', '(10)', '(13)', '(1)', '(0)', '(0)', '(0)'], {}), '(2008, 10, 13, 1, 0, 0, 0)\n', (5707, 5733), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5746, 5793), 'FWCore.ParameterSet.Config.untracked.vint32', 'cms.untracked.vint32', (['(2008)', '(10)', '(13)', '(12)', '(0)', '(0)', '(0)'], {}), '(2008, 10, 13, 12, 0, 0, 0)\n', (5766, 5793), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5866, 5912), 'FWCore.ParameterSet.Config.untracked.vint32', 'cms.untracked.vint32', (['(2007)', '(11)', '(26)', '(0)', '(0)', '(0)', '(0)'], {}), '(2007, 11, 26, 0, 0, 0, 0)\n', (5886, 5912), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6057, 6090), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""LASTVALUE"""'], {}), "('LASTVALUE')\n", (6077, 6090), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6201, 6225), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['""""""'], {}), "('')\n", (6221, 6225), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6346, 6371), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(False)'], {}), '(False)\n', (6364, 6371), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6393, 6418), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(False)'], {}), '(False)\n', (6411, 6418), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6510, 6561), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TBufferBlobStreamingService"""'], {}), "('TBufferBlobStreamingService')\n", (6530, 6561), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6741, 6774), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""timestamp"""'], 
{}), "('timestamp')\n", (6761, 6774), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6790, 6825), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""sqlite_file:dbfile.db"""'], {}), "('sqlite_file:dbfile.db')\n", (6800, 6825), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6986, 7032), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""sqlite_file:logfile.db"""'], {}), "('sqlite_file:logfile.db')\n", (7006, 7032), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7121, 7152), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""SiStripDetVOffRcd"""'], {}), "('SiStripDetVOffRcd')\n", (7131, 7152), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7169, 7193), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (7187, 7193), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7215, 7229), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(True)'], {}), '(True)\n', (7223, 7229), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6615, 6637), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(2)'], {}), '(2)\n', (6634, 6637), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6668, 6718), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""/afs/cern.ch/cms/DB/conddb"""'], {}), "('/afs/cern.ch/cms/DB/conddb')\n", (6688, 6718), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7269, 7300), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""default"""'], {}), "('default')\n", (7289, 7300), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6876, 6907), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""SiStripDetVOffRcd"""'], {}), "('SiStripDetVOffRcd')\n", (6886, 6907), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6923, 6960), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""SiStripDetVOff_Fake_31X"""'], {}), "('SiStripDetVOff_Fake_31X')\n", (6933, 
6960), True, 'import FWCore.ParameterSet.Config as cms\n'), ((463, 503), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TP_08-AUG-2008_1"""'], {}), "('TP_08-AUG-2008_1')\n", (483, 503), True, 'import FWCore.ParameterSet.Config as cms\n'), ((533, 557), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (551, 557), True, 'import FWCore.ParameterSet.Config as cms\n'), ((587, 616), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(430)', '(2)'], {}), '(430, 2)\n', (608, 616), True, 'import FWCore.ParameterSet.Config as cms\n'), ((648, 675), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(9)', '(0)'], {}), '(9, 0)\n', (669, 675), True, 'import FWCore.ParameterSet.Config as cms\n'), ((749, 789), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TM_08-AUG-2008_1"""'], {}), "('TM_08-AUG-2008_1')\n", (769, 789), True, 'import FWCore.ParameterSet.Config as cms\n'), ((819, 843), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (837, 843), True, 'import FWCore.ParameterSet.Config as cms\n'), ((873, 902), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(428)', '(1)'], {}), '(428, 1)\n', (894, 902), True, 'import FWCore.ParameterSet.Config as cms\n'), ((934, 961), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(9)', '(0)'], {}), '(9, 0)\n', (955, 961), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1035, 1075), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TI_08-AUG-2008_1"""'], {}), "('TI_08-AUG-2008_1')\n", (1055, 1075), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1105, 1129), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (1123, 1129), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1159, 1188), 
'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(427)', '(1)'], {}), '(427, 1)\n', (1180, 1188), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1220, 1247), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(9)', '(0)'], {}), '(9, 0)\n', (1241, 1247), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1321, 1361), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TO_08-AUG-2008_1"""'], {}), "('TO_08-AUG-2008_1')\n", (1341, 1361), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1391, 1415), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (1409, 1415), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1445, 1474), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(415)', '(3)'], {}), '(415, 3)\n', (1466, 1474), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1506, 1533), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(9)', '(0)'], {}), '(9, 0)\n', (1527, 1533), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1608, 1648), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TE_27-JUN-2008_2"""'], {}), "('TE_27-JUN-2008_2')\n", (1628, 1648), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1678, 1702), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (1696, 1702), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1735, 1764), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(211)', '(2)'], {}), '(211, 2)\n', (1756, 1764), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1839, 1880), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TE_17-JUN-2008_12"""'], {}), "('TE_17-JUN-2008_12')\n", (1859, 1880), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1910, 1934), 
'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (1928, 1934), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1967, 1996), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(163)', '(1)'], {}), '(163, 1)\n', (1988, 1996), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2072, 2112), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TE_24-JUN-2008_2"""'], {}), "('TE_24-JUN-2008_2')\n", (2092, 2112), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2142, 2166), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (2160, 2166), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2199, 2228), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(204)', '(1)'], {}), '(204, 1)\n', (2220, 2228), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2304, 2344), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TE_30-JUN-2008_1"""'], {}), "('TE_30-JUN-2008_1')\n", (2324, 2344), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2374, 2398), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (2392, 2398), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2431, 2460), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(229)', '(1)'], {}), '(229, 1)\n', (2452, 2460), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2536, 2576), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TE_27-JUN-2008_4"""'], {}), "('TE_27-JUN-2008_4')\n", (2556, 2576), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2606, 2630), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (2624, 2630), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2663, 2692), 'FWCore.ParameterSet.Config.untracked.vuint32', 
'cms.untracked.vuint32', (['(214)', '(1)'], {}), '(214, 1)\n', (2684, 2692), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2767, 2808), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TE_17-JUN-2008_11"""'], {}), "('TE_17-JUN-2008_11')\n", (2787, 2808), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2838, 2862), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (2856, 2862), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2895, 2924), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(162)', '(1)'], {}), '(162, 1)\n', (2916, 2924), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2999, 3039), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TI_17-JUN-2008_2"""'], {}), "('TI_17-JUN-2008_2')\n", (3019, 3039), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3069, 3093), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (3087, 3093), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3126, 3155), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(157)', '(1)'], {}), '(157, 1)\n', (3147, 3155), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3231, 3271), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TI_18-JUN-2008_1"""'], {}), "('TI_18-JUN-2008_1')\n", (3251, 3271), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3301, 3325), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (3319, 3325), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3358, 3387), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(165)', '(1)'], {}), '(165, 1)\n', (3379, 3387), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3463, 3504), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', 
(['"""TI_18-JUN-2008_10"""'], {}), "('TI_18-JUN-2008_10')\n", (3483, 3504), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3534, 3558), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (3552, 3558), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3591, 3620), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(179)', '(1)'], {}), '(179, 1)\n', (3612, 3620), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3696, 3736), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TI_20-JUN-2008_1"""'], {}), "('TI_20-JUN-2008_1')\n", (3716, 3736), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3766, 3790), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (3784, 3790), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3823, 3852), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(192)', '(1)'], {}), '(192, 1)\n', (3844, 3852), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3928, 3968), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TI_27-JUN-2008_1"""'], {}), "('TI_27-JUN-2008_1')\n", (3948, 3968), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3998, 4022), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (4016, 4022), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4055, 4084), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(212)', '(1)'], {}), '(212, 1)\n', (4076, 4084), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4160, 4200), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TI_27-JUN-2008_3"""'], {}), "('TI_27-JUN-2008_3')\n", (4180, 4200), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4230, 4254), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', 
(4248, 4254), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4287, 4316), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(218)', '(1)'], {}), '(218, 1)\n', (4308, 4316), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4391, 4438), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TO_18-JUN-2008_1_TEST_1"""'], {}), "('TO_18-JUN-2008_1_TEST_1')\n", (4411, 4438), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4468, 4492), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (4486, 4492), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4525, 4554), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(177)', '(1)'], {}), '(177, 1)\n', (4546, 4554), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4630, 4670), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TO_18-JUN-2008_2"""'], {}), "('TO_18-JUN-2008_2')\n", (4650, 4670), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4700, 4724), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (4718, 4724), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4757, 4786), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(178)', '(1)'], {}), '(178, 1)\n', (4778, 4786), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4862, 4902), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TO_30-JUN-2008_1"""'], {}), "('TO_30-JUN-2008_1')\n", (4882, 4902), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4932, 4956), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (4950, 4956), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4989, 5018), 'FWCore.ParameterSet.Config.untracked.vuint32', 'cms.untracked.vuint32', (['(228)', '(1)'], {}), '(228, 1)\n', (5010, 5018), True, 'import 
FWCore.ParameterSet.Config as cms\n')]
|
from __future__ import absolute_import, print_function, division
import contextlib
import blinker
import pprint
from mitmproxy import exceptions
"""
The base implementation for Options.
"""
class OptManager(object):
    """
    Container for option values with change notification.

    .changed is a blinker Signal that triggers whenever options are
    updated. If any handler in the chain raises an exceptions.OptionsError
    exception, all changes are rolled back, the exception is suppressed,
    and the .errored signal is notified.
    """
    # Becomes True at the end of __init__; while False, attribute writes go
    # straight into the backing dict instead of through .update().
    _initialized = False
    attributes = []

    def __new__(cls, *args, **kwargs):
        # Initialize instance._opts before __init__ is called.
        # This allows us to call super().__init__() last, which then sets
        # ._initialized = True as the final operation.
        instance = super(OptManager, cls).__new__(cls)
        instance.__dict__["_opts"] = {}
        return instance

    def __init__(self):
        # Assign via __dict__ to bypass our own __setattr__, which would
        # otherwise treat these as option writes.
        self.__dict__["changed"] = blinker.Signal()
        self.__dict__["errored"] = blinker.Signal()
        self.__dict__["_initialized"] = True

    @contextlib.contextmanager
    def rollback(self, updated):
        """
        Restore the previous option values if the wrapped block raises
        exceptions.OptionsError: the exception is suppressed, .errored is
        notified, and .changed is re-sent with the rolled-back state.
        """
        old = self._opts.copy()
        try:
            yield
        except exceptions.OptionsError as e:
            # Notify error handlers
            self.errored.send(self, exc=e)
            # Rollback
            self.__dict__["_opts"] = old
            self.changed.send(self, updated=updated)

    def __eq__(self, other):
        # Fix: comparing against a foreign type used to raise AttributeError
        # (no ._opts); returning NotImplemented lets Python fall back to its
        # default comparison instead.
        if isinstance(other, OptManager):
            return self._opts == other._opts
        return NotImplemented

    def __copy__(self):
        return self.__class__(**self._opts)

    def __getattr__(self, attr):
        if attr in self._opts:
            return self._opts[attr]
        else:
            raise AttributeError("No such option: %s" % attr)

    def __setattr__(self, attr, value):
        # During construction write directly; afterwards route through
        # .update() so rollback and notification apply.
        if not self._initialized:
            self._opts[attr] = value
            return
        self.update(**{attr: value})

    def keys(self):
        """Return the set of known option names."""
        return set(self._opts.keys())

    def get(self, k, d=None):
        """Return the value of option k, or d if k is unknown."""
        return self._opts.get(k, d)

    def update(self, **kwargs):
        """
        Update multiple options at once, notifying .changed once. Raises
        KeyError for unknown option names; rolls back if any changed-handler
        raises exceptions.OptionsError.
        """
        updated = set(kwargs.keys())
        for k in kwargs:
            if k not in self._opts:
                raise KeyError("No such option: %s" % k)
        with self.rollback(updated):
            self._opts.update(kwargs)
            self.changed.send(self, updated=updated)

    def setter(self, attr):
        """
        Generate a setter for a given attribute. This returns a callable
        taking a single argument.
        """
        if attr not in self._opts:
            raise KeyError("No such option: %s" % attr)
        def setter(x):
            setattr(self, attr, x)
        return setter

    def toggler(self, attr):
        """
        Generate a toggler for a boolean attribute. This returns a callable
        that takes no arguments.
        """
        if attr not in self._opts:
            raise KeyError("No such option: %s" % attr)
        def toggle():
            setattr(self, attr, not getattr(self, attr))
        return toggle

    def __repr__(self):
        options = pprint.pformat(self._opts, indent=4).strip(" {}")
        if "\n" in options:
            options = "\n    " + options + "\n"
        return "{mod}.{cls}({{{options}}})".format(
            mod=type(self).__module__,
            cls=type(self).__name__,
            options=options
        )
|
[
"pprint.pformat",
"blinker.Signal"
] |
[((970, 986), 'blinker.Signal', 'blinker.Signal', ([], {}), '()\n', (984, 986), False, 'import blinker\n'), ((1022, 1038), 'blinker.Signal', 'blinker.Signal', ([], {}), '()\n', (1036, 1038), False, 'import blinker\n'), ((3127, 3163), 'pprint.pformat', 'pprint.pformat', (['self._opts'], {'indent': '(4)'}), '(self._opts, indent=4)\n', (3141, 3163), False, 'import pprint\n')]
|
# MIT License
#
# Copyright (c) 2018-2019 Red Hat, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
from typing import List, Optional, Dict, Set, Iterable
from urllib.parse import urlparse
from ogr.abstract import (
PRStatus,
GitTag,
CommitFlag,
CommitComment,
CommitStatus,
PullRequest,
Issue,
IssueStatus,
Release,
AccessLevel,
)
from ogr.exceptions import (
PagureAPIException,
OgrException,
OperationNotSupported,
)
from ogr.read_only import if_readonly, GitProjectReadOnly
from ogr.services import pagure as ogr_pagure
from ogr.services.base import BaseGitProject
from ogr.services.pagure.flag import PagureCommitFlag
from ogr.services.pagure.issue import PagureIssue
from ogr.services.pagure.pull_request import PagurePullRequest
from ogr.services.pagure.release import PagureRelease
from ogr.utils import RequestResponse, filter_paths, indirect
logger = logging.getLogger(__name__)
class PagureProject(BaseGitProject):
    """A project (repository) hosted on a Pagure instance, optionally a user's fork."""
    service: "ogr_pagure.PagureService"
    def __init__(
        self,
        repo: str,
        namespace: Optional[str],
        service: "ogr_pagure.PagureService",
        username: str = None,
        is_fork: bool = False,
    ) -> None:
        """
        :param repo: repository name
        :param namespace: Pagure namespace the repo lives in (or None)
        :param service: service object used for API communication
        :param username: user the fork belongs to (used when is_fork=True)
        :param is_fork: True if this object represents a user's fork
        """
        super().__init__(repo, service, namespace)
        self.read_only = service.read_only
        self._is_fork = is_fork
        self._username = username
        self.repo = repo
        self.namespace = namespace
    def __str__(self) -> str:
        # Fork-specific details are shown only for forks.
        fork_info = ""
        if self._is_fork:
            fork_info = f', username="{self._username}", is_fork={self._is_fork}'
        return f'PagureProject(namespace="{self.namespace}", repo="{self.repo}"{fork_info})'
    def __eq__(self, o: object) -> bool:
        if not isinstance(o, PagureProject):
            return False
        return (
            self.repo == o.repo
            and self.namespace == o.namespace
            and self.service == o.service
            and self._username == o._username
            and self._is_fork == o._is_fork
            and self.read_only == o.read_only
        )
    @property
    def _user(self) -> str:
        # Lazily resolve the authenticated username from the service.
        if not self._username:
            self._username = self.service.user.get_username()
        return self._username
    def _call_project_api(
        self,
        *args,
        add_fork_part: bool = True,
        add_api_endpoint_part=True,
        method: str = None,
        params: dict = None,
        data: dict = None,
    ) -> dict:
        """
        Call project API endpoint.
        :param args: str parts of the url (e.g. "a", "b" will call "project/a/b")
        :param add_fork_part: If the project is a fork, use "fork/username" prefix, True by default
        :param add_api_endpoint_part: Add part with API endpoint "/api/0/"
        :param method: "GET"/"POST"/...
        :param params: http(s) query parameters
        :param data: data to be sent
        :return: dict
        """
        request_url = self._get_project_url(
            *args,
            add_api_endpoint_part=add_api_endpoint_part,
            add_fork_part=add_fork_part,
        )
        return self.service.call_api(
            url=request_url, method=method, params=params, data=data
        )
    def _call_project_api_raw(
        self,
        *args,
        add_fork_part: bool = True,
        add_api_endpoint_part=True,
        method: str = None,
        params: dict = None,
        data: dict = None,
    ) -> RequestResponse:
        """
        Call project API endpoint.
        :param args: str parts of the url (e.g. "a", "b" will call "project/a/b")
        :param add_fork_part: If the project is a fork, use "fork/username" prefix, True by default
        :param add_api_endpoint_part: Add part with API endpoint "/api/0/"
        :param method: "GET"/"POST"/...
        :param params: http(s) query parameters
        :param data: data to be sent
        :return: RequestResponse
        """
        request_url = self._get_project_url(
            *args,
            add_api_endpoint_part=add_api_endpoint_part,
            add_fork_part=add_fork_part,
        )
        return self.service.call_api_raw(
            url=request_url, method=method, params=params, data=data
        )
    def _get_project_url(self, *args, add_fork_part=True, add_api_endpoint_part=True):
        """Build the URL for this project's API endpoint from the given path parts."""
        additional_parts = []
        if self._is_fork and add_fork_part:
            additional_parts += ["fork", self._user]
        return self.service.get_api_url(
            *additional_parts,
            self.namespace,
            self.repo,
            *args,
            add_api_endpoint_part=add_api_endpoint_part,
        )
    def get_project_info(self):
        """Return the raw project metadata dict from the API."""
        return self._call_project_api(method="GET")
    def get_branches(self) -> List[str]:
        """Return the list of branch names."""
        return_value = self._call_project_api("git", "branches", method="GET")
        return return_value["branches"]
    @property
    def default_branch(self) -> str:
        """Name of the project's default branch."""
        return_value = self._call_project_api("git", "branches", method="GET")
        return return_value["default"]
    def get_description(self) -> str:
        """Return the project description string."""
        return self.get_project_info()["description"]
    @property
    def description(self) -> str:
        """
        Returns:
            Project description.
        """
        return self.get_project_info()["description"]
    @description.setter
    def description(self, new_description: str) -> None:
        """
        Args:
            new_description: description to set for project.
        Raises:
            OperationNotSupported: always; Pagure has no API for this.
        """
        raise OperationNotSupported("Not possible on Pagure")
    def get_owners(self) -> List[str]:
        """Return usernames that have owner access to the project."""
        project = self.get_project_info()
        return project["access_users"]["owner"]
    def who_can_close_issue(self) -> Set[str]:
        """Return usernames with admin/commit/ticket/owner access (can close issues)."""
        users: Set[str] = set()
        project = self.get_project_info()
        users.update(project["access_users"]["admin"])
        users.update(project["access_users"]["commit"])
        users.update(project["access_users"]["ticket"])
        users.update(project["access_users"]["owner"])
        return users
    def who_can_merge_pr(self) -> Set[str]:
        """Return usernames with admin/commit/owner access (can merge PRs)."""
        users: Set[str] = set()
        project = self.get_project_info()
        users.update(project["access_users"]["admin"])
        users.update(project["access_users"]["commit"])
        users.update(project["access_users"]["owner"])
        return users
    def can_merge_pr(self, username) -> bool:
        """Return True if the given username is allowed to merge pull requests."""
        return username in self.who_can_merge_pr()
    def request_access(self):
        """:raises OperationNotSupported: always; Pagure has no API for this."""
        raise OperationNotSupported("Not possible on Pagure")
    @indirect(PagureIssue.get_list)
    def get_issue_list(
        self,
        status: IssueStatus = IssueStatus.open,
        author: Optional[str] = None,
        assignee: Optional[str] = None,
        labels: Optional[List[str]] = None,
    ) -> List[Issue]:
        # Implementation delegated to PagureIssue.get_list via @indirect.
        pass
    @indirect(PagureIssue.get)
    def get_issue(self, issue_id: int) -> Issue:
        # Implementation delegated to PagureIssue.get via @indirect.
        pass
    def delete(self) -> None:
        """Delete this project on the Pagure instance."""
        self._call_project_api_raw("delete", method="POST")
    @indirect(PagureIssue.create)
    def create_issue(
        self,
        title: str,
        body: str,
        private: Optional[bool] = None,
        labels: Optional[List[str]] = None,
        assignees: Optional[List[str]] = None,
    ) -> Issue:
        # Implementation delegated to PagureIssue.create via @indirect.
        pass
    @indirect(PagurePullRequest.get_list)
    def get_pr_list(
        self, status: PRStatus = PRStatus.open, assignee=None, author=None
    ) -> List[PullRequest]:
        # Implementation delegated to PagurePullRequest.get_list via @indirect.
        pass
    @indirect(PagurePullRequest.get)
    def get_pr(self, pr_id: int) -> PullRequest:
        # Implementation delegated to PagurePullRequest.get via @indirect.
        pass
    @if_readonly(return_function=GitProjectReadOnly.create_pr)
    @indirect(PagurePullRequest.create)
    def create_pr(
        self,
        title: str,
        body: str,
        target_branch: str,
        source_branch: str,
        fork_username: str = None,
    ) -> PullRequest:
        # Implementation delegated to PagurePullRequest.create via @indirect;
        # no-op stub is returned in read-only mode.
        pass
    @if_readonly(return_function=GitProjectReadOnly.fork_create)
    def fork_create(self) -> "PagureProject":
        """Fork this project for the authenticated user and return the fork."""
        request_url = self.service.get_api_url("fork")
        self.service.call_api(
            url=request_url,
            method="POST",
            data={"repo": self.repo, "namespace": self.namespace, "wait": True},
        )
        fork = self._construct_fork_project()
        logger.debug(f"Forked to {fork.full_repo_name}")
        return fork
    def _construct_fork_project(self) -> "PagureProject":
        """Return a PagureProject object representing the current user's fork."""
        return PagureProject(
            service=self.service,
            repo=self.repo,
            namespace=self.namespace,
            username=self._user,
            is_fork=True,
        )
    def get_fork(self, create: bool = True) -> Optional["PagureProject"]:
        """
        Provide PagureProject instance of a fork of this project.
        :param create: create a fork if it doesn't exist
        :return: instance of PagureProject, or None when the fork does not
            exist and create is False
        :raises OgrException: when called on a project that is itself a fork
        """
        if self.is_fork:
            raise OgrException("Cannot create fork from fork.")
        for fork in self.get_forks():
            fork_info = fork.get_project_info()
            if self._user in fork_info["user"]["name"]:
                return fork
        if not self.is_forked():
            if create:
                return self.fork_create()
            else:
                logger.info(
                    f"Fork of {self.repo}"
                    " does not exist and we were asked not to create it."
                )
                return None
        return self._construct_fork_project()
    def exists(self) -> bool:
        """Return True if the project exists (the project API endpoint responds OK)."""
        response = self._call_project_api_raw()
        return response.ok
    def is_private(self) -> bool:
        """
        Is this repo private? (accessible only by users with granted access)
        :return: if yes, return True
        :raises OperationNotSupported: for Pagure instances where this is not implemented
        """
        host = urlparse(self.service.instance_url).hostname
        if host in [
            "git.centos.org",
            "git.stg.centos.org",
            "pagure.io",
            "src.fedoraproject.org",
            "src.stg.fedoraproject.org",
        ]:
            # private repositories are not allowed on generally used pagure instances
            return False
        raise OperationNotSupported(
            f"is_private is not implemented for {self.service.instance_url}."
            f"Please open issue in https://github.com/packit/ogr"
        )
    def is_forked(self) -> bool:
        """
        Is this repo forked by the authenticated user?
        :return: if yes, return True
        """
        f = self._construct_fork_project()
        return bool(f.exists() and f.parent.exists())
    def get_is_fork_from_api(self) -> bool:
        """Return True if the API metadata lists a parent project (i.e. this is a fork)."""
        return bool(self.get_project_info()["parent"])
    @property
    def is_fork(self) -> bool:
        """True if this instance was constructed as a fork."""
        return self._is_fork
    @property
    def parent(self) -> Optional["PagureProject"]:
        """
        Return parent project if this project is a fork, otherwise return None
        """
        if self.get_is_fork_from_api():
            return PagureProject(
                repo=self.repo,
                namespace=self.get_project_info()["parent"]["namespace"],
                service=self.service,
            )
        return None
    def get_git_urls(self) -> Dict[str, str]:
        """Return the mapping of protocol name to git URL for this project."""
        return_value = self._call_project_api("git", "urls")
        return return_value["urls"]
    def add_user(self, user: str, access_level: AccessLevel) -> None:
        """
        Grant a user access to the project.
        AccessLevel.pull => ticket
        AccessLevel.triage => ticket
        AccessLevel.push => commit
        AccessLevel.admin => commit
        AccessLevel.maintain => admin
        """
        self.add_user_or_group(user, access_level, "user")
    def add_group(self, group: str, access_level: AccessLevel):
        """
        Grant a group access to the project.
        AccessLevel.pull => ticket
        AccessLevel.triage => ticket
        AccessLevel.push => commit
        AccessLevel.admin => commit
        AccessLevel.maintain => admin
        """
        self.add_user_or_group(group, access_level, "group")
    def add_user_or_group(
        self, user: str, access_level: AccessLevel, user_type
    ) -> None:
        """
        Modify the project ACLs to grant `user` (of `user_type` "user" or
        "group") the Pagure ACL mapped from `access_level`.
        :raises PagureAPIException: when the API returns 401 (not allowed)
        """
        # Mapping from ogr access levels to Pagure ACL names.
        access_dict = {
            AccessLevel.pull: "ticket",
            AccessLevel.triage: "ticket",
            AccessLevel.push: "commit",
            AccessLevel.admin: "commit",
            AccessLevel.maintain: "admin",
        }
        response = self._call_project_api_raw(
            "git",
            "modifyacls",
            method="POST",
            data={
                "user_type": user_type,
                "name": user,
                "acl": access_dict[access_level],
            },
        )
        if response.status_code == 401:
            raise PagureAPIException("You are not allowed to modify ACL's")
    def change_token(self, new_token: str) -> None:
        """
        Change an API token.
        Only for this instance.
        """
        self.service.change_token(new_token)
    def get_file_content(self, path: str, ref=None) -> str:
        """
        Return the decoded content of a file at `path` on `ref` (default
        branch when ref is None).
        :raises FileNotFoundError: when the file does not exist on the ref
        """
        ref = ref or self.default_branch
        result = self._call_project_api_raw(
            "raw", ref, "f", path, add_api_endpoint_part=False
        )
        if not result or result.reason == "NOT FOUND":
            raise FileNotFoundError(f"File '{path}' on {ref} not found")
        return result.content.decode()
    def get_sha_from_tag(self, tag_name: str) -> str:
        """
        Return the commit SHA pointed to by the given tag.
        :raises PagureAPIException: when the tag does not exist
        """
        tags_dict = self.get_tags_dict()
        if tag_name not in tags_dict:
            raise PagureAPIException(f"Tag '{tag_name}' not found.")
        return tags_dict[tag_name].commit_sha
    def commit_comment(
        self, commit: str, body: str, filename: str = None, row: int = None
    ) -> CommitComment:
        """:raises OperationNotSupported: always; Pagure has no commit comments API."""
        raise OperationNotSupported("Commit comments are not supported on Pagure.")
    @if_readonly(return_function=GitProjectReadOnly.set_commit_status)
    @indirect(PagureCommitFlag.set)
    def set_commit_status(
        self,
        commit: str,
        state: CommitStatus,
        target_url: str,
        description: str,
        context: str,
        percent: int = None,
        uid: str = None,
        trim: bool = False,
    ) -> "CommitFlag":
        # Implementation delegated to PagureCommitFlag.set via @indirect;
        # no-op stub is returned in read-only mode.
        pass
    @indirect(PagureCommitFlag.get)
    def get_commit_statuses(self, commit: str) -> List[CommitFlag]:
        # Implementation delegated to PagureCommitFlag.get via @indirect.
        pass
    def get_tags(self) -> List[GitTag]:
        """Return the project's git tags as a list of GitTag objects."""
        response = self._call_project_api("git", "tags", params={"with_commits": True})
        return [GitTag(name=n, commit_sha=c) for n, c in response["tags"].items()]
    def get_tags_dict(self) -> Dict[str, GitTag]:
        """Return the project's git tags as a mapping of tag name to GitTag."""
        response = self._call_project_api("git", "tags", params={"with_commits": True})
        return {n: GitTag(name=n, commit_sha=c) for n, c in response["tags"].items()}
    def get_releases(self) -> List[Release]:
        """Return the project's releases (one per git tag)."""
        # git tag for Pagure is shown as Release in Pagure UI
        git_tags = self.get_tags()
        return [self._release_from_git_tag(git_tag) for git_tag in git_tags]
    def get_release(self, identifier=None, name=None, tag_name=None) -> PagureRelease:
        # Releases cannot be fetched individually via the Pagure API.
        raise OperationNotSupported
    def get_latest_release(self) -> Optional[PagureRelease]:
        # Releases cannot be fetched individually via the Pagure API.
        raise OperationNotSupported
    def _release_from_git_tag(self, git_tag: GitTag) -> PagureRelease:
        """Wrap a git tag in a PagureRelease object (Pagure shows tags as releases)."""
        return PagureRelease(
            tag_name=git_tag.name,
            url="",
            created_at="",
            tarball_url="",
            git_tag=git_tag,
            project=self,
        )
    def get_forks(self) -> List["PagureProject"]:
        """
        Get forks of the project.
        :return: [PagureProject]
        """
        forks_url = self.service.get_api_url("projects")
        projects_response = self.service.call_api(
            url=forks_url, params={"fork": True, "pattern": self.repo}
        )
        return [
            PagureProject(
                repo=fork["name"],
                namespace=fork["namespace"],
                service=self.service,
                username=fork["user"]["name"],
                is_fork=True,
            )
            for fork in projects_response["projects"]
        ]
    def get_web_url(self) -> str:
        """
        Get web URL of the project.
        :return: str
        """
        return f'{self.service.instance_url}/{self.get_project_info()["url_path"]}'
    @property
    def full_repo_name(self) -> str:
        """
        Get repo name with namespace
        e.g. 'rpms/python-docker-py'
        :return: str
        """
        fork = f"fork/{self._user}/" if self.is_fork else ""
        namespace = f"{self.namespace}/" if self.namespace else ""
        return f"{fork}{namespace}{self.repo}"
    def __get_files(
        self, path: str, ref: str = None, recursive: bool = False
    ) -> Iterable[str]:
        """Yield file paths under `path` on `ref`, descending into folders when recursive."""
        # Iterative tree walk; subfolders acts as a stack of paths to visit.
        subfolders = ["."]
        while subfolders:
            path = subfolders.pop()
            split_path = []
            if path != ".":
                split_path = ["f"] + path.split("/")
            response = self._call_project_api("tree", ref, *split_path)
            for file in response["content"]:
                if file["type"] == "file":
                    yield file["path"]
                elif recursive and file["type"] == "folder":
                    subfolders.append(file["path"])
    def get_files(
        self, ref: str = None, filter_regex: str = None, recursive: bool = False
    ) -> List[str]:
        """
        Return file paths in the repository.
        :param ref: git ref to list; the default branch when None
        :param filter_regex: when given, keep only paths matching the regex
        :param recursive: when True, descend into subfolders
        :return: list of file paths
        """
        ref = ref or self.default_branch
        paths = list(self.__get_files(".", ref, recursive))
        if filter_regex:
            paths = filter_paths(paths, filter_regex)
        return paths
|
[
"urllib.parse.urlparse",
"ogr.read_only.if_readonly",
"ogr.abstract.GitTag",
"ogr.utils.filter_paths",
"ogr.exceptions.OperationNotSupported",
"ogr.exceptions.OgrException",
"ogr.exceptions.PagureAPIException",
"ogr.utils.indirect",
"logging.getLogger",
"ogr.services.pagure.release.PagureRelease"
] |
[((1943, 1970), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1960, 1970), False, 'import logging\n'), ((7643, 7673), 'ogr.utils.indirect', 'indirect', (['PagureIssue.get_list'], {}), '(PagureIssue.get_list)\n', (7651, 7673), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((7923, 7948), 'ogr.utils.indirect', 'indirect', (['PagureIssue.get'], {}), '(PagureIssue.get)\n', (7931, 7948), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((8108, 8136), 'ogr.utils.indirect', 'indirect', (['PagureIssue.create'], {}), '(PagureIssue.create)\n', (8116, 8136), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((8378, 8414), 'ogr.utils.indirect', 'indirect', (['PagurePullRequest.get_list'], {}), '(PagurePullRequest.get_list)\n', (8386, 8414), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((8558, 8589), 'ogr.utils.indirect', 'indirect', (['PagurePullRequest.get'], {}), '(PagurePullRequest.get)\n', (8566, 8589), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((8658, 8715), 'ogr.read_only.if_readonly', 'if_readonly', ([], {'return_function': 'GitProjectReadOnly.create_pr'}), '(return_function=GitProjectReadOnly.create_pr)\n', (8669, 8715), False, 'from ogr.read_only import if_readonly, GitProjectReadOnly\n'), ((8721, 8755), 'ogr.utils.indirect', 'indirect', (['PagurePullRequest.create'], {}), '(PagurePullRequest.create)\n', (8729, 8755), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((8960, 9019), 'ogr.read_only.if_readonly', 'if_readonly', ([], {'return_function': 'GitProjectReadOnly.fork_create'}), '(return_function=GitProjectReadOnly.fork_create)\n', (8971, 9019), False, 'from ogr.read_only import if_readonly, GitProjectReadOnly\n'), ((14881, 14946), 'ogr.read_only.if_readonly', 'if_readonly', ([], {'return_function': 'GitProjectReadOnly.set_commit_status'}), 
'(return_function=GitProjectReadOnly.set_commit_status)\n', (14892, 14946), False, 'from ogr.read_only import if_readonly, GitProjectReadOnly\n'), ((14952, 14982), 'ogr.utils.indirect', 'indirect', (['PagureCommitFlag.set'], {}), '(PagureCommitFlag.set)\n', (14960, 14982), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((15271, 15301), 'ogr.utils.indirect', 'indirect', (['PagureCommitFlag.get'], {}), '(PagureCommitFlag.get)\n', (15279, 15301), False, 'from ogr.utils import RequestResponse, filter_paths, indirect\n'), ((6597, 6644), 'ogr.exceptions.OperationNotSupported', 'OperationNotSupported', (['"""Not possible on Pagure"""'], {}), "('Not possible on Pagure')\n", (6618, 6644), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((7589, 7636), 'ogr.exceptions.OperationNotSupported', 'OperationNotSupported', (['"""Not possible on Pagure"""'], {}), "('Not possible on Pagure')\n", (7610, 7636), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((11268, 11416), 'ogr.exceptions.OperationNotSupported', 'OperationNotSupported', (['f"""is_private is not implemented for {self.service.instance_url}.Please open issue in https://github.com/packit/ogr"""'], {}), "(\n f'is_private is not implemented for {self.service.instance_url}.Please open issue in https://github.com/packit/ogr'\n )\n", (11289, 11416), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((14805, 14874), 'ogr.exceptions.OperationNotSupported', 'OperationNotSupported', (['"""Commit comments are not supported on Pagure."""'], {}), "('Commit comments are not supported on Pagure.')\n", (14826, 14874), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((16349, 16459), 'ogr.services.pagure.release.PagureRelease', 'PagureRelease', ([], {'tag_name': 'git_tag.name', 'url': '""""""', 'created_at': '""""""', 
'tarball_url': '""""""', 'git_tag': 'git_tag', 'project': 'self'}), "(tag_name=git_tag.name, url='', created_at='', tarball_url='',\n git_tag=git_tag, project=self)\n", (16362, 16459), False, 'from ogr.services.pagure.release import PagureRelease\n'), ((10032, 10077), 'ogr.exceptions.OgrException', 'OgrException', (['"""Cannot create fork from fork."""'], {}), "('Cannot create fork from fork.')\n", (10044, 10077), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((10899, 10934), 'urllib.parse.urlparse', 'urlparse', (['self.service.instance_url'], {}), '(self.service.instance_url)\n', (10907, 10934), False, 'from urllib.parse import urlparse\n'), ((13786, 13843), 'ogr.exceptions.PagureAPIException', 'PagureAPIException', (['"""You are not allowed to modify ACL\'s"""'], {}), '("You are not allowed to modify ACL\'s")\n', (13804, 13843), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((14568, 14618), 'ogr.exceptions.PagureAPIException', 'PagureAPIException', (['f"""Tag \'{tag_name}\' not found."""'], {}), '(f"Tag \'{tag_name}\' not found.")\n', (14586, 14618), False, 'from ogr.exceptions import PagureAPIException, OgrException, OperationNotSupported\n'), ((15528, 15556), 'ogr.abstract.GitTag', 'GitTag', ([], {'name': 'n', 'commit_sha': 'c'}), '(name=n, commit_sha=c)\n', (15534, 15556), False, 'from ogr.abstract import PRStatus, GitTag, CommitFlag, CommitComment, CommitStatus, PullRequest, Issue, IssueStatus, Release, AccessLevel\n'), ((15753, 15781), 'ogr.abstract.GitTag', 'GitTag', ([], {'name': 'n', 'commit_sha': 'c'}), '(name=n, commit_sha=c)\n', (15759, 15781), False, 'from ogr.abstract import PRStatus, GitTag, CommitFlag, CommitComment, CommitStatus, PullRequest, Issue, IssueStatus, Release, AccessLevel\n'), ((18627, 18660), 'ogr.utils.filter_paths', 'filter_paths', (['paths', 'filter_regex'], {}), '(paths, filter_regex)\n', (18639, 18660), False, 'from ogr.utils import 
RequestResponse, filter_paths, indirect\n')]
|
from __future__ import (print_function, absolute_import)
import numpy as np
from .profiles import get_profiles
def interpolate_profile(nlower, nupper, nelec, temp, with_doppler=False):
    """
    Interpolate profile tables of Lemke 1997 to get a Stark broadened line profile

    Performs bilinear interpolation in (log10 ne, log10 T) between the
    tabulated grid points bracketing the requested electron density and
    temperature.

    Parameters
    ----------
    nlower : int
        lower level of transition
    nupper : int
        upper level of transition
    nelec : float
        number density of electrons in cm**-3
    temp : float
        temperature in K
    with_doppler : bool, optional
        passed through to `get_profiles` to select the tables that include
        Doppler broadening, default False

    Returns
    -------
    log_alpha : `np.ndarray`
        log10 alpha values at which the profile is sampled
    profile : `np.ndarray`
        stark profile
    f0 : float
        normal field strength (in esu); conversion between delta-alpha and
        delta-lambda

    Raises
    ------
    ValueError
        if `nelec` or `temp` fall outside the tabulated range.
    """
    meta, flags, data = get_profiles(nlower, nupper, with_doppler)
    f0 = 1.25e-9*nelec**(2./3.)  # normal field strength f0 (in esu)
    log_ne = np.log10(nelec)
    log_t = np.log10(temp)
    # fractional indices into the regular (log ne, log T) grid
    log_ne_index = (log_ne - meta.log_ne_min) / meta.log_ne_increment
    log_t_index = (log_t - meta.log_t_min) / meta.log_t_increment
    low_ne_index = int(np.floor(log_ne_index))
    high_ne_index = int(np.ceil(log_ne_index))
    low_t_index = int(np.floor(log_t_index))
    high_t_index = int(np.ceil(log_t_index))
    # check we are within bounds
    if low_ne_index < 0 or high_ne_index > meta.num_ne:
        raise ValueError("electron density outside allowed range 10**10 to 10**18 cm**-3")
    if low_t_index < 0 or high_t_index > meta.num_temp:
        raise ValueError("temperature outside allowed range 2500 to 160000 K")
    # grid values bracketing the requested point
    ne1 = meta.log_ne_min + low_ne_index*meta.log_ne_increment
    ne2 = meta.log_ne_min + high_ne_index*meta.log_ne_increment
    t1 = meta.log_t_min + low_t_index*meta.log_t_increment
    t2 = meta.log_t_min + high_t_index*meta.log_t_increment
    # profiles at the four surrounding grid points
    p1 = data[low_ne_index, low_t_index]
    p2 = data[high_ne_index, low_t_index]
    p3 = data[low_ne_index, high_t_index]
    p4 = data[high_ne_index, high_t_index]
    # interpolation weights; a zero weight handles the degenerate case where
    # the requested value lands exactly on a grid line (avoids 0/0)
    t_frac = 0.0 if t1 == t2 else (log_t - t1) / (t2 - t1)
    ne_frac = 0.0 if ne1 == ne2 else (log_ne - ne1) / (ne2 - ne1)
    # bilinear interpolation: first in temperature at each density, then in density
    r1 = p1 + (p3 - p1) * t_frac
    r3 = p2 + (p4 - p2) * t_frac
    profile = r1 + (r3 - r1) * ne_frac
    # alpha grid matching the profile samples
    log_alpha = meta.log_alpha_min + np.arange(meta.num_alpha) * meta.log_alpha_increment
    return log_alpha, profile, f0
|
[
"numpy.log10",
"numpy.arange",
"numpy.ceil",
"numpy.floor"
] |
[((871, 886), 'numpy.log10', 'np.log10', (['nelec'], {}), '(nelec)\n', (879, 886), True, 'import numpy as np\n'), ((899, 913), 'numpy.log10', 'np.log10', (['temp'], {}), '(temp)\n', (907, 913), True, 'import numpy as np\n'), ((1074, 1096), 'numpy.floor', 'np.floor', (['log_ne_index'], {}), '(log_ne_index)\n', (1082, 1096), True, 'import numpy as np\n'), ((1122, 1143), 'numpy.ceil', 'np.ceil', (['log_ne_index'], {}), '(log_ne_index)\n', (1129, 1143), True, 'import numpy as np\n'), ((1167, 1188), 'numpy.floor', 'np.floor', (['log_t_index'], {}), '(log_t_index)\n', (1175, 1188), True, 'import numpy as np\n'), ((1213, 1233), 'numpy.ceil', 'np.ceil', (['log_t_index'], {}), '(log_t_index)\n', (1220, 1233), True, 'import numpy as np\n'), ((2774, 2799), 'numpy.arange', 'np.arange', (['meta.num_alpha'], {}), '(meta.num_alpha)\n', (2783, 2799), True, 'import numpy as np\n')]
|
# Copyright (c) OpenMMLab. All rights reserved.
import logging
from typing import Any, Dict, Optional, Sequence, Tuple, Union
import mmcv
import numpy as np
import torch
from torch.utils.data import Dataset
from mmdeploy.codebase.base import BaseTask
from mmdeploy.utils import Task, get_root_logger
from mmdeploy.utils.config_utils import get_input_shape
from .mmclassification import MMCLS_TASK
def process_model_config(model_cfg: mmcv.Config,
                         imgs: Union[str, np.ndarray],
                         input_shape: Optional[Sequence[int]] = None):
    """Adapt a classification model config to the given input kind.

    Args:
        model_cfg (mmcv.Config): The model config.
        imgs (str | np.ndarray): Input image(s); a path means the pipeline
            must load from file, an array means it must not.
        input_shape (list[int]): A list of two integer in (width, height)
            format specifying input shape. Default: None.

    Returns:
        mmcv.Config: the model config after processing.
    """
    cfg = model_cfg.deepcopy()
    pipeline = cfg.data.test.pipeline
    starts_with_load = pipeline[0]['type'] == 'LoadImageFromFile'
    if isinstance(imgs, str):
        # A file path was supplied: the pipeline has to begin by loading it.
        if not starts_with_load:
            pipeline.insert(0, dict(type='LoadImageFromFile'))
    elif starts_with_load:
        # An in-memory array was supplied: drop the file-loading step.
        pipeline.pop(0)
    # Warn when the requested shape disagrees with the pipeline's crop size.
    if input_shape is not None:
        if 'crop_size' in cfg.data.test.pipeline[2]:
            crop_size = cfg.data.test.pipeline[2]['crop_size']
            if tuple(input_shape) != (crop_size, crop_size):
                logger = get_root_logger()
                logger.warning(
                    f'`input shape` should be equal to `crop_size`: {crop_size},\
                        but given: {input_shape}')
    return cfg
@MMCLS_TASK.register_module(Task.CLASSIFICATION.value)
class Classification(BaseTask):
    """Classification task class.

    Args:
        model_cfg (mmcv.Config): Original PyTorch model config file.
        deploy_cfg (mmcv.Config): Deployment config file or loaded Config
            object.
        device (str): A string represents device type.
    """

    def __init__(self, model_cfg: mmcv.Config, deploy_cfg: mmcv.Config,
                 device: str):
        super(Classification, self).__init__(model_cfg, deploy_cfg, device)

    def init_backend_model(self,
                           model_files: Sequence[str] = None,
                           **kwargs) -> torch.nn.Module:
        """Initialize backend model.

        Args:
            model_files (Sequence[str]): Input model files.

        Returns:
            nn.Module: An initialized backend model.
        """
        from .classification_model import build_classification_model
        model = build_classification_model(
            model_files, self.model_cfg, self.deploy_cfg, device=self.device)
        return model.eval()

    def init_pytorch_model(self,
                           model_checkpoint: Optional[str] = None,
                           cfg_options: Optional[Dict] = None,
                           **kwargs) -> torch.nn.Module:
        """Initialize torch model.

        Args:
            model_checkpoint (str): The checkpoint file of torch model,
                Default: None.
            cfg_options (dict): Optional config key-pair parameters.

        Returns:
            nn.Module: An initialized torch model generated by OpenMMLab
                codebases.
        """
        from mmcls.apis import init_model
        model = init_model(self.model_cfg, model_checkpoint, self.device,
                           cfg_options)
        return model.eval()

    def create_input(self,
                     imgs: Union[str, np.ndarray],
                     input_shape: Optional[Sequence[int]] = None) \
            -> Tuple[Dict, torch.Tensor]:
        """Create input for classifier.

        Args:
            imgs (Any): Input image(s), accepted data type are `str`,
                `np.ndarray`, `torch.Tensor`.
            input_shape (list[int]): A list of two integer in (width, height)
                format specifying input shape. Default: None.

        Returns:
            tuple: (data, img), meta information for the input image and input.
        """
        from mmcls.datasets.pipelines import Compose
        from mmcv.parallel import collate, scatter
        cfg = process_model_config(self.model_cfg, imgs, input_shape)
        if isinstance(imgs, str):
            data = dict(img_info=dict(filename=imgs), img_prefix=None)
        else:
            data = dict(img=imgs)
        test_pipeline = Compose(cfg.data.test.pipeline)
        data = test_pipeline(data)
        data = collate([data], samples_per_gpu=1)
        data['img'] = [data['img']]
        if self.device != 'cpu':
            # Move the collated batch onto the target device.
            data = scatter(data, [self.device])[0]
        return data, data['img']

    def visualize(self,
                  model: torch.nn.Module,
                  image: Union[str, np.ndarray],
                  result: list,
                  output_file: str,
                  window_name: str = '',
                  show_result: bool = False):
        """Visualize predictions of a model.

        Args:
            model (nn.Module): Input model.
            image (str | np.ndarray): Input image to draw predictions on.
            result (list): A list of predictions.
            output_file (str): Output file to save drawn image.
            window_name (str): The name of visualization window. Defaults to
                an empty string.
            show_result (bool): Whether to show result in windows.
                Default: False.
        """
        show_img = mmcv.imread(image) if isinstance(image, str) else image
        output_file = None if show_result else output_file
        pred_score = np.max(result)
        pred_label = np.argmax(result)
        result = {'pred_label': pred_label, 'pred_score': float(pred_score)}
        result['pred_class'] = model.CLASSES[result['pred_label']]
        return model.show_result(
            show_img,
            result,
            show=show_result,
            win_name=window_name,
            out_file=output_file)

    @staticmethod
    def run_inference(model: torch.nn.Module,
                      model_inputs: Dict[str, torch.Tensor]) -> list:
        """Run inference once for a classification model of mmcls.

        Args:
            model (nn.Module): Input model.
            model_inputs (dict): A dict containing model inputs tensor and
                meta info.

        Returns:
            list: The predictions of model inference.
        """
        return model(**model_inputs, return_loss=False)

    @staticmethod
    def get_partition_cfg(partition_type: str) -> Dict:
        """Get a certain partition config.

        Args:
            partition_type (str): A string specifying partition type.

        Returns:
            dict: A dictionary of partition config.
        """
        raise NotImplementedError('Not supported yet.')

    @staticmethod
    def get_tensor_from_input(input_data: Dict[str, Any]) -> torch.Tensor:
        """Get input tensor from input data.

        Args:
            input_data (tuple): Input data containing meta info and image
                tensor.

        Returns:
            torch.Tensor: An image in `Tensor`.
        """
        return input_data['img']

    @staticmethod
    def evaluate_outputs(model_cfg: mmcv.Config,
                         outputs: list,
                         dataset: Dataset,
                         metrics: Optional[str] = None,
                         out: Optional[str] = None,
                         metric_options: Optional[dict] = None,
                         format_only: bool = False,
                         log_file: Optional[str] = None) -> None:
        """Perform post-processing to predictions of model.

        Args:
            model_cfg (mmcv.Config): The model config.
            outputs (list): A list of predictions of model inference.
            dataset (Dataset): Input dataset to run test.
            metrics (str): Evaluation metrics, which depends on
                the codebase and the dataset, e.g., "mAP" in mmcls.
            out (str): Output result file in pickle format, Default: None.
            metric_options (dict): Custom options for evaluation, will be
                kwargs for dataset.evaluate() function. Default: None.
            format_only (bool): Format the output results without perform
                evaluation. It is useful when you want to format the result
                to a specific format and submit it to the test server.
                Default: False.
            log_file (str | None): The file to write the evaluation results.
                Defaults to `None` and the results will only print on stdout.
        """
        import warnings
        from mmcv.utils import get_logger
        logger = get_logger('test', log_file=log_file, log_level=logging.INFO)

        if metrics:
            results = dataset.evaluate(outputs, metrics, metric_options)
            for k, v in results.items():
                logger.info(f'{k} : {v:.2f}')
        else:
            warnings.warn('Evaluation metrics are not specified.')
            # No metric requested: report per-sample argmax predictions.
            scores = np.vstack(outputs)
            pred_score = np.max(scores, axis=1)
            pred_label = np.argmax(scores, axis=1)
            pred_class = [dataset.CLASSES[lb] for lb in pred_label]
            results = {
                'pred_score': pred_score,
                'pred_label': pred_label,
                'pred_class': pred_class
            }
            if not out:
                logger.info('the predicted result for the first element is '
                            f'pred_score = {pred_score[0]:.2f}, '
                            f'pred_label = {pred_label[0]} '
                            f'and pred_class = {pred_class[0]}. '
                            'Specify --out to save all results to files.')
        if out:
            logger.debug(f'writing results to {out}')
            mmcv.dump(results, out)

    def get_preprocess(self) -> Dict:
        """Get the preprocess information for SDK.

        Return:
            dict: Composed of the preprocess information.
        """
        input_shape = get_input_shape(self.deploy_cfg)
        cfg = process_model_config(self.model_cfg, '', input_shape)
        preprocess = cfg.data.test.pipeline
        return preprocess

    def get_postprocess(self) -> Dict:
        """Get the postprocess information for SDK.

        Return:
            dict: Composed of the postprocess information.
        """
        postprocess = self.model_cfg.model.head
        assert 'topk' in postprocess, 'model config lack topk'
        postprocess.topk = max(postprocess.topk)
        return postprocess

    def get_model_name(self) -> str:
        """Get the model name.

        Return:
            str: the name of the model.
        """
        # FIX: the original split each assert message over two lines without
        # parentheses, so the second string literal was a dead expression
        # statement and the message was truncated. Parenthesized implicit
        # concatenation restores the full message.
        assert 'backbone' in self.model_cfg.model, ('backbone not in model '
                                                    'config')
        assert 'type' in self.model_cfg.model.backbone, ('backbone contains '
                                                         'no type')
        name = self.model_cfg.model.backbone.type.lower()
        return name
|
[
"mmcv.utils.get_logger",
"numpy.argmax",
"mmcv.parallel.scatter",
"mmdeploy.utils.config_utils.get_input_shape",
"mmcls.datasets.pipelines.Compose",
"numpy.max",
"mmcv.dump",
"mmdeploy.utils.get_root_logger",
"warnings.warn",
"mmcls.apis.init_model",
"mmcv.parallel.collate",
"mmcv.imread",
"numpy.vstack"
] |
[((3527, 3597), 'mmcls.apis.init_model', 'init_model', (['self.model_cfg', 'model_checkpoint', 'self.device', 'cfg_options'], {}), '(self.model_cfg, model_checkpoint, self.device, cfg_options)\n', (3537, 3597), False, 'from mmcls.apis import init_model\n'), ((4615, 4646), 'mmcls.datasets.pipelines.Compose', 'Compose', (['cfg.data.test.pipeline'], {}), '(cfg.data.test.pipeline)\n', (4622, 4646), False, 'from mmcls.datasets.pipelines import Compose\n'), ((4697, 4731), 'mmcv.parallel.collate', 'collate', (['[data]'], {'samples_per_gpu': '(1)'}), '([data], samples_per_gpu=1)\n', (4704, 4731), False, 'from mmcv.parallel import collate, scatter\n'), ((5824, 5838), 'numpy.max', 'np.max', (['result'], {}), '(result)\n', (5830, 5838), True, 'import numpy as np\n'), ((5860, 5877), 'numpy.argmax', 'np.argmax', (['result'], {}), '(result)\n', (5869, 5877), True, 'import numpy as np\n'), ((8953, 9014), 'mmcv.utils.get_logger', 'get_logger', (['"""test"""'], {'log_file': 'log_file', 'log_level': 'logging.INFO'}), "('test', log_file=log_file, log_level=logging.INFO)\n", (8963, 9014), False, 'from mmcv.utils import get_logger\n'), ((10321, 10353), 'mmdeploy.utils.config_utils.get_input_shape', 'get_input_shape', (['self.deploy_cfg'], {}), '(self.deploy_cfg)\n', (10336, 10353), False, 'from mmdeploy.utils.config_utils import get_input_shape\n'), ((5688, 5706), 'mmcv.imread', 'mmcv.imread', (['image'], {}), '(image)\n', (5699, 5706), False, 'import mmcv\n'), ((9222, 9276), 'warnings.warn', 'warnings.warn', (['"""Evaluation metrics are not specified."""'], {}), "('Evaluation metrics are not specified.')\n", (9235, 9276), False, 'import warnings\n'), ((9298, 9316), 'numpy.vstack', 'np.vstack', (['outputs'], {}), '(outputs)\n', (9307, 9316), True, 'import numpy as np\n'), ((9342, 9364), 'numpy.max', 'np.max', (['scores'], {'axis': '(1)'}), '(scores, axis=1)\n', (9348, 9364), True, 'import numpy as np\n'), ((9390, 9415), 'numpy.argmax', 'np.argmax', (['scores'], {'axis': '(1)'}), 
'(scores, axis=1)\n', (9399, 9415), True, 'import numpy as np\n'), ((10098, 10121), 'mmcv.dump', 'mmcv.dump', (['results', 'out'], {}), '(results, out)\n', (10107, 10121), False, 'import mmcv\n'), ((1589, 1606), 'mmdeploy.utils.get_root_logger', 'get_root_logger', ([], {}), '()\n', (1604, 1606), False, 'from mmdeploy.utils import Task, get_root_logger\n'), ((4820, 4848), 'mmcv.parallel.scatter', 'scatter', (['data', '[self.device]'], {}), '(data, [self.device])\n', (4827, 4848), False, 'from mmcv.parallel import collate, scatter\n')]
|
import sys
import asyncio
def get_ioloop() -> asyncio.BaseEventLoop:
    """Return a usable event loop for the current thread, creating one if needed.

    On Windows, guarantees a ``ProactorEventLoop`` (required for subprocess
    support on older Python versions).
    """
    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        # Modern Python raises when the thread has no current event loop;
        # the original code crashed here instead of recovering.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    if sys.platform == 'win32' and not isinstance(loop, asyncio.ProactorEventLoop):
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    return loop
def async_corun(coroutine):
    """Drive *coroutine* to completion on the shared loop and return its result."""
    return get_ioloop().run_until_complete(coroutine)
def async_run(func):
    """Call the zero-argument coroutine function *func* and run it synchronously."""
    return get_ioloop().run_until_complete(func())
async def async_call(func, *args, **kwargs):
    """Await *func* if it is a coroutine function, otherwise call it directly.

    Returns None when *func* is neither a coroutine function nor callable.
    """
    if not asyncio.iscoroutinefunction(func):
        return func(*args, **kwargs) if callable(func) else None
    return await func(*args, **kwargs)
def sync_call(func, *args, **kwargs):
    """Invoke *func* synchronously, driving the event loop for coroutines.

    Returns None when *func* is neither a coroutine function nor callable.
    """
    if asyncio.iscoroutinefunction(func):
        return get_ioloop().run_until_complete(func(*args, **kwargs))
    if callable(func):
        return func(*args, **kwargs)
    return None
|
[
"asyncio.set_event_loop",
"asyncio.get_event_loop",
"asyncio.ProactorEventLoop",
"asyncio.iscoroutinefunction"
] |
[((82, 106), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (104, 106), False, 'import asyncio\n'), ((531, 564), 'asyncio.iscoroutinefunction', 'asyncio.iscoroutinefunction', (['func'], {}), '(func)\n', (558, 564), False, 'import asyncio\n'), ((718, 751), 'asyncio.iscoroutinefunction', 'asyncio.iscoroutinefunction', (['func'], {}), '(func)\n', (745, 751), False, 'import asyncio\n'), ((206, 233), 'asyncio.ProactorEventLoop', 'asyncio.ProactorEventLoop', ([], {}), '()\n', (231, 233), False, 'import asyncio\n'), ((242, 270), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (264, 270), False, 'import asyncio\n')]
|
from os.path import basename
from zipfile import ZipFile
from zipfile import BadZipFile
class ContentReader:
    """Extracts meeting-content XHTML strings from EPUB publication files."""

    def __init__(self):
        # Archive entry name fragments that never contain meeting content.
        self.__UNNEEDED_CONTENT_NAMES = [
            'OEBPS/images',
            'OEBPS/css',
            'OEBPS/pagenav',  # a number follows after 'pagenav'
            'OEBPS/content.opf',
            'OEBPS/cover.xhtml',
            'OEBPS/toc.ncx',
            'OEBPS/toc.xhtml',
            'META-INF',
            'mimetype',
            'extracted'
        ]

    def get_publication_extracts(self, pub_file):
        """Return the publication name and its meeting XHTML strings.

        Returns None (after printing a notice) when *pub_file* is not a
        valid EPUB/zip archive.
        """
        string_extracts = []
        try:
            with ZipFile(pub_file) as archive:
                for entry in archive.namelist():
                    if self._unneeded_entry(entry):
                        continue
                    document = archive.read(entry).decode('utf-8')
                    if self._is_a_meeting_xhtml(document):
                        string_extracts.append(document)
        except BadZipFile:
            print(f"'{pub_file.name}' is not an EPUB file. Skipping...")
            return
        publication_name = basename(pub_file.name).replace('.epub', '')
        return {'file_name': publication_name, 'string_extracts': string_extracts}

    def _unneeded_entry(self, entry_name):
        # True when the entry matches any known non-content path fragment.
        return any(fragment in entry_name
                   for fragment in self.__UNNEEDED_CONTENT_NAMES)

    @staticmethod
    def _is_a_meeting_xhtml(content_string):
        # A meeting document carries all three shaded section headers.
        required_markers = ('shadedHeader treasures',
                            'shadedHeader ministry',
                            'shadedHeader christianLiving')
        return all(marker in content_string for marker in required_markers)
|
[
"zipfile.ZipFile",
"os.path.basename"
] |
[((617, 634), 'zipfile.ZipFile', 'ZipFile', (['pub_file'], {}), '(pub_file)\n', (624, 634), False, 'from zipfile import ZipFile\n'), ((1145, 1168), 'os.path.basename', 'basename', (['pub_file.name'], {}), '(pub_file.name)\n', (1153, 1168), False, 'from os.path import basename\n')]
|
"""
Template-based email system for Python.
"""
import logging
import os
from typing import Dict
from typing import List
import jinja2
from premailer import Premailer
class DeliveryEngineNotInstalled(Exception):
    """
    This exception is raised when you attempt to use TemplateMail without a delivery engine installed.
    """
class DeliveryNotMade(Exception):
    """
    Raised when a delivery cannot be made.

    Attributes:
        details: Human-readable description of the failure.
        response: Optional raw response from the delivery backend.
    """

    def __init__(self, details, response=None):
        # Pass details to Exception so str(exc)/tracebacks are informative;
        # the original left the base class uninitialized, making str(exc) "".
        super().__init__(details)
        self.details = details
        self.response = response
class MailTemplate:
    """Renders HTML e-mail bodies from Jinja2 templates, optionally inlining CSS."""

    def __init__(self, template_dirs: List[str], delivery_engine=None, logger=None):
        """
        :param template_dirs: Directories containing templates.
        :param delivery_engine: Optional engine used to deliver rendered mail.
        :param logger: Optional logger; defaults to the "mailtemplate" logger.
        :raises ValueError: If any entry of *template_dirs* is not an existing
            directory.
        """
        _template_dirs = template_dirs
        for path in _template_dirs:
            # Validate with a real exception: the original used `assert`,
            # which is silently stripped when Python runs with -O.
            if not os.path.isdir(path):
                raise ValueError(
                    f"Template directory does not exist or is not a directory: {path!r}"
                )
        self.template_environment = jinja2.Environment(
            undefined=jinja2.StrictUndefined,
            loader=jinja2.FileSystemLoader(_template_dirs),
        )
        self.delivery_engine = delivery_engine
        self.logger = logger or logging.getLogger("mailtemplate")

    def _inline_css(self, html_body):
        """Return *html_body* with its CSS rules inlined into style attributes."""
        premailer = Premailer(allow_network=False,)
        return premailer.transform(html_body)

    def render(
        self,
        template_name: str,
        template_layout: str = "basic",
        options: Dict = None,
        *args,
        **kwargs
    ):
        """Render ``<layout>/<name>/content.html.jinja`` and return the HTML.

        :param template_name: Template directory name under the layout.
        :param template_layout: Layout directory name. Default: "basic".
        :param options: Optional overrides for the default options
            (``theme``, ``inline_css``).
        Remaining args/kwargs are passed to the Jinja2 template.
        """
        # @todo: validate for theme name
        default_options = {"theme": "light", "inline_css": True}
        # Merge caller overrides on top of the defaults without mutating them.
        final_options = {**default_options, **(options or {})}
        template_path = os.path.join(
            template_layout, template_name, "content.html.jinja"
        )
        render_template = self.template_environment.get_template(template_path)
        content = render_template.render(*args, **kwargs)
        if final_options["inline_css"]:
            content = self._inline_css(content)
        return content.strip()
# Public API of this module.
__all__ = ["MailTemplate", "DeliveryEngineNotInstalled", "DeliveryNotMade"]
|
[
"os.path.isdir",
"os.path.exists",
"jinja2.FileSystemLoader",
"premailer.Premailer",
"os.path.join",
"logging.getLogger"
] |
[((1238, 1268), 'premailer.Premailer', 'Premailer', ([], {'allow_network': '(False)'}), '(allow_network=False)\n', (1247, 1268), False, 'from premailer import Premailer\n'), ((1747, 1813), 'os.path.join', 'os.path.join', (['template_layout', 'template_name', '"""content.html.jinja"""'], {}), "(template_layout, template_name, 'content.html.jinja')\n", (1759, 1813), False, 'import os\n'), ((831, 851), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (845, 851), False, 'import os\n'), ((871, 890), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (884, 890), False, 'import os\n'), ((1145, 1178), 'logging.getLogger', 'logging.getLogger', (['"""mailtemplate"""'], {}), "('mailtemplate')\n", (1162, 1178), False, 'import logging\n'), ((1013, 1052), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (['_template_dirs'], {}), '(_template_dirs)\n', (1036, 1052), False, 'import jinja2\n')]
|
import os
import argparse
import yaml
from pathlib import Path
# Names of the pipeline modules a user may select on the command line.
modules = ['wgsRecomb', 'coreGen', 'coreRecomb','panRecomb', 'geneRecomb','ALL']
CWD = os.getcwd()
# declare an argparse variable
general_parser = argparse.ArgumentParser(prog="start_analysis", usage='%(prog)s MODULE [options]',
                description='Please always specify the program to use in the first argument, \
                \nor the whole pipeline will attemp to run',allow_abbrev=False,
                epilog='Enjoy the program! :)')
# postional arguments required to provide by user
general_parser.add_argument('MODULE', action='store', type=str,
                    help='Specify the module you would like to run',
                    choices=['ALL','wgsRecomb', 'coreGen', 'coreRecomb','panRecomb', 'geneRecomb'])
curren_wd = Path(os.getcwd())
# General Arguments
general_arguments = general_parser.add_argument_group("general arguments")
general_arguments.add_argument("-i","--input", type=str, help= "path to input dir with assemlies", metavar='', default=os.path.join(curren_wd, "../assemblies"))
general_arguments.add_argument("-p", "--name", type=str, help= "provide name prefix for the output files",metavar='', default= "bacterial_analysis" )
general_arguments.add_argument('-t','--thread', type=int, help = "num of threads", metavar='', default = 1)
general_arguments.add_argument('-o','--output', type=str, help = "path to the output directory", metavar='', default=os.path.join(CWD, 'results'))
# output annotation arguments
annotate_arguments = general_parser.add_argument_group("arguments for if you would like to add metadata to output")
annotate_arguments.add_argument("-M", "--addMetadata", default=False, action='store_true', help= "must have the flag specify if want to allow annotation")
annotate_arguments.add_argument("-a", "--annotate", type=str, help= "path to a csv file containing sample metadata", metavar='', default="")
annotate_arguments.add_argument("-s", "--sample", type=int, help= "integer indicates which column the sample name is in the metadata csv file", metavar='',default=1)
annotate_arguments.add_argument("-m", "--metadata", type=str,help= "metadata chosen to annotate ML tree/alignment after the sample name",metavar='',default="")
# wgsRecomb arguments
Recomb_arguments = general_parser.add_argument_group("arguments for wgsRecomb module")
Recomb_arguments.add_argument('-r','--ref', type=str, help = "reference (required for wgsRecomb module)", metavar='',default="")
Recomb_arguments.add_argument("-v", "--phage", type=str,help= "phage region identified for masking (bed file)",metavar='',default="")
Recomb_arguments.add_argument("-G", "--gubbins", type=str,help= "any additional Gubbins arguments (PLEASE start the string with space) ",metavar='',default="")
# coreGen arguments
roary_arguments = general_parser.add_argument_group("arguments for coreGen module")
roary_arguments.add_argument("-g", "--gff", type=str, help= "path to input dir with gff (this can replace input assemblies dir in coreGen module Must be gff3 files)", metavar='')
roary_arguments.add_argument("-c", "--core", type=int,help= "define core gene definition by percentage for coreGen module (default=99)",metavar='',default=99)
roary_arguments.add_argument("-k", "--kingdom", type=str,help= "specify the kingom of input assemlies for genome annotation (default=Bacteria)",metavar='',default="Bacteria")
roary_arguments.add_argument("-R", "--roary", type=str,help= "any additional roary arguments (PLEASE start the string with space)",metavar='',default="")
# fastGear modules alignments (for all three fastGear moudles)
fastgear_arguments = general_parser.add_argument_group("arguments for all three fastGear modules (coreRecomb, panRecomb, geneRecomb)")
fastgear_arguments.add_argument("--mcr_path", type=str, help="path to mcr runtime (need to install before use any of the fastGear module", metavar='', default=os.path.join(CWD, 'resources/mcr/'))
fastgear_arguments.add_argument("--fastgear_exe", type=str, help="path to the excutable of fastGear", metavar='', default=str(os.path.join(CWD,'resources/fastGEARpackageLinux64bit/')))
fastgear_arguments.add_argument("--fg","--fastgear_param", type=str, help="path to fastGear params", metavar='', default="")
# geneRecomb arguments
fastgear_gene_arguments = general_parser.add_argument_group("arguments for geneRecomb module")
fastgear_gene_arguments.add_argument("-n", "--alignment", type=str,help= "input alignment\n(either -n/-fl is required for geneRecomb module) ",metavar='',default="")
fastgear_gene_arguments.add_argument("-fl", "--alnlist", type=str,help= "input alignment list with path to gene alignments\n(either -n/-fl is required for geneRecomb module) ",metavar='',default="")
# Parse the CLI and enforce mutually-dependent argument rules.
args = general_parser.parse_args()
# geneRecomb needs either a single alignment (-n) or an alignment list (-fl).
if args.MODULE == "geneRecomb" and (args.alignment == "" and args.alnlist == ""):
    general_parser.error("gene alignment/alignment list (-n/-fl) must provided for geneRecomb module")
# -n and -fl are mutually exclusive.
if args.alignment != "" and args.alnlist != "":
    general_parser.error("please do not specify gene file and list and same time")
# Module-level globals consumed by get_geneNames/get_annotated/get_output below.
MODULE = args.MODULE
NAME=args.name
INPUT=args.input
REF=args.ref
OUT=args.output
THREAD=args.thread
GFF=args.gff
ROARY=args.roary
ADDANOT=args.addMetadata
ANOT=args.annotate
SAMPLE=args.sample
META=args.metadata
PHAGE=args.phage
GUBBINS=args.gubbins
CORE=args.core
KINGDOM=args.kingdom
ALN=args.alignment
FL=args.alnlist
# True when a single alignment file (rather than a list) was given.
SINGLE= True if ALN != "" else False
FASTGEAR_EXE=args.fastgear_exe
FASTGEAR_PARAM=args.fg if args.fg != "" else str(os.path.join(FASTGEAR_EXE,'fG_input_specs.txt'))
# MATLAB Compiler Runtime paths needed by the fastGEAR binaries.
MCR_PATH=str(os.path.join(args.mcr_path,'v901'))
LD_LIB_PATH=str(os.path.join(MCR_PATH,'runtime/glnxa64:')) + \
        str(os.path.join(MCR_PATH , 'bin/glnxa64:')) +\
        str(os.path.join(MCR_PATH , 'sys/os/glnxa64'))
# open
# Build FILELIST: alignment paths from the list file, a single entry for -n,
# or an empty string when neither was supplied.
if FL != "":
    with open(FL) as f:
        FILELIST = f.readlines()
    FILELIST = [x.strip() for x in FILELIST]
elif ALN !="":
    FILELIST = [ALN]
else:
    FILELIST = ""
def get_geneNames():
    """Collect gene names from the single alignment (ALN) or the list file (FL).

    A gene name is the file's basename up to the first dot. Returns a list of
    names; when neither input was supplied the result is [''] (split of "").
    """
    if ALN != "":
        joined = os.path.basename(ALN).split(".")[0]
    elif FL != "":  # list input
        collected = []
        with open(FL) as handle:
            for line in handle:
                collected.append(os.path.basename(line).split(".")[0])
        joined = ",".join(collected)
    else:
        joined = ""
    return joined.split(",")
GENE_NAME=get_geneNames()
def get_annotated(module):
    """Return a comma-separated string of metadata-annotated output files for *module*.

    Paths are built from the module-level OUT and NAME globals. geneRecomb and
    panRecomb have no annotated outputs and yield an empty string.

    FIX: the original assigned ``ADDANOT = False`` inside the geneRecomb and
    panRecomb branches; that only created an unused function-local variable and
    never changed the global flag, so those dead assignments were removed.
    """
    anot_files=None
    if module == "coreGen":
        anot_files=str(os.path.join(OUT,"roary","roary_iqtree",str(NAME + "_meta.coreConcate.newick"))) + "," + str(os.path.join(OUT,"roary",str(NAME + "_coreConcate_meta.fasta")))
    if module == "wgsRecomb":
        anot_files=str(os.path.join(OUT, "gubbins",str(NAME + "_meta.recombFreeSnpsAtcg.fasta"))) +"," +str(os.path.join(OUT ,'gubbins','iqtree' ,str(NAME + "_meta.GubbinsSNPs.newick")))
    if module == "coreRecomb":
        anot_files=str(os.path.join(OUT,"fastgear_core" , "fastgear_iqtree" , str(NAME + "_meta.coreSNPs.newick"))) + "," + str(os.path.join(OUT , "fastgear_core",str(NAME + "_core_mask_snp_meta.fasta")))
    if module == "ALL":
        anot_files=str(os.path.join(OUT,"roary","roary_iqtree",str(NAME + "_meta.coreConcate.newick"))) +","\
        + str(os.path.join(OUT,"roary",str(NAME + "_coreConcate_meta.fasta"))) + ","\
        + str(os.path.join(OUT, "gubbins",str(NAME + "_meta.recombFreeSnpsAtcg.fasta"))) +","\
        +str(os.path.join(OUT ,'gubbins','iqtree' ,str(NAME + "_meta.GubbinsSNPs.newick"))) +","\
        + str(os.path.join(OUT,"fastgear_core" , "fastgear_iqtree" , str(NAME + "_meta.coreSNPs.newick"))) + ","\
        + str(os.path.join(OUT , "fastgear_core",str(NAME + "_core_mask_snp_meta.fasta")))
    if module == "geneRecomb":
        anot_files=""
    if module == "panRecomb":
        anot_files=""
    return anot_files
def get_output(module):
    """Build the {'output': ...} mapping of final pipeline targets for *module*.

    The value is a comma-separated string of output file paths, extended with
    the metadata-annotated files when the global ADDANOT flag is set. Uses the
    module-level OUT, NAME, SINGLE, GENE_NAME and ADDANOT globals.
    """
    if module == "coreGen":
        output_files=str(os.path.join(OUT,'roary',"roary_iqtree" , str(NAME +".treefile")))
        if ADDANOT:
            output_files=str(output_files) + ","+get_annotated(module)
    elif module == "wgsRecomb":
        output_files=str(os.path.join(OUT , "gubbins", "iqtree" , str(NAME + ".recombFreeSnpsAtcg.treefile")))
        if ADDANOT:
            output_files=str(output_files) + ","+get_annotated(module)
    elif module == "panRecomb":
        output_files=str(os.path.join(OUT , "fastgear" , "plot_pangenome/pan_fastgear_plot_recombination_count.pdf"))
        if ADDANOT:
            output_files=str(output_files) + ","+get_annotated(module)
    elif module == "coreRecomb":
        output_files=str(os.path.join(OUT , "fastgear_core" , "plot_coregenome/core_fastgear_plot_recombination_count.pdf")) + ","\
        + str(os.path.join(OUT, "fastgear_core" , "fastgear_iqtree" , str(NAME + "_core_mask_snp.treefile")))
    elif module == "geneRecomb":
        if SINGLE:
            output_files=",".join([str(os.path.join(OUT,"fastgear_gene" ,GENE_NAME[0],str(GENE_NAME[0] + ".mat")))])
        else:
            output_files=",".join([str(os.path.join(OUT,"fastgear_gene" ,file,str(file + ".mat"))) for file in GENE_NAME])
    elif module == "ALL":
        output_files=str(os.path.join(OUT,"roary","roary_iqtree" , str(NAME +".treefile")))+ ","\
        + str(os.path.join(OUT , "gubbins", "iqtree" , str(NAME + ".recombFreeSnpsAtcg.treefile")))+ ","\
        + str(os.path.join(OUT , "fastgear_core" , "plot_coregenome/core_fastgear_plot_recombination_count.pdf")) + ","\
        + str(os.path.join(OUT, "fastgear_core" , "fastgear_iqtree" , str(NAME + "_core_mask_snp.treefile"))) + ","\
        + str(os.path.join(OUT , "fastgear_core" , "plot_coregenome/core_fastgear_plot_recombination_count.pdf"))
        if ADDANOT:
            output_files=str(output_files) + "," + get_annotated(module)
    # FIX: the original tested `output_files[-1] is ","` — identity comparison
    # with a string literal only worked because CPython interns one-character
    # strings (and emits a SyntaxWarning). Use equality instead.
    output_files = output_files[:-1] if output_files[-1] == "," else output_files
    output={'output': output_files}
    return output
# construct the config file
# Snakemake configuration assembled from the parsed CLI globals above.
config = {'project_name': NAME,
        'asm_dir': INPUT,
        'output_dir': OUT,
        'reference': REF,
        'threads_num': THREAD,
        'sample_metadata': ANOT,
        'metadata_include': META,
        'biosample_column': SAMPLE,
        'gff_dir': GFF,
        'kingdom': KINGDOM,
        'define_core': CORE,
        'phage_region':PHAGE,
        'LD_LIBRARY_PATH': LD_LIB_PATH,
        'fastGear_exe_path': FASTGEAR_EXE,
        'mcr_path': MCR_PATH,
        'fastGear_params': FASTGEAR_PARAM,
        'fastgear_gene_file_list': FILELIST,
        'roary':ROARY,
        'gubbins':GUBBINS}
# Resolve the package directory so the config lands next to the Snakefile.
BactPrep_path = os.path.abspath(os.path.dirname(__file__))
# Add the module-specific target files to the config.
config.update(get_output(MODULE))
with open(os.path.join(BactPrep_path,"config/config.yaml"), "w") as configfile:
    yaml.dump(config,configfile)
if __name__ == "__main__":
    # Run Snakemake from the package directory with the requested parallelism.
    os.chdir(BactPrep_path)
    os.system ("snakemake --cores %d --use-conda"%THREAD)
|
[
"argparse.ArgumentParser",
"os.path.basename",
"os.getcwd",
"os.path.dirname",
"yaml.dump",
"os.system",
"os.path.join",
"os.chdir"
] |
[((152, 163), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (161, 163), False, 'import os\n'), ((214, 487), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""start_analysis"""', 'usage': '"""%(prog)s MODULE [options]"""', 'description': '"""Please always specify the program to use in the first argument, \nor the whole pipeline will attemp to run"""', 'allow_abbrev': '(False)', 'epilog': '"""Enjoy the program! :)"""'}), '(prog=\'start_analysis\', usage=\n \'%(prog)s MODULE [options]\', description=\n """Please always specify the program to use in the first argument, \nor the whole pipeline will attemp to run"""\n , allow_abbrev=False, epilog=\'Enjoy the program! :)\')\n', (237, 487), False, 'import argparse\n'), ((736, 747), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (745, 747), False, 'import os\n'), ((5536, 5571), 'os.path.join', 'os.path.join', (['args.mcr_path', '"""v901"""'], {}), "(args.mcr_path, 'v901')\n", (5548, 5571), False, 'import os\n'), ((10706, 10731), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (10721, 10731), False, 'import os\n'), ((10856, 10885), 'yaml.dump', 'yaml.dump', (['config', 'configfile'], {}), '(config, configfile)\n', (10865, 10885), False, 'import yaml\n'), ((10919, 10942), 'os.chdir', 'os.chdir', (['BactPrep_path'], {}), '(BactPrep_path)\n', (10927, 10942), False, 'import os\n'), ((10947, 11001), 'os.system', 'os.system', (["('snakemake --cores %d --use-conda' % THREAD)"], {}), "('snakemake --cores %d --use-conda' % THREAD)\n", (10956, 11001), False, 'import os\n'), ((966, 1006), 'os.path.join', 'os.path.join', (['curren_wd', '"""../assemblies"""'], {}), "(curren_wd, '../assemblies')\n", (978, 1006), False, 'import os\n'), ((1385, 1413), 'os.path.join', 'os.path.join', (['CWD', '"""results"""'], {}), "(CWD, 'results')\n", (1397, 1413), False, 'import os\n'), ((3848, 3883), 'os.path.join', 'os.path.join', (['CWD', '"""resources/mcr/"""'], {}), "(CWD, 'resources/mcr/')\n", (3860, 3883), 
False, 'import os\n'), ((5474, 5522), 'os.path.join', 'os.path.join', (['FASTGEAR_EXE', '"""fG_input_specs.txt"""'], {}), "(FASTGEAR_EXE, 'fG_input_specs.txt')\n", (5486, 5522), False, 'import os\n'), ((5699, 5739), 'os.path.join', 'os.path.join', (['MCR_PATH', '"""sys/os/glnxa64"""'], {}), "(MCR_PATH, 'sys/os/glnxa64')\n", (5711, 5739), False, 'import os\n'), ((10782, 10831), 'os.path.join', 'os.path.join', (['BactPrep_path', '"""config/config.yaml"""'], {}), "(BactPrep_path, 'config/config.yaml')\n", (10794, 10831), False, 'import os\n'), ((4011, 4068), 'os.path.join', 'os.path.join', (['CWD', '"""resources/fastGEARpackageLinux64bit/"""'], {}), "(CWD, 'resources/fastGEARpackageLinux64bit/')\n", (4023, 4068), False, 'import os\n'), ((5588, 5630), 'os.path.join', 'os.path.join', (['MCR_PATH', '"""runtime/glnxa64:"""'], {}), "(MCR_PATH, 'runtime/glnxa64:')\n", (5600, 5630), False, 'import os\n'), ((5643, 5681), 'os.path.join', 'os.path.join', (['MCR_PATH', '"""bin/glnxa64:"""'], {}), "(MCR_PATH, 'bin/glnxa64:')\n", (5655, 5681), False, 'import os\n'), ((6012, 6033), 'os.path.basename', 'os.path.basename', (['ALN'], {}), '(ALN)\n', (6028, 6033), False, 'import os\n'), ((8547, 8640), 'os.path.join', 'os.path.join', (['OUT', '"""fastgear"""', '"""plot_pangenome/pan_fastgear_plot_recombination_count.pdf"""'], {}), "(OUT, 'fastgear',\n 'plot_pangenome/pan_fastgear_plot_recombination_count.pdf')\n", (8559, 8640), False, 'import os\n'), ((8791, 8891), 'os.path.join', 'os.path.join', (['OUT', '"""fastgear_core"""', '"""plot_coregenome/core_fastgear_plot_recombination_count.pdf"""'], {}), "(OUT, 'fastgear_core',\n 'plot_coregenome/core_fastgear_plot_recombination_count.pdf')\n", (8803, 8891), False, 'import os\n'), ((9842, 9942), 'os.path.join', 'os.path.join', (['OUT', '"""fastgear_core"""', '"""plot_coregenome/core_fastgear_plot_recombination_count.pdf"""'], {}), "(OUT, 'fastgear_core',\n 'plot_coregenome/core_fastgear_plot_recombination_count.pdf')\n", (9854, 9942), 
False, 'import os\n'), ((6174, 6196), 'os.path.basename', 'os.path.basename', (['line'], {}), '(line)\n', (6190, 6196), False, 'import os\n'), ((9576, 9676), 'os.path.join', 'os.path.join', (['OUT', '"""fastgear_core"""', '"""plot_coregenome/core_fastgear_plot_recombination_count.pdf"""'], {}), "(OUT, 'fastgear_core',\n 'plot_coregenome/core_fastgear_plot_recombination_count.pdf')\n", (9588, 9676), False, 'import os\n')]
|
from telegram import InlineKeyboardButton, InlineKeyboardMarkup
from django.contrib.auth import get_user_model
from django.test import TestCase
from .utils import create_reply_markup, create_users_list
User = get_user_model()
class StubUser:
    """Minimal stand-in for a Django user.

    Exposes only the attributes and the ``get_full_name`` method that the
    bot utility functions under test read.
    """

    def __init__(self, name, t_user, t_id):
        # Mirror the fields create_users_list builds its mention links from.
        self.name = name
        self.telegram_user = t_user
        self.telegram_id = t_id

    def get_full_name(self):
        """Return the display name, mimicking Django's ``User.get_full_name()``."""
        return self.name
class BotUtilitiesTests(TestCase):
    """Unit tests for the bot's helper functions.

    Checks that ``create_reply_markup`` builds the same structures as
    manually constructed ``InlineKeyboardMarkup`` objects (compared via
    ``to_dict``), and that ``create_users_list`` renders the expected
    Telegram Markdown mention strings.
    """
    def setUp(self):
        # Lightweight stand-ins for Django users: (name, telegram_user, telegram_id).
        self.u1 = StubUser('u1', 'u1', '1111')
        self.u2 = StubUser('u2', 'u2', '2222')
        self.u3 = StubUser('u3', 'u3', '3333')
    def test_create_reply_markup(self):
        '''create_reply_markup returns same structures as if made manually'''
        # Single type: callback_data
        # A (text, data) 2-tuple is expected to produce a callback button.
        m1 = InlineKeyboardMarkup([[
            InlineKeyboardButton('single_data_t1', callback_data='single_data_d1'),
            InlineKeyboardButton('single_data_t2', callback_data='single_data_d2'),
        ]])
        m2 = create_reply_markup([
            ('single_data_t1', 'single_data_d1'),
            ('single_data_t2', 'single_data_d2'),
        ])
        self.assertDictEqual(
            m1.to_dict(), m2.to_dict(),
            'single type (callback_data) reply_markup not working'
        )
        # Single type: url
        # A (text, None, url) 3-tuple is expected to produce a URL button.
        m1 = InlineKeyboardMarkup([[
            InlineKeyboardButton('single_data_t1', url='single_data_u1'),
            InlineKeyboardButton('single_data_t2', url='single_data_u2'),
        ]])
        m2 = create_reply_markup([
            ('single_data_t1', None, 'single_data_u1'),
            ('single_data_t2', None, 'single_data_u2'),
        ])
        self.assertDictEqual(
            m1.to_dict(), m2.to_dict(),
            'single type (url) reply_markup not working'
        )
        # Mixed type: callback_data and url buttons in the same row
        m1 = InlineKeyboardMarkup([[
            InlineKeyboardButton('mixed_t1', callback_data='mixed_d1'),
            InlineKeyboardButton('mixed_t2', url='mixed_u1'),
            InlineKeyboardButton('mixed_t3', callback_data='mixed_d2'),
            InlineKeyboardButton('mixed_t4', url='mixed_u2'),
        ]])
        m2 = create_reply_markup([
            ('mixed_t1', 'mixed_d1'),
            ('mixed_t2', None, 'mixed_u1'),
            ('mixed_t3', 'mixed_d2'),
            ('mixed_t4', None, 'mixed_u2'),
        ])
        self.assertDictEqual(
            m1.to_dict(), m2.to_dict(),
            'mixed types reply_markup not working'
        )
        # Multiple rows single type: each positional argument is one keyboard row.
        m1 = InlineKeyboardMarkup([
            [InlineKeyboardButton('multirow_single_t1', callback_data='multirow_single_d1')],
            [InlineKeyboardButton('multirow_single_t2', callback_data='multirow_single_d2')],
        ])
        m2 = create_reply_markup(
            [('multirow_single_t1', 'multirow_single_d1')],
            [('multirow_single_t2', 'multirow_single_d2')],
        )
        self.assertDictEqual(
            m1.to_dict(), m2.to_dict(),
            'multiple rows single type reply_markup not working'
        )
        # Multiple rows multiple type
        m1 = InlineKeyboardMarkup([
            [InlineKeyboardButton('multirow_mult_t1', callback_data='multirow_mult_d1')],
            [InlineKeyboardButton('multirow_mult_t2', url='multirow_mult_u1')],
            [InlineKeyboardButton('multirow_mult_t3', callback_data='multirow_mult_d2')],
            [InlineKeyboardButton('multirow_mult_t4', url='multirow_mult_u2')],
        ])
        m2 = create_reply_markup(
            [('multirow_mult_t1', 'multirow_mult_d1')],
            [('multirow_mult_t2', None, 'multirow_mult_u1')],
            [('multirow_mult_t3', 'multirow_mult_d2')],
            [('multirow_mult_t4', None, 'multirow_mult_u2')],
        )
        self.assertDictEqual(
            m1.to_dict(), m2.to_dict(),
            'multiple rows multiple type reply_markup not working'
        )
    def test_create_users_list(self):
        '''Checks the returned values in create_users_list for valid inputs'''
        # No user -> placeholder message
        users = []
        l1 = '\nNo hay usuarios para mostrar...'
        l2 = create_users_list(users)
        self.assertEqual(l1, l2, 'empty list generated string not working')
        # Single user -> one Markdown mention link keyed by telegram_id
        users = [self.u1]
        l1 = '\n[u1](tg://user?id=1111)'
        l2 = create_users_list(users)
        self.assertEqual(l1, l2, 'single users generated string not working')
        # Multiple users -> one mention link per line
        users = [self.u1, self.u2, self.u3]
        l1 = '\n[u1](tg://user?id=1111)\n[u2](tg://user?id=2222)\n[u3](tg://user?id=3333)'
        l2 = create_users_list(users)
        self.assertEqual(l1, l2, 'multiple users generated string not working')
|
[
"telegram.InlineKeyboardButton",
"django.contrib.auth.get_user_model"
] |
[((212, 228), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (226, 228), False, 'from django.contrib.auth import get_user_model\n'), ((846, 916), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""single_data_t1"""'], {'callback_data': '"""single_data_d1"""'}), "('single_data_t1', callback_data='single_data_d1')\n", (866, 916), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((930, 1000), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""single_data_t2"""'], {'callback_data': '"""single_data_d2"""'}), "('single_data_t2', callback_data='single_data_d2')\n", (950, 1000), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((1386, 1446), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""single_data_t1"""'], {'url': '"""single_data_u1"""'}), "('single_data_t1', url='single_data_u1')\n", (1406, 1446), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((1460, 1520), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""single_data_t2"""'], {'url': '"""single_data_u2"""'}), "('single_data_t2', url='single_data_u2')\n", (1480, 1520), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((1925, 1983), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""mixed_t1"""'], {'callback_data': '"""mixed_d1"""'}), "('mixed_t1', callback_data='mixed_d1')\n", (1945, 1983), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((1997, 2045), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""mixed_t2"""'], {'url': '"""mixed_u1"""'}), "('mixed_t2', url='mixed_u1')\n", (2017, 2045), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((2059, 2117), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""mixed_t3"""'], {'callback_data': '"""mixed_d2"""'}), "('mixed_t3', callback_data='mixed_d2')\n", (2079, 2117), False, 'from telegram 
import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((2131, 2179), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""mixed_t4"""'], {'url': '"""mixed_u2"""'}), "('mixed_t4', url='mixed_u2')\n", (2151, 2179), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((2622, 2700), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""multirow_single_t1"""'], {'callback_data': '"""multirow_single_d1"""'}), "('multirow_single_t1', callback_data='multirow_single_d1')\n", (2642, 2700), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((2716, 2794), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""multirow_single_t2"""'], {'callback_data': '"""multirow_single_d2"""'}), "('multirow_single_t2', callback_data='multirow_single_d2')\n", (2736, 2794), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((3207, 3281), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""multirow_mult_t1"""'], {'callback_data': '"""multirow_mult_d1"""'}), "('multirow_mult_t1', callback_data='multirow_mult_d1')\n", (3227, 3281), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((3297, 3361), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""multirow_mult_t2"""'], {'url': '"""multirow_mult_u1"""'}), "('multirow_mult_t2', url='multirow_mult_u1')\n", (3317, 3361), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((3377, 3451), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""multirow_mult_t3"""'], {'callback_data': '"""multirow_mult_d2"""'}), "('multirow_mult_t3', callback_data='multirow_mult_d2')\n", (3397, 3451), False, 'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n'), ((3467, 3531), 'telegram.InlineKeyboardButton', 'InlineKeyboardButton', (['"""multirow_mult_t4"""'], {'url': '"""multirow_mult_u2"""'}), "('multirow_mult_t4', url='multirow_mult_u2')\n", (3487, 3531), False, 
'from telegram import InlineKeyboardButton, InlineKeyboardMarkup\n')]
|
import argparse
import numpy as np
import pandas as pd
import os
from tqdm import tqdm
import torch.nn as nn
from torch import optim
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader
import torch
#from apex import amp
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from torch.utils.data.distributed import DistributedSampler
import random
import re
import json
from transformers import BertTokenizer, AdamW, BertModel, BertPreTrainedModel, BertConfig, get_linear_schedule_with_warmup
def get_class_accuracy(logits, labels):
    """Return ``(accuracy, batch_size)`` for classification logits.

    ``logits`` is a ``(batch, num_classes)`` tensor; ``labels`` is a 1-D
    array-like of gold class ids.

    Bug fix: the original called ``np.sum(predictions=labels)``, which passes
    the labels as an (invalid) keyword argument and raises ``TypeError``
    instead of counting matches. The element-wise comparison below is what
    was intended.
    """
    predictions = np.argmax(F.softmax(logits, dim=1).cpu().data.numpy(), axis=1)
    # Count element-wise matches between predicted and gold class ids.
    return np.float32(np.sum(predictions == labels)) / len(labels), len(labels)
def get_position_accuracy(logits, labels):
    """Return ``(accuracy, n_valid)`` over positions whose label is >= 0.

    Labels below zero (the ignore marker) are excluded from both the correct
    count and the denominator. When every label is ignored, the denominator
    falls back to 1e-7 so the division stays defined (and that tiny value is
    returned as the count).
    """
    predicted = np.argmax(F.softmax(logits, dim=1).cpu().data.numpy(), axis=1)
    # Keep only (prediction, target) pairs with a real (non-negative) target.
    scored_pairs = [(p, t) for p, t in zip(predicted, labels) if t >= 0]
    n_correct = sum(1 for p, t in scored_pairs if p == t)
    n_valid = len(scored_pairs)
    if n_valid == 0:
        n_valid = 1e-7  # avoid division by zero on an all-ignored batch
    return np.float32(n_correct) / n_valid, n_valid
class AverageMeter(object):
    """Tracks the latest value and a running, count-weighted average."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Drop all accumulated statistics back to zero.
        self.val = self.avg = self.sum = self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running mean."""
        self.val = val
        self.count = self.count + n
        self.sum = self.sum + val * n
        self.avg = self.sum / self.count
class TFQADataset(Dataset):
    """Trivial Dataset over a sequence of example ids.

    Items are the ids themselves; the collator is responsible for turning an
    id into model tensors.
    """

    def __init__(self, id_list):
        # Kept by reference (no copy), matching DataLoader expectations.
        self.id_list = id_list

    def __len__(self):
        """Number of examples."""
        return len(self.id_list)

    def __getitem__(self, index):
        """Return the raw example id at ``index``."""
        return self.id_list[index]
class Collator(object):
    """Collates example ids into training batches of candidate pairs.

    Each example id yields two rows: row ``2*i`` encodes the question with
    the annotated (positive) long-answer candidate, row ``2*i+1`` encodes the
    question with a sampled negative candidate. Sequences are
    ``[CLS] question [SEP] candidate [SEP]`` padded to ``max_seq_len``.
    """
    def __init__(self, data_dict, tokenizer, max_seq_len=384, max_question_len=64):
        # data_dict: example_id -> preprocessed fields (built in main()).
        self.data_dict = data_dict
        self.tokenizer = tokenizer
        self.max_seq_len = max_seq_len
        self.max_question_len = max_question_len
    def _get_positive_input_ids(self, data, question_tokens):
        """Tokenize the positive candidate.

        Returns ``(candidate_tokens, start_position, end_position)`` where
        the positions locate the short answer inside the final input sequence
        (question offset included), or -1/-1 when no short answer maps in.
        """
        max_answer_tokens = self.max_seq_len-len(question_tokens)-3 # [CLS],[SEP],[SEP]
        candidate_start = data['positive_start']
        candidate_end = data['positive_end']
        candidate_words = data['positive_text']
        # words_to_tokens_index[j] = index of the first wordpiece of word j.
        words_to_tokens_index = []
        candidate_tokens = []
        for i, word in enumerate(candidate_words):
            words_to_tokens_index.append(len(candidate_tokens))
            if re.match(r'<.+>', word): # remove paragraph tag
                continue
            tokens = self.tokenizer.tokenize(word) # wordpiece-tokenize one word
            if len(candidate_tokens)+len(tokens) > max_answer_tokens:
                break  # truncate the candidate to fit the sequence budget
            candidate_tokens += tokens
        start_position = -1
        end_position = -1
        if data['annotations'][0]['short_answers']:
            start_position1 = data['annotations'][0]['short_answers'][0]['start_token']
            end_position1 = data['annotations'][0]['short_answers'][0]['end_token']
            # Only label the span if it lies inside this candidate and survived
            # truncation; +len(question_tokens)+2 accounts for [CLS] q [SEP].
            if (start_position1 >= candidate_start and end_position1 <= candidate_end) and ((end_position1-candidate_start) < len(words_to_tokens_index)):
                start_position = words_to_tokens_index[start_position1-candidate_start]+len(question_tokens)+2
                end_position = words_to_tokens_index[end_position1-candidate_start]+len(question_tokens)+2
        return candidate_tokens, start_position, end_position
    def _get_negative_input_ids(self, data, question_tokens):
        """Tokenize the negative candidate; span labels are always (-1, -1)."""
        max_answer_tokens = self.max_seq_len-len(question_tokens)-3 # [CLS],[SEP],[SEP]
        candidate_start = data['negative_start']
        candidate_end = data['negative_end']
        candidate_words = data['negative_text']
        words_to_tokens_index = []
        candidate_tokens = []
        for i, word in enumerate(candidate_words):
            words_to_tokens_index.append(len(candidate_tokens))
            if re.match(r'<.+>', word): # remove paragraph tag
                continue
            tokens = self.tokenizer.tokenize(word)
            if len(candidate_tokens)+len(tokens) > max_answer_tokens:
                break
            candidate_tokens += tokens
        start_position = -1
        end_position = -1
        return candidate_tokens, start_position, end_position
    def __call__(self, batch_ids):
        """Build model-ready tensors for ``batch_ids`` (two rows per id).

        Returns (input_ids, attention_mask, token_type_ids, y_start, y_end, y).
        """
        batch_size = 2*len(batch_ids)
        batch_input_ids = np.zeros((batch_size, self.max_seq_len), dtype=np.int64)
        # Initialized to 1; positions up to the first [SEP] are reset to 0 below.
        batch_token_type_ids = np.ones((batch_size, self.max_seq_len), dtype=np.int64)
        batch_y_start = np.zeros((batch_size,), dtype=np.int64)
        batch_y_end = np.zeros((batch_size,), dtype=np.int64)
        batch_y = np.zeros((batch_size,), dtype=np.int64)
        for i, doc_id in enumerate(batch_ids):
            data = self.data_dict[doc_id]
            # get label: 4=YES, 3=NO, 2=short answer, 1=long answer only, 0=none
            annotations = data['annotations'][0]
            if annotations['yes_no_answer'] == 'YES':
                batch_y[i*2] = 4
            elif annotations['yes_no_answer'] == 'NO':
                batch_y[i*2] = 3
            elif annotations['short_answers']:
                batch_y[i*2] = 2
            elif annotations['long_answer']['candidate_index'] != -1:
                batch_y[i*2] = 1
            batch_y[i*2+1] = 0
            # get positive and negative samples
            question_tokens = self.tokenizer.tokenize(data['question_text'])[:self.max_question_len]
            # positive
            answer_tokens, start_position, end_position = self._get_positive_input_ids(data, question_tokens)
            input_tokens = ['[CLS]'] + question_tokens + ['[SEP]'] + answer_tokens + ['[SEP]']
            #if annotations['short_answers']:
            #    print(data['question_text'],"[AAA]",input_tokens[start_position:end_position])
            input_ids = self.tokenizer.convert_tokens_to_ids(input_tokens)
            batch_input_ids[i*2, :len(input_ids)] = input_ids
            # Segment ids: 0 up to (and incl.) the first [SEP] (token id 102), 1 after.
            batch_token_type_ids[i*2, :len(input_ids)] = [0 if k<=input_ids.index(102) else 1 for k in range(len(input_ids))]
            batch_y_start[i*2] = start_position
            batch_y_end[i*2] = end_position
            # negative
            answer_tokens, start_position, end_position = self._get_negative_input_ids(data, question_tokens)
            input_tokens = ['[CLS]'] + question_tokens + ['[SEP]'] + answer_tokens + ['[SEP]']
            input_ids = self.tokenizer.convert_tokens_to_ids(input_tokens)
            batch_token_type_ids[i*2+1, :len(input_ids)] = [0 if k<=input_ids.index(102) else 1 for k in range(len(input_ids))]
            batch_input_ids[i*2+1, :len(input_ids)] = input_ids
            batch_y_start[i*2+1] = start_position
            batch_y_end[i*2+1] = end_position
        # Attention mask: attend wherever a real (non-padding) token id is set.
        batch_attention_mask = batch_input_ids > 0
        return torch.from_numpy(batch_input_ids), torch.from_numpy(batch_attention_mask), torch.from_numpy(batch_token_type_ids), torch.LongTensor(batch_y_start), torch.LongTensor(batch_y_end), torch.LongTensor(batch_y)
class BertForQuestionAnswering(BertPreTrainedModel):
    """BERT backbone with two heads: span scoring and answer-type classification.

    Parameters
    ----------
    config : transformers.BertConfig
        Model configuration; ``num_labels`` sets the classifier output width.

    ``forward`` returns
    -------------------
    start_logits : torch.Tensor, shape (batch_size, sequence_size).
        Per-token scores for the answer span start.
    end_logits : torch.Tensor, shape (batch_size, sequence_size).
        Per-token scores for the answer span end.
    classifier_logits : torch.Tensor, shape (batch_size, num_classes).
        Sequence-level classification scores.
    """
    def __init__(self, config):
        super().__init__(config)
        # Module registration order is kept as-is so weight init/loading match.
        self.bert = BertModel(config)
        self.qa_outputs = nn.Linear(config.hidden_size, 2)  # start & end score per token
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        self.init_weights()
    def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None):
        encoder_outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = encoder_outputs[1]
        # Span head: a single linear layer emits (start, end) scores per token.
        span_scores = self.qa_outputs(sequence_output)
        start_logits, end_logits = span_scores.split(1, dim=-1)
        start_logits = start_logits.squeeze(-1)
        end_logits = end_logits.squeeze(-1)
        # Classification head runs on the pooled representation with dropout.
        classifier_logits = self.classifier(self.dropout(pooled_output))
        return start_logits, end_logits, classifier_logits
def loss_fn(preds, labels):
    """Compute the three loss components for span + classification training.

    ``preds`` and ``labels`` are each 3-tuples of (start, end, class) tensors.
    Span losses ignore positions labelled -1; the class loss does not.
    Returns ``(start_loss, end_loss, class_loss)``.
    """
    start_logits, end_logits, class_logits = preds
    start_true, end_true, class_true = labels
    # One criterion instance is reused for both span heads (it is stateless).
    span_criterion = nn.CrossEntropyLoss(ignore_index=-1)
    class_criterion = nn.CrossEntropyLoss()
    return (
        span_criterion(start_logits, start_true),
        span_criterion(end_logits, end_true),
        class_criterion(class_logits, class_true),
    )
def random_sample_negative_candidates(distribution):
    """Sample one index from ``distribution`` (per-index probabilities).

    Draws a uniform threshold and returns the first index whose cumulative
    probability exceeds it. If rounding leaves the total mass below the
    threshold, the last index is returned (same fallback as the original,
    which relied on the loop variable surviving the loop).

    Raises
    ------
    ValueError
        If ``distribution`` is empty. (The original crashed with an
        ``UnboundLocalError`` on empty input.)
    """
    if len(distribution) == 0:
        raise ValueError("distribution must be non-empty")
    threshold = np.random.random()
    cumulative = 0.0
    last_index = 0
    for index, prob in enumerate(distribution):
        cumulative += prob
        last_index = index
        if cumulative > threshold:
            return index
    return last_index
def main():
    """Distributed training entry point for the TFQA BERT model.

    Reads the simplified NQ training jsonl, keeps positively annotated
    examples (pairing each with one sampled negative candidate), then trains
    ``BertForQuestionAnswering`` for one pass under DDP + apex mixed
    precision, saving weights from rank 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--local_rank", type=int, default=-1, help="local_rank for distributed training on gpus")
    args = parser.parse_args()
    # One process per GPU; NCCL backend for multi-GPU communication.
    torch.cuda.set_device(args.local_rank)
    device = torch.device("cuda", args.local_rank)
    torch.distributed.init_process_group(backend="nccl")
    args.device = device
    # Fix all RNG seeds for reproducibility across python/numpy/torch.
    seed = 1001
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True
    # prepare input
    json_dir = '../../input/simplified-nq-train.jsonl'
    max_data = 9999999999
    id_list = []
    data_dict = {}
    with open(json_dir) as f:
        for n, line in tqdm(enumerate(f)):
            if n > max_data:
                break
            data = json.loads(line)
            # An example is "positive" if it has any kind of annotated answer.
            is_pos = False
            annotations = data['annotations'][0]
            if annotations['yes_no_answer'] == 'YES':
                is_pos = True
            elif annotations['yes_no_answer'] == 'NO':
                is_pos = True
            elif annotations['short_answers']:
                is_pos = True
            elif annotations['long_answer']['candidate_index'] != -1:
                is_pos = True
            # Need at least 2 candidates so a distinct negative can be drawn.
            if is_pos and len(data['long_answer_candidates'])>1:
                data_id = data['example_id']
                id_list.append(data_id)
                # uniform sampling over candidates, excluding the gold one
                distribution = np.ones((len(data['long_answer_candidates']),),dtype=np.float32)
                if is_pos:
                    distribution[data['annotations'][0]['long_answer']['candidate_index']] = 0.
                distribution /= len(distribution)
                negative_candidate_index = random_sample_negative_candidates(distribution)
                #
                doc_words = data['document_text'].split()
                # negative candidate: word span sampled above
                candidate = data['long_answer_candidates'][negative_candidate_index]
                negative_candidate_words = doc_words[candidate['start_token']:candidate['end_token']]
                negative_candidate_start = candidate['start_token']
                negative_candidate_end = candidate['end_token']
                # positive candidate: the annotated long answer span
                candidate = data['long_answer_candidates'][annotations['long_answer']['candidate_index']]
                positive_candidate_words = doc_words[candidate['start_token']:candidate['end_token']]
                positive_candidate_start = candidate['start_token']
                positive_candidate_end = candidate['end_token']
                # initialize data_dict entry consumed later by Collator
                data_dict[data_id] = {
                    'question_text': data['question_text'],
                    'annotations': data['annotations'],
                    'positive_text': positive_candidate_words,
                    'positive_start': positive_candidate_start,
                    'positive_end': positive_candidate_end,
                    'negative_text': negative_candidate_words,
                    'negative_start': negative_candidate_start,
                    'negative_end': negative_candidate_end,
                }
    print(len(id_list))
    random.shuffle(id_list)
    # hyperparameters
    max_seq_len = 384
    max_question_len = 64
    learning_rate = 0.00002
    batch_size = 4
    ep = 0
    # build model
    if args.local_rank not in [-1, 0]:
        # Make sure only the first process in distributed training will download model & vocab
        torch.distributed.barrier()
    model_path = '../../huggingface_pretrained/bert-base-uncased/'
    config = BertConfig.from_pretrained(model_path)
    config.num_labels = 5  # answer types: none/long/short/NO/YES
    tokenizer = BertTokenizer.from_pretrained(model_path, do_lower_case=True) # instantiate the BERT tokenizer
    model = BertForQuestionAnswering.from_pretrained(model_path, config=config)
    if args.local_rank == 0:
        # Make sure only the first process in distributed training will download model & vocab
        torch.distributed.barrier()
    model.to(args.device)
    optimizer = optim.Adam(model.parameters(), lr=learning_rate)
    # NOTE(review): `amp` is used here but `from apex import amp` is commented
    # out at the top of the file, so this line raises NameError at runtime —
    # confirm apex is installed and restore the import.
    model, optimizer = amp.initialize(model, optimizer, opt_level="O1",verbosity=0)
    model = torch.nn.parallel.DistributedDataParallel(
        model, device_ids=[args.local_rank], output_device=args.local_rank, find_unused_parameters=True
    )
    # training
    # iterator for training
    train_datagen = TFQADataset(id_list=id_list)
    train_sampler = DistributedSampler(train_datagen)
    train_collate = Collator(data_dict=data_dict,
                             tokenizer=tokenizer, # tokenizer shared with the model vocab
                             max_seq_len=max_seq_len,
                             max_question_len=max_question_len)
    train_generator = DataLoader(dataset=train_datagen,
                                 sampler=train_sampler,
                                 collate_fn=train_collate,
                                 batch_size=batch_size,
                                 num_workers=3,
                                 pin_memory=True)
    # train
    losses1 = AverageMeter() # start
    losses2 = AverageMeter() # end
    losses3 = AverageMeter() # class
    accuracies1 = AverageMeter() # start
    accuracies2 = AverageMeter() # end
    accuracies3 = AverageMeter() # class
    model.train()
    for j,(batch_input_ids, batch_attention_mask, batch_token_type_ids, batch_y_start, batch_y_end, batch_y) in enumerate(train_generator):
        batch_input_ids = batch_input_ids.cuda()
        batch_attention_mask = batch_attention_mask.cuda()
        batch_token_type_ids = batch_token_type_ids.cuda()
        labels1 = batch_y_start.cuda()
        labels2 = batch_y_end.cuda()
        labels3 = batch_y.cuda()
        logits1, logits2, logits3 = model(batch_input_ids, batch_attention_mask, batch_token_type_ids)
        # NOTE(review): y_true is assigned but never used below — confirm intent.
        y_true = (batch_y_start, batch_y_end, batch_y)
        loss1, loss2, loss3 = loss_fn((logits1, logits2, logits3), (labels1, labels2, labels3))
        loss = loss1+loss2+loss3
        acc1, n_position1 = get_position_accuracy(logits1, labels1)
        acc2, n_position2 = get_position_accuracy(logits2, labels2)
        acc3, n_position3 = get_position_accuracy(logits3, labels3)
        losses1.update(loss1.item(), n_position1)
        losses2.update(loss2.item(), n_position2)
        losses3.update(loss3.item(), n_position3)
        accuracies1.update(acc1, n_position1)
        accuracies2.update(acc2, n_position2)
        # NOTE(review): weights this update by n_position2 — likely a typo for
        # n_position3; confirm before relying on accuracies3.avg.
        accuracies3.update(acc3, n_position2)
        optimizer.zero_grad()
        # apex mixed-precision backward pass (see amp NOTE above).
        with amp.scale_loss(loss, optimizer) as scaled_loss:
            scaled_loss.backward()
        optimizer.step()
    if args.local_rank == 0:
        # Only rank 0 reports metrics and persists the (unwrapped) weights.
        print('epoch: {}, train_loss1: {}, train_loss2: {}, train_loss3: {}, train_acc1: {}, train_acc2: {}, train_acc3: {}'.format(ep,losses1.avg,losses2.avg,losses3.avg,accuracies1.avg,accuracies2.avg,accuracies3.avg), flush=True)
        out_dir = 'weights/epoch0/'
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        torch.save(model.module.state_dict(), out_dir+'pytorch_model.bin')
# Script entry point: run training only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
[
"torch.nn.Dropout",
"numpy.random.seed",
"argparse.ArgumentParser",
"numpy.sum",
"random.shuffle",
"transformers.BertModel",
"numpy.ones",
"torch.device",
"json.loads",
"torch.utils.data.DataLoader",
"torch.nn.parallel.DistributedDataParallel",
"os.path.exists",
"torch.utils.data.distributed.DistributedSampler",
"random.seed",
"torch.nn.Linear",
"torch.cuda.set_device",
"torch.manual_seed",
"torch.cuda.manual_seed",
"re.match",
"transformers.BertTokenizer.from_pretrained",
"torch.from_numpy",
"torch.distributed.init_process_group",
"os.makedirs",
"torch.LongTensor",
"numpy.float32",
"numpy.zeros",
"torch.nn.CrossEntropyLoss",
"torch.distributed.barrier",
"torch.nn.functional.softmax",
"numpy.random.random",
"transformers.BertConfig.from_pretrained"
] |
[((9488, 9506), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (9504, 9506), True, 'import numpy as np\n'), ((9689, 9714), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (9712, 9714), False, 'import argparse\n'), ((9867, 9905), 'torch.cuda.set_device', 'torch.cuda.set_device', (['args.local_rank'], {}), '(args.local_rank)\n', (9888, 9905), False, 'import torch\n'), ((9919, 9956), 'torch.device', 'torch.device', (['"""cuda"""', 'args.local_rank'], {}), "('cuda', args.local_rank)\n", (9931, 9956), False, 'import torch\n'), ((9961, 10013), 'torch.distributed.init_process_group', 'torch.distributed.init_process_group', ([], {'backend': '"""nccl"""'}), "(backend='nccl')\n", (9997, 10013), False, 'import torch\n'), ((10060, 10077), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (10071, 10077), False, 'import random\n'), ((10082, 10102), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (10096, 10102), True, 'import numpy as np\n'), ((10107, 10130), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (10124, 10130), False, 'import torch\n'), ((10135, 10163), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (10157, 10163), False, 'import torch\n'), ((13085, 13108), 'random.shuffle', 'random.shuffle', (['id_list'], {}), '(id_list)\n', (13099, 13108), False, 'import random\n'), ((13510, 13548), 'transformers.BertConfig.from_pretrained', 'BertConfig.from_pretrained', (['model_path'], {}), '(model_path)\n', (13536, 13548), False, 'from transformers import BertTokenizer, AdamW, BertModel, BertPreTrainedModel, BertConfig, get_linear_schedule_with_warmup\n'), ((13591, 13652), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['model_path'], {'do_lower_case': '(True)'}), '(model_path, do_lower_case=True)\n', (13620, 13652), False, 'from transformers import BertTokenizer, AdamW, BertModel, BertPreTrainedModel, BertConfig, 
get_linear_schedule_with_warmup\n'), ((14124, 14267), 'torch.nn.parallel.DistributedDataParallel', 'torch.nn.parallel.DistributedDataParallel', (['model'], {'device_ids': '[args.local_rank]', 'output_device': 'args.local_rank', 'find_unused_parameters': '(True)'}), '(model, device_ids=[args.\n local_rank], output_device=args.local_rank, find_unused_parameters=True)\n', (14165, 14267), False, 'import torch\n'), ((14400, 14433), 'torch.utils.data.distributed.DistributedSampler', 'DistributedSampler', (['train_datagen'], {}), '(train_datagen)\n', (14418, 14433), False, 'from torch.utils.data.distributed import DistributedSampler\n'), ((14704, 14846), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'train_datagen', 'sampler': 'train_sampler', 'collate_fn': 'train_collate', 'batch_size': 'batch_size', 'num_workers': '(3)', 'pin_memory': '(True)'}), '(dataset=train_datagen, sampler=train_sampler, collate_fn=\n train_collate, batch_size=batch_size, num_workers=3, pin_memory=True)\n', (14714, 14846), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler\n'), ((4543, 4599), 'numpy.zeros', 'np.zeros', (['(batch_size, self.max_seq_len)'], {'dtype': 'np.int64'}), '((batch_size, self.max_seq_len), dtype=np.int64)\n', (4551, 4599), True, 'import numpy as np\n'), ((4631, 4686), 'numpy.ones', 'np.ones', (['(batch_size, self.max_seq_len)'], {'dtype': 'np.int64'}), '((batch_size, self.max_seq_len), dtype=np.int64)\n', (4638, 4686), True, 'import numpy as np\n'), ((4712, 4751), 'numpy.zeros', 'np.zeros', (['(batch_size,)'], {'dtype': 'np.int64'}), '((batch_size,), dtype=np.int64)\n', (4720, 4751), True, 'import numpy as np\n'), ((4774, 4813), 'numpy.zeros', 'np.zeros', (['(batch_size,)'], {'dtype': 'np.int64'}), '((batch_size,), dtype=np.int64)\n', (4782, 4813), True, 'import numpy as np\n'), ((4832, 4871), 'numpy.zeros', 'np.zeros', (['(batch_size,)'], {'dtype': 'np.int64'}), '((batch_size,), dtype=np.int64)\n', (4840, 4871), True, 'import 
numpy as np\n'), ((7871, 7888), 'transformers.BertModel', 'BertModel', (['config'], {}), '(config)\n', (7880, 7888), False, 'from transformers import BertTokenizer, AdamW, BertModel, BertPreTrainedModel, BertConfig, get_linear_schedule_with_warmup\n'), ((7915, 7947), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', '(2)'], {}), '(config.hidden_size, 2)\n', (7924, 7947), True, 'import torch.nn as nn\n'), ((7984, 8022), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (7994, 8022), True, 'import torch.nn as nn\n'), ((8049, 8097), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.num_labels'], {}), '(config.hidden_size, config.num_labels)\n', (8058, 8097), True, 'import torch.nn as nn\n'), ((9173, 9209), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'ignore_index': '(-1)'}), '(ignore_index=-1)\n', (9192, 9209), True, 'import torch.nn as nn\n'), ((9252, 9288), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'ignore_index': '(-1)'}), '(ignore_index=-1)\n', (9271, 9288), True, 'import torch.nn as nn\n'), ((9329, 9350), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (9348, 9350), True, 'import torch.nn as nn\n'), ((13401, 13428), 'torch.distributed.barrier', 'torch.distributed.barrier', ([], {}), '()\n', (13426, 13428), False, 'import torch\n'), ((13907, 13934), 'torch.distributed.barrier', 'torch.distributed.barrier', ([], {}), '()\n', (13932, 13934), False, 'import torch\n'), ((1126, 1149), 'numpy.float32', 'np.float32', (['sum_correct'], {}), '(sum_correct)\n', (1136, 1149), True, 'import numpy as np\n'), ((2552, 2574), 're.match', 're.match', (['"""<.+>"""', 'word'], {}), "('<.+>', word)\n", (2560, 2574), False, 'import re\n'), ((4053, 4075), 're.match', 're.match', (['"""<.+>"""', 'word'], {}), "('<.+>', word)\n", (4061, 4075), False, 'import re\n'), ((6959, 6992), 'torch.from_numpy', 'torch.from_numpy', (['batch_input_ids'], {}), 
'(batch_input_ids)\n', (6975, 6992), False, 'import torch\n'), ((6994, 7032), 'torch.from_numpy', 'torch.from_numpy', (['batch_attention_mask'], {}), '(batch_attention_mask)\n', (7010, 7032), False, 'import torch\n'), ((7034, 7072), 'torch.from_numpy', 'torch.from_numpy', (['batch_token_type_ids'], {}), '(batch_token_type_ids)\n', (7050, 7072), False, 'import torch\n'), ((7074, 7105), 'torch.LongTensor', 'torch.LongTensor', (['batch_y_start'], {}), '(batch_y_start)\n', (7090, 7105), False, 'import torch\n'), ((7107, 7136), 'torch.LongTensor', 'torch.LongTensor', (['batch_y_end'], {}), '(batch_y_end)\n', (7123, 7136), False, 'import torch\n'), ((7138, 7163), 'torch.LongTensor', 'torch.LongTensor', (['batch_y'], {}), '(batch_y)\n', (7154, 7163), False, 'import torch\n'), ((10493, 10509), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (10503, 10509), False, 'import json\n'), ((16934, 16957), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (16948, 16957), False, 'import os\n'), ((16971, 16991), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (16982, 16991), False, 'import os\n'), ((686, 712), 'numpy.sum', 'np.sum', ([], {'predictions': 'labels'}), '(predictions=labels)\n', (692, 712), True, 'import numpy as np\n'), ((612, 636), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (621, 636), True, 'import torch.nn.functional as F\n'), ((813, 837), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (822, 837), True, 'import torch.nn.functional as F\n')]
|
#!/usr/bin/env python
"""
See README.md
"""
from unittest import TestCase
from sr_sample_python_library.sample_python_class import SamplePythonClass
class TestSamplePythonClass(TestCase):
    """Unit tests for SamplePythonClass.get_parameter_name_from_value."""

    def test_get_parameter_name_from_value(self):
        """Each parameter name should map to itself with a ``_test`` suffix."""
        convertor = SamplePythonClass()
        cases = [
            ("my_python_param_1", "my_python_param_1_test"),
            ("my_python_param_2", "my_python_param_2_test"),
        ]
        for param, expected in cases:
            self.assertEqual(expected,
                             convertor.get_parameter_name_from_value(param))
if __name__ == '__main__':
    import rosunit
    # Register this TestCase with the ROS test runner:
    # (package name, test name, test class).
    rosunit.rosrun('sr_sample_python_library', 'test_sample_python_class',
                   TestSamplePythonClass)
|
[
"sr_sample_python_library.sample_python_class.SamplePythonClass",
"rosunit.rosrun"
] |
[((756, 853), 'rosunit.rosrun', 'rosunit.rosrun', (['"""sr_sample_python_library"""', '"""test_sample_python_class"""', 'TestSamplePythonClass'], {}), "('sr_sample_python_library', 'test_sample_python_class',\n TestSamplePythonClass)\n", (770, 853), False, 'import rosunit\n'), ((266, 285), 'sr_sample_python_library.sample_python_class.SamplePythonClass', 'SamplePythonClass', ([], {}), '()\n', (283, 285), False, 'from sr_sample_python_library.sample_python_class import SamplePythonClass\n')]
|
import json
from urllib.request import urlopen
planet_url = 'https://swapi.dev/api/planets/'
def api_call(url: str) -> dict:
    """Fetch ``url`` and decode its JSON response body into a dict.

    Fix: the original never closed the object returned by ``urlopen``,
    leaking the underlying connection. Using the response as a context
    manager guarantees it is closed even if ``read``/``loads`` raises.
    """
    with urlopen(url) as response:
        return json.loads(response.read())  # converts json to dict
|
[
"urllib.request.urlopen"
] |
[((143, 155), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (150, 155), False, 'from urllib.request import urlopen\n')]
|
"""Imports World Bank income groups to Walden.
Example usage:
```
poetry run python -m ingests.wb_income_groups
```
"""
import tempfile
import datetime as dt
import unicodedata
import pandas as pd
import click
from owid.walden import files, add_to_catalog
from owid.walden.catalog import Dataset
SOURCE_DATA_URL = "http://databank.worldbank.org/data/download/site-content/CLASS.xlsx"
SHORT_NAME = "wb_income"
def load_metadata_description():
df_new = pd.read_excel(SOURCE_DATA_URL, sheet_name="Notes")
s = "\n\n".join(df_new.dropna().Notes.tolist())
return unicodedata.normalize("NFKD", s)
def create_metadata():
return Dataset(
namespace="wb",
short_name=SHORT_NAME,
name="World Bank list of economies - World Bank (July 2021)",
source_name="World Bank",
url="https://datahelpdesk.worldbank.org/knowledgebase/articles/906519-world-bank-country-and-lending-groups",
source_data_url=SOURCE_DATA_URL,
description=load_metadata_description(),
date_accessed=dt.datetime.now().date().strftime("%Y-%m-%d"),
publication_year=2021,
publication_date=dt.date(2021, 7, 1),
file_extension="xlsx",
license_name="CC BY 4.0",
license_url="https://www.worldbank.org/en/about/legal/terms-of-use-for-datasets",
)
def check_date(metadata: Dataset):
s = "Income classifications set on 1 July 2021 remain in effect until 1 July 2022"
if s not in metadata.description:
raise ValueError(
"Source data is no longer from 2021. Or something has changed in Notes sheet!"
)
@click.command()
def main():
metadata = create_metadata()
check_date(metadata)
with tempfile.NamedTemporaryFile() as f:
# fetch the file locally
assert metadata.source_data_url is not None
files.download(metadata.source_data_url, f.name)
# add it to walden, both locally, and to our remote file cache
add_to_catalog(metadata, f.name, upload=True)
if __name__ == "__main__":
main()
|
[
"unicodedata.normalize",
"tempfile.NamedTemporaryFile",
"datetime.date",
"owid.walden.files.download",
"click.command",
"pandas.read_excel",
"owid.walden.add_to_catalog",
"datetime.datetime.now"
] |
[((1619, 1634), 'click.command', 'click.command', ([], {}), '()\n', (1632, 1634), False, 'import click\n'), ((461, 511), 'pandas.read_excel', 'pd.read_excel', (['SOURCE_DATA_URL'], {'sheet_name': '"""Notes"""'}), "(SOURCE_DATA_URL, sheet_name='Notes')\n", (474, 511), True, 'import pandas as pd\n'), ((575, 607), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFKD"""', 's'], {}), "('NFKD', s)\n", (596, 607), False, 'import unicodedata\n'), ((1714, 1743), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (1741, 1743), False, 'import tempfile\n'), ((1843, 1891), 'owid.walden.files.download', 'files.download', (['metadata.source_data_url', 'f.name'], {}), '(metadata.source_data_url, f.name)\n', (1857, 1891), False, 'from owid.walden import files, add_to_catalog\n'), ((1971, 2016), 'owid.walden.add_to_catalog', 'add_to_catalog', (['metadata', 'f.name'], {'upload': '(True)'}), '(metadata, f.name, upload=True)\n', (1985, 2016), False, 'from owid.walden import files, add_to_catalog\n'), ((1145, 1164), 'datetime.date', 'dt.date', (['(2021)', '(7)', '(1)'], {}), '(2021, 7, 1)\n', (1152, 1164), True, 'import datetime as dt\n'), ((1042, 1059), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (1057, 1059), True, 'import datetime as dt\n')]
|
#!/usr/bin/python
import sys
import cgi
import cgitb
import sqlite3
reload(sys)
#import sys
sys.setdefaultencoding('utf-8')
cgitb.enable()
# html
print("Content-type: text/html\n")
print('<meta charset="utf-8">')
print("<html><head>")
print('''<link rel="stylesheet" href="https://bioed.bu.edu/students_21/group_proj/group_K/css/nav.css">''')
print('''<style>
body {margin:30;padding:30;}
</style> </head>''')
print("<title>Reviewer Help Page</title>")
print('''</head>''')
print("<body>")
print('''<div id="bg-image">''')
print('''<div id ="topnav">
<a href="https://bioed.bu.edu/cgi-bin/students_21/group_proj/group_K/show_applicant.py">Applicant List</a>
<a href="https://bioed.bu.edu/cgi-bin/students_21/group_proj/group_K/applicant_stats.py">Applicant Statistics</a>
<a href="https://bioed.bu.edu/cgi-bin/students_21/group_proj/group_K/review_summary.py">My Past Reviews</a>
<a href="https://bioed.bu.edu/cgi-bin/students_21/group_proj/group_K/help_reviewer.py">Help</a>
<a class ="active" href="https://bioed.bu.edu/cgi-bin/students_21/group_proj/group_K/about_rev.py">About/Contact</a>
</div>''')
print('''<h3>About: </h3>
<p>The BRITE REU Program is an NSF funded summer mentored research program for undergraduate students who work full time in a lab for ten weeks and participate in various training activities.
The BRITE REU database is used to store all the applicant data. This website acts as a portal to the database information.
This website allows applicants to be easily reviewed. This website also helps the admin to assign candidates to faculty projects.
This database and website were created by BU Bioinformatics graduate students as apart of their coursework.
The project was completed under the supervision of <NAME>. </p>
<h3>Contact Information: </h3>
<p><NAME>: <EMAIL> <br />
<NAME>: <EMAIL> <br />
<NAME>: <EMAIL> <br />
<NAME>: <EMAIL> <br />
<NAME>: <EMAIL> </p></body>''')
print("</html>")
|
[
"cgitb.enable",
"sys.setdefaultencoding"
] |
[((93, 124), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (115, 124), False, 'import sys\n'), ((126, 140), 'cgitb.enable', 'cgitb.enable', ([], {}), '()\n', (138, 140), False, 'import cgitb\n')]
|
import unittest
from osvimdriver.openstack.heat.template import HeatInputUtil
from ignition.utils.propvaluemap import PropValueMap
class TestHeatInputUtil(unittest.TestCase):
def test_filter_used_properties(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA:
type: string
propB:
type: string
'''
orig_props = {'propA': 'testA', 'propB': 'testB', 'propC': 'testC'}
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA': 'testA', 'propB': 'testB'})
def test_filter_used_properties_prop_value_map(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA:
type: string
propB:
type: string
'''
orig_props = PropValueMap({
'propA': {'type': 'string', 'value': 'testA'},
'propB': {'type': 'string', 'value': 'testB'},
'propC': {'type': 'string', 'value': 'testC'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA': 'testA', 'propB': 'testB'})
def test_filter_used_properties_reference_key_name(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA:
type: string
propB:
type: string
'''
orig_props = PropValueMap({
'propA': {'type': 'string', 'value': 'testA'},
'propB': {'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey': 'thisIsPublic'},
'propC': {'type': 'string', 'value': 'testC'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA': 'testA', 'propB': 'keyB'})
def test_filter_used_properties_reference_public(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA:
type: string
propB_public:
type: string
'''
orig_props = PropValueMap({
'propA': {'type': 'string', 'value': 'testA'},
'propB': {'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey': 'thisIsPublic'},
'propC': {'type': 'string', 'value': 'testC'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA': 'testA', 'propB_public': 'thisIsPublic'})
def test_filter_used_properties_reference_private(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA:
type: string
propB_private:
type: string
'''
orig_props = PropValueMap({
'propA': {'type': 'string', 'value': 'testA'},
'propB': {'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey': 'thisIsPublic'},
'propC': {'type': 'string', 'value': 'testC'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA': 'testA', 'propB_private': 'thisIsPrivate'})
def test_filter_used_properties_reference_all_parts_of_key(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA:
type: string
propB:
type: string
propB_private:
type: string
propB_public:
type: string
'''
orig_props = PropValueMap({
'propA': {'type': 'string', 'value': 'testA'},
'propB': {'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey': 'thisIsPublic'},
'propC': {'type': 'string', 'value': 'testC'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA': 'testA', 'propB': 'keyB', 'propB_public': 'thisIsPublic', 'propB_private': 'thisIsPrivate'})
def test_filter_used_properties_allows_public_key_suffix_on_non_key_property(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA_public:
type: string
'''
orig_props = PropValueMap({
'propA_public': {'type': 'string', 'value': 'testA'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA_public': 'testA'})
def test_filter_used_properties_allows_private_key_suffix_on_non_key_property(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA_private:
type: string
'''
orig_props = PropValueMap({
'propA_private': {'type': 'string', 'value': 'testA'}
})
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {'propA_private': 'testA'})
def test_filter_used_properties_supports_no_public_key(self):
util = HeatInputUtil()
heat_yml = '''
parameters:
propA_public:
type: string
'''
orig_props = PropValueMap({
'propA': {'type': 'key', 'keyName': 'keyA', 'privateKey': 'thisIsPrivate'}
})
# The property has no public key, so nothing is added to the used properties. Let Heat determine if this parameter is required
# (and ultimately throw an error if it is)
new_props = util.filter_used_properties(heat_yml, orig_props)
self.assertEqual(new_props, {})
|
[
"ignition.utils.propvaluemap.PropValueMap",
"osvimdriver.openstack.heat.template.HeatInputUtil"
] |
[((235, 250), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (248, 250), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((686, 701), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (699, 701), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((864, 1028), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA': {'type': 'string', 'value': 'testA'}, 'propB': {'type': 'string',\n 'value': 'testB'}, 'propC': {'type': 'string', 'value': 'testC'}}"], {}), "({'propA': {'type': 'string', 'value': 'testA'}, 'propB': {\n 'type': 'string', 'value': 'testB'}, 'propC': {'type': 'string',\n 'value': 'testC'}})\n", (876, 1028), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((1282, 1297), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (1295, 1297), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((1460, 1686), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA': {'type': 'string', 'value': 'testA'}, 'propB': {'type': 'key',\n 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey':\n 'thisIsPublic'}, 'propC': {'type': 'string', 'value': 'testC'}}"], {}), "({'propA': {'type': 'string', 'value': 'testA'}, 'propB': {\n 'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate',\n 'publicKey': 'thisIsPublic'}, 'propC': {'type': 'string', 'value':\n 'testC'}})\n", (1472, 1686), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((1933, 1948), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (1946, 1948), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((2118, 2344), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA': {'type': 'string', 'value': 'testA'}, 'propB': {'type': 'key',\n 'keyName': 'keyB', 'privateKey': 
'thisIsPrivate', 'publicKey':\n 'thisIsPublic'}, 'propC': {'type': 'string', 'value': 'testC'}}"], {}), "({'propA': {'type': 'string', 'value': 'testA'}, 'propB': {\n 'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate',\n 'publicKey': 'thisIsPublic'}, 'propC': {'type': 'string', 'value':\n 'testC'}})\n", (2130, 2344), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((2607, 2622), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (2620, 2622), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((2793, 3019), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA': {'type': 'string', 'value': 'testA'}, 'propB': {'type': 'key',\n 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey':\n 'thisIsPublic'}, 'propC': {'type': 'string', 'value': 'testC'}}"], {}), "({'propA': {'type': 'string', 'value': 'testA'}, 'propB': {\n 'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate',\n 'publicKey': 'thisIsPublic'}, 'propC': {'type': 'string', 'value':\n 'testC'}})\n", (2805, 3019), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((3293, 3308), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (3306, 3308), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((3571, 3797), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA': {'type': 'string', 'value': 'testA'}, 'propB': {'type': 'key',\n 'keyName': 'keyB', 'privateKey': 'thisIsPrivate', 'publicKey':\n 'thisIsPublic'}, 'propC': {'type': 'string', 'value': 'testC'}}"], {}), "({'propA': {'type': 'string', 'value': 'testA'}, 'propB': {\n 'type': 'key', 'keyName': 'keyB', 'privateKey': 'thisIsPrivate',\n 'publicKey': 'thisIsPublic'}, 'propC': {'type': 'string', 'value':\n 'testC'}})\n", (3583, 3797), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((4138, 4153), 
'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (4151, 4153), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((4280, 4348), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA_public': {'type': 'string', 'value': 'testA'}}"], {}), "({'propA_public': {'type': 'string', 'value': 'testA'}})\n", (4292, 4348), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((4611, 4626), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (4624, 4626), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((4754, 4823), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA_private': {'type': 'string', 'value': 'testA'}}"], {}), "({'propA_private': {'type': 'string', 'value': 'testA'}})\n", (4766, 4823), False, 'from ignition.utils.propvaluemap import PropValueMap\n'), ((5060, 5075), 'osvimdriver.openstack.heat.template.HeatInputUtil', 'HeatInputUtil', ([], {}), '()\n', (5073, 5075), False, 'from osvimdriver.openstack.heat.template import HeatInputUtil\n'), ((5202, 5296), 'ignition.utils.propvaluemap.PropValueMap', 'PropValueMap', (["{'propA': {'type': 'key', 'keyName': 'keyA', 'privateKey': 'thisIsPrivate'}}"], {}), "({'propA': {'type': 'key', 'keyName': 'keyA', 'privateKey':\n 'thisIsPrivate'}})\n", (5214, 5296), False, 'from ignition.utils.propvaluemap import PropValueMap\n')]
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
from parl.utils import logger
__all__ = ['get_gpu_count']
def get_gpu_count():
""" get avaliable gpu count
Returns:
gpu_count: int
"""
gpu_count = 0
env_cuda_devices = os.environ.get('CUDA_VISIBLE_DEVICES', None)
if env_cuda_devices is not None:
assert isinstance(env_cuda_devices, str)
try:
if not env_cuda_devices:
return 0
gpu_count = len(
[x for x in env_cuda_devices.split(',') if int(x) >= 0])
logger.info(
'CUDA_VISIBLE_DEVICES found gpu count: {}'.format(gpu_count))
except:
logger.warn('Cannot find available GPU devices, using CPU now.')
gpu_count = 0
else:
try:
gpu_count = str(subprocess.check_output(["nvidia-smi",
"-L"])).count('UUID')
logger.info('nvidia-smi -L found gpu count: {}'.format(gpu_count))
except:
logger.warn('Cannot find available GPU devices, using CPU now.')
gpu_count = 0
return gpu_count
|
[
"os.environ.get",
"subprocess.check_output",
"parl.utils.logger.warn"
] |
[((847, 891), 'os.environ.get', 'os.environ.get', (['"""CUDA_VISIBLE_DEVICES"""', 'None'], {}), "('CUDA_VISIBLE_DEVICES', None)\n", (861, 891), False, 'import os\n'), ((1286, 1350), 'parl.utils.logger.warn', 'logger.warn', (['"""Cannot find available GPU devices, using CPU now."""'], {}), "('Cannot find available GPU devices, using CPU now.')\n", (1297, 1350), False, 'from parl.utils import logger\n'), ((1649, 1713), 'parl.utils.logger.warn', 'logger.warn', (['"""Cannot find available GPU devices, using CPU now."""'], {}), "('Cannot find available GPU devices, using CPU now.')\n", (1660, 1713), False, 'from parl.utils import logger\n'), ((1428, 1473), 'subprocess.check_output', 'subprocess.check_output', (["['nvidia-smi', '-L']"], {}), "(['nvidia-smi', '-L'])\n", (1451, 1473), False, 'import subprocess\n')]
|
# https://stackoverflow.com/questions/273192/how-can-i-create-a-directory-if-it-does-not-exist
import os
import errno
# Not recommend this one:
# if not os.path.exists(directory):
# os.makedirs(directory)
try:
os.makedirs("D:/tim")
except OSError as e:
if e.errno != errno.EEXIST:
raise
# https://www.programcreek.com/python/example/444/errno.EEXIST
def make_dir(path):
"""
Creates 'path' if it does not exist
If creation fails, an exception will be thrown
errno.EEXIST: file exists
:param logger: the logger
:param path: the path to ensure it exists
"""
try:
os.makedirs(path)
except OSError as ex:
if ex.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
print('An error happened trying to create ' + path)
raise
|
[
"os.path.isdir",
"os.makedirs"
] |
[((229, 250), 'os.makedirs', 'os.makedirs', (['"""D:/tim"""'], {}), "('D:/tim')\n", (240, 250), False, 'import os\n'), ((659, 676), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (670, 676), False, 'import os\n'), ((745, 764), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (758, 764), False, 'import os\n')]
|
"""
Scrapy Shell
See documentation in docs/topics/shell.rst
"""
from threading import Thread
from scrapy.commands import ScrapyCommand
from scrapy.shell import Shell
from scrapy.http import Request
from scrapy.utils.url import add_http_if_no_scheme
from scrapy.utils.spider import spidercls_for_request, DefaultSpider
class Command(ScrapyCommand):
requires_project = False
default_settings = {'KEEP_ALIVE': True, 'LOGSTATS_INTERVAL': 0}
def syntax(self):
return "[url|file]"
def short_desc(self):
return "Interactive scraping console"
def long_desc(self):
return "Interactive console for scraping the given url"
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option("-c", dest="code",
help="evaluate the code in the shell, print the result and exit")
parser.add_option("--spider", dest="spider",
help="use this spider")
def update_vars(self, vars):
"""You can use this function to update the Scrapy objects that will be
available in the shell
"""
pass
def run(self, args, opts):
url = args[0] if args else None
if url:
url = add_http_if_no_scheme(url)
spider_loader = self.crawler_process.spider_loader
spidercls = DefaultSpider
if opts.spider:
spidercls = spider_loader.load(opts.spider)
elif url:
spidercls = spidercls_for_request(spider_loader, Request(url),
spidercls, log_multiple=True)
# The crawler is created this way since the Shell manually handles the
# crawling engine, so the set up in the crawl method won't work
crawler = self.crawler_process._create_crawler(spidercls)
# The Shell class needs a persistent engine in the crawler
crawler.engine = crawler._create_engine()
crawler.engine.start()
self._start_crawler_thread()
shell = Shell(crawler, update_vars=self.update_vars, code=opts.code)
shell.start(url=url)
def _start_crawler_thread(self):
t = Thread(target=self.crawler_process.start,
kwargs={'stop_after_crawl': False})
t.daemon = True
t.start()
|
[
"threading.Thread",
"scrapy.http.Request",
"scrapy.shell.Shell",
"scrapy.utils.url.add_http_if_no_scheme",
"scrapy.commands.ScrapyCommand.add_options"
] |
[((709, 748), 'scrapy.commands.ScrapyCommand.add_options', 'ScrapyCommand.add_options', (['self', 'parser'], {}), '(self, parser)\n', (734, 748), False, 'from scrapy.commands import ScrapyCommand\n'), ((2027, 2087), 'scrapy.shell.Shell', 'Shell', (['crawler'], {'update_vars': 'self.update_vars', 'code': 'opts.code'}), '(crawler, update_vars=self.update_vars, code=opts.code)\n', (2032, 2087), False, 'from scrapy.shell import Shell\n'), ((2167, 2244), 'threading.Thread', 'Thread', ([], {'target': 'self.crawler_process.start', 'kwargs': "{'stop_after_crawl': False}"}), "(target=self.crawler_process.start, kwargs={'stop_after_crawl': False})\n", (2173, 2244), False, 'from threading import Thread\n'), ((1236, 1262), 'scrapy.utils.url.add_http_if_no_scheme', 'add_http_if_no_scheme', (['url'], {}), '(url)\n', (1257, 1262), False, 'from scrapy.utils.url import add_http_if_no_scheme\n'), ((1516, 1528), 'scrapy.http.Request', 'Request', (['url'], {}), '(url)\n', (1523, 1528), False, 'from scrapy.http import Request\n')]
|
#### PATTERN | XX ##################################################################################
# -*- coding: utf-8 -*-
# Copyright (c) year, institute, country
# Author: Name (e-mail)
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
####################################################################################################
# Template for pattern.xx, bundling natural language processing tools for language XXXXX.
# The module bundles a shallow parser (part-of-speech tagger, chunker, lemmatizer)
# with functions for word inflection (singularization, pluralization, conjugation)
# and sentiment analysis.
# Base classes for the parser, verb table and sentiment lexicon are inherited from pattern.text.
# The parser can be subclassed with a custom tokenizer (finds sentence boundaries)
# and lemmatizer (uses word inflection to find the base form of words).
# The part-of-speech tagger requires a lexicon of tagged known words and rules for unknown words.
# Tools for word inflection should be bundled in pattern.text.xx.inflect.
import os
import sys
try:
MODULE = os.path.dirname(os.path.abspath(__file__))
except:
MODULE = ""
sys.path.insert(0, os.path.join(MODULE, "..", "..", "..", ".."))
# Import parser base classes.
from pattern.text import (
Lexicon, Model, Morphology, Context, Parser as _Parser, ngrams, pprint, commandline,
PUNCTUATION
)
# Import parse tree base classes.
from pattern.text.tree import (
Tree, Text, Sentence, Slice, Chunk, PNPChunk, Chink, Word, table,
SLASH, WORD, POS, CHUNK, PNP, REL, ANCHOR, LEMMA, AND, OR
)
# Import sentiment analysis base classes.
from pattern.text import (
Sentiment,
NOUN, VERB, ADJECTIVE, ADVERB,
MOOD, IRONY
)
# Import spelling base class.
from pattern.text import (
Spelling
)
# Import verb tenses.
from pattern.text import (
INFINITIVE, PRESENT, PAST, FUTURE,
FIRST, SECOND, THIRD,
SINGULAR, PLURAL, SG, PL,
PROGRESSIVE,
PARTICIPLE
)
# Import inflection functions.
from pattern.text.xx.inflect import (
article, referenced, DEFINITE, INDEFINITE,
pluralize, singularize, NOUN, VERB, ADJECTIVE,
verbs, conjugate, lemma, lexeme, tenses,
predicative, attributive
)
# Import all submodules.
from pattern.text.xx import inflect
sys.path.pop(0)
#--- PARSER ----------------------------------------------------------------------------------------
# Pattern uses the Penn Treebank II tagset (http://www.clips.ua.ac.be/pages/penn-treebank-tagset).
# The lexicon for pattern.xx may be using a different tagset (e.g., PAROLE, WOTAN).
# The following functions are meant to map the tags to Penn Treebank II, see Parser.find_chunks().
TAGSET = {"??": "NN"} # pattern.xx tagset => Penn Treebank II.
def tagset2penntreebank(tag):
return TAGSET.get(tag, tag)
# Different languages have different contractions (e.g., English "I've" or French "j'ai")
# and abbreviations. The following functions define contractions and abbreviations
# for pattern.xx, see also Parser.find_tokens().
REPLACEMENTS = {"'s": " 's", "'ve": " 've"}
ABBREVIATIONS = set(("e.g.", "etc.", "i.e."))
# A lemmatizer can be constructed if we have a pattern.xx.inflect,
# with functions for noun singularization and verb conjugation (i.e., infinitives).
def find_lemmata(tokens):
""" Annotates the tokens with lemmata for plural nouns and conjugated verbs,
where each token is a [word, part-of-speech] list.
"""
for token in tokens:
word, pos, lemma = token[0], token[1], token[0]
if pos.startswith("JJ"):
lemma = predicative(word)
if pos == "NNS":
lemma = singularize(word)
if pos.startswith(("VB", "MD")):
lemma = conjugate(word, INFINITIVE) or word
token.append(lemma.lower())
return tokens
# Subclass the base parser with the language-specific functionality:
class Parser(_Parser):
def find_tokens(self, tokens, **kwargs):
kwargs.setdefault("abbreviations", ABBREVIATIONS)
kwargs.setdefault("replace", REPLACEMENTS)
return _Parser.find_tokens(self, tokens, **kwargs)
def find_tags(self, tokens, **kwargs):
kwargs.setdefault("map", tagset2penntreebank)
return _Parser.find_tags(self, tokens, **kwargs)
def find_chunks(self, tokens, **kwargs):
return _Parser.find_chunks(self, tokens, **kwargs)
def find_lemmata(self, tokens, **kwargs):
return find_lemmata(tokens)
# The parser's part-of-speech tagger requires a lexicon of tagged known words,
# and rules for unknown words. See pattern.text.Morphology and pattern.text.Context
# for further details. A tutorial on how to acquire data for the lexicon is here:
# http://www.clips.ua.ac.be/pages/using-wiktionary-to-build-an-italian-part-of-speech-tagger
# Create the parser with default tags for unknown words:
# (noun, proper noun, numeric).
parser = Parser(
lexicon = os.path.join(MODULE, "xx-lexicon.txt"),
morphology = os.path.join(MODULE, "xx-morphology.txt"),
context = os.path.join(MODULE, "xx-context.txt"),
entities = os.path.join(MODULE, "xx-entities.txt"),
default = ("NN", "NNP", "CD"),
language = "xx"
)
lexicon = parser.lexicon # Expose lexicon.
# Create the sentiment lexicon,
# see pattern/text/xx/xx-sentiment.xml for further details.
# We also need to define the tag for modifiers,
# words that modify the score of the following word
# (e.g., *very* good, *not good, ...)
sentiment = Sentiment(
path = os.path.join(MODULE, "xx-sentiment.xml"),
synset = None,
negations = ("no", "not", "never"),
modifiers = ("RB",),
modifier = lambda w: w.endswith("ly"), # brilliantly, hardly, partially, ...
language = "xx"
)
# Nothing should be changed below.
def tokenize(s, *args, **kwargs):
""" Returns a list of sentences, where punctuation marks have been split from words.
"""
return parser.find_tokens(s, *args, **kwargs)
def parse(s, *args, **kwargs):
""" Returns a tagged Unicode string.
"""
return parser.parse(s, *args, **kwargs)
def parsetree(s, *args, **kwargs):
""" Returns a parsed Text from the given string.
"""
return Text(parse(s, *args, **kwargs))
def tree(s, token=[WORD, POS, CHUNK, PNP, REL, LEMMA]):
""" Returns a parsed Text from the given parsed string.
"""
return Text(s, token)
def tag(s, tokenize=True, encoding="utf-8", **kwargs):
""" Returns a list of (token, tag)-tuples from the given string.
"""
tags = []
for sentence in parse(s, tokenize, True, False, False, False, encoding, **kwargs).split():
for token in sentence:
tags.append((token[0], token[1]))
return tags
def polarity(s, **kwargs):
""" Returns the sentence polarity (positive/negative) between -1.0 and 1.0.
"""
return sentiment(s, **kwargs)[0]
def subjectivity(s, **kwargs):
""" Returns the sentence subjectivity (objective/subjective) between 0.0 and 1.0.
"""
return sentiment(s, **kwargs)[1]
def positive(s, threshold=0.1, **kwargs):
""" Returns True if the given sentence has a positive sentiment.
"""
return polarity(s, **kwargs) >= threshold
split = tree # Backwards compatibility.
#---------------------------------------------------------------------------------------------------
# python -m pattern.xx xml -s "..." -OTCL
if __name__ == "__main__":
commandline(parse)
|
[
"sys.path.pop",
"os.path.abspath",
"pattern.text.Parser.find_chunks",
"pattern.text.xx.inflect.predicative",
"pattern.text.xx.inflect.singularize",
"pattern.text.commandline",
"pattern.text.tree.Text",
"pattern.text.Parser.find_tokens",
"pattern.text.xx.inflect.conjugate",
"pattern.text.Parser.find_tags",
"pattern.text.xx.inflect.lemma.lower",
"os.path.join"
] |
[((2319, 2334), 'sys.path.pop', 'sys.path.pop', (['(0)'], {}), '(0)\n', (2331, 2334), False, 'import sys\n'), ((1216, 1260), 'os.path.join', 'os.path.join', (['MODULE', '""".."""', '""".."""', '""".."""', '""".."""'], {}), "(MODULE, '..', '..', '..', '..')\n", (1228, 1260), False, 'import os\n'), ((6423, 6437), 'pattern.text.tree.Text', 'Text', (['s', 'token'], {}), '(s, token)\n', (6427, 6437), False, 'from pattern.text.tree import Tree, Text, Sentence, Slice, Chunk, PNPChunk, Chink, Word, table, SLASH, WORD, POS, CHUNK, PNP, REL, ANCHOR, LEMMA, AND, OR\n'), ((7482, 7500), 'pattern.text.commandline', 'commandline', (['parse'], {}), '(parse)\n', (7493, 7500), False, 'from pattern.text import Lexicon, Model, Morphology, Context, Parser as _Parser, ngrams, pprint, commandline, PUNCTUATION\n'), ((1145, 1170), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1160, 1170), False, 'import os\n'), ((4125, 4168), 'pattern.text.Parser.find_tokens', '_Parser.find_tokens', (['self', 'tokens'], {}), '(self, tokens, **kwargs)\n', (4144, 4168), True, 'from pattern.text import Lexicon, Model, Morphology, Context, Parser as _Parser, ngrams, pprint, commandline, PUNCTUATION\n'), ((4290, 4331), 'pattern.text.Parser.find_tags', '_Parser.find_tags', (['self', 'tokens'], {}), '(self, tokens, **kwargs)\n', (4307, 4331), True, 'from pattern.text import Lexicon, Model, Morphology, Context, Parser as _Parser, ngrams, pprint, commandline, PUNCTUATION\n'), ((4401, 4444), 'pattern.text.Parser.find_chunks', '_Parser.find_chunks', (['self', 'tokens'], {}), '(self, tokens, **kwargs)\n', (4420, 4444), True, 'from pattern.text import Lexicon, Model, Morphology, Context, Parser as _Parser, ngrams, pprint, commandline, PUNCTUATION\n'), ((4990, 5028), 'os.path.join', 'os.path.join', (['MODULE', '"""xx-lexicon.txt"""'], {}), "(MODULE, 'xx-lexicon.txt')\n", (5002, 5028), False, 'import os\n'), ((5046, 5087), 'os.path.join', 'os.path.join', (['MODULE', 
'"""xx-morphology.txt"""'], {}), "(MODULE, 'xx-morphology.txt')\n", (5058, 5087), False, 'import os\n'), ((5105, 5143), 'os.path.join', 'os.path.join', (['MODULE', '"""xx-context.txt"""'], {}), "(MODULE, 'xx-context.txt')\n", (5117, 5143), False, 'import os\n'), ((5160, 5199), 'os.path.join', 'os.path.join', (['MODULE', '"""xx-entities.txt"""'], {}), "(MODULE, 'xx-entities.txt')\n", (5172, 5199), False, 'import os\n'), ((5574, 5614), 'os.path.join', 'os.path.join', (['MODULE', '"""xx-sentiment.xml"""'], {}), "(MODULE, 'xx-sentiment.xml')\n", (5586, 5614), False, 'import os\n'), ((3623, 3640), 'pattern.text.xx.inflect.predicative', 'predicative', (['word'], {}), '(word)\n', (3634, 3640), False, 'from pattern.text.xx.inflect import article, referenced, DEFINITE, INDEFINITE, pluralize, singularize, NOUN, VERB, ADJECTIVE, verbs, conjugate, lemma, lexeme, tenses, predicative, attributive\n'), ((3688, 3705), 'pattern.text.xx.inflect.singularize', 'singularize', (['word'], {}), '(word)\n', (3699, 3705), False, 'from pattern.text.xx.inflect import article, referenced, DEFINITE, INDEFINITE, pluralize, singularize, NOUN, VERB, ADJECTIVE, verbs, conjugate, lemma, lexeme, tenses, predicative, attributive\n'), ((3824, 3837), 'pattern.text.xx.inflect.lemma.lower', 'lemma.lower', ([], {}), '()\n', (3835, 3837), False, 'from pattern.text.xx.inflect import article, referenced, DEFINITE, INDEFINITE, pluralize, singularize, NOUN, VERB, ADJECTIVE, verbs, conjugate, lemma, lexeme, tenses, predicative, attributive\n'), ((3767, 3794), 'pattern.text.xx.inflect.conjugate', 'conjugate', (['word', 'INFINITIVE'], {}), '(word, INFINITIVE)\n', (3776, 3794), False, 'from pattern.text.xx.inflect import article, referenced, DEFINITE, INDEFINITE, pluralize, singularize, NOUN, VERB, ADJECTIVE, verbs, conjugate, lemma, lexeme, tenses, predicative, attributive\n')]
|
import json
from importlib import import_module
from time import sleep
from unittest import mock
from django.conf import settings
from django.shortcuts import resolve_url
from django.test import RequestFactory, TestCase
from django.test.utils import override_settings
from django.urls import reverse
from django_otp import DEVICE_ID_SESSION_KEY
from django_otp.oath import totp
from django_otp.util import random_hex
from two_factor.views.core import LoginView
from .utils import UserMixin, totp_str
class LoginTest(UserMixin, TestCase):
    """Integration tests for the two-factor ``LoginView`` wizard.

    Covers the username/password ("auth") step, the OTP token step,
    backup tokens and backup phones, redirect handling (``next``,
    allowed external hosts), login timeouts and the session bookkeeping
    the wizard performs between steps.
    """
    def _post(self, data=None):
        # Convenience wrapper: POST *data* to the two-factor login view.
        return self.client.post(reverse('two_factor:login'), data=data)
    def test_form(self):
        # The initial wizard step renders the password form.
        response = self.client.get(reverse('two_factor:login'))
        self.assertContains(response, 'Password:')
    def test_invalid_login(self):
        # Unknown credentials re-render the auth step with an error.
        response = self._post({'auth-username': 'unknown',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Please enter a correct')
        self.assertContains(response, 'and password.')
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_valid_login(self, mock_signal):
        # A user without any OTP device logs straight in after the auth step.
        self.create_user()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        # No signal should be fired for non-verified user logins.
        self.assertFalse(mock_signal.called)
    def test_valid_login_with_custom_redirect(self):
        # A ``next`` query parameter controls the post-login redirect.
        redirect_url = reverse('two_factor:setup')
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('two_factor:login'), 'next=' + redirect_url),
            {'auth-username': '<EMAIL>',
             'auth-password': '<PASSWORD>',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, redirect_url)
    def test_valid_login_with_custom_post_redirect(self):
        # ``next`` may also arrive in the POST body instead of the query string.
        redirect_url = reverse('two_factor:setup')
        self.create_user()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth',
                               'next': redirect_url})
        self.assertRedirects(response, redirect_url)
    def test_valid_login_with_redirect_field_name(self):
        # A login view configured with a custom redirect field name honours it.
        redirect_url = reverse('two_factor:setup')
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('custom-field-name-login'), 'next_page=' + redirect_url),
            {'auth-username': '<EMAIL>',
             'auth-password': '<PASSWORD>',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, redirect_url)
    def test_valid_login_with_allowed_external_redirect(self):
        # External redirect targets on the view's allow-list are accepted.
        redirect_url = 'https://test.allowed-success-url.com'
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('custom-allowed-success-url-login'), 'next=' + redirect_url),
            {'auth-username': '<EMAIL>',
             'auth-password': '<PASSWORD>',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, redirect_url, fetch_redirect_response=False)
    def test_valid_login_with_disallowed_external_redirect(self):
        # External targets NOT on the allow-list fall back to the default URL.
        redirect_url = 'https://test.disallowed-success-url.com'
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('custom-allowed-success-url-login'), 'next=' + redirect_url),
            {'auth-username': '<EMAIL>',
             'auth-password': '<PASSWORD>',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, reverse('two_factor:profile'), fetch_redirect_response=False)
    @mock.patch('two_factor.views.core.time')
    def test_valid_login_primary_key_stored(self, mock_time):
        # After the auth step the wizard session stores the user's pk, the
        # auth backend path, and the authentication timestamp (truncated to
        # whole seconds — time is mocked to make that deterministic).
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        user.totpdevice_set.create(name='default',
                                   key=random_hex())
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(
            self.client.session['wizard_login_view']['user_backend'],
            'django.contrib.auth.backends.ModelBackend')
        self.assertEqual(self.client.session['wizard_login_view']['authentication_time'], 12345)
    @mock.patch('two_factor.views.core.time')
    def test_valid_login_post_auth_session_clear_of_form_data(self, mock_time):
        # Raw form data (including the password) must not survive in the
        # wizard session once the auth step has validated.
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        user.totpdevice_set.create(name='default',
                                   key=random_hex())
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(self.client.session['wizard_login_view']['step'], 'token')
        self.assertEqual(self.client.session['wizard_login_view']['step_data'], {'auth': None})
        self.assertEqual(self.client.session['wizard_login_view']['step_files'], {'auth': {}})
        self.assertEqual(self.client.session['wizard_login_view']['validated_step_data'], {})
    @mock.patch('two_factor.views.core.logger')
    @mock.patch('two_factor.views.core.time')
    def test_valid_login_expired(self, mock_time, mock_logger):
        # If TWO_FACTOR_LOGIN_TIMEOUT elapses between the auth and token
        # steps, the wizard restarts at the auth form and logs a message.
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(
            self.client.session['wizard_login_view']['user_backend'],
            'django.contrib.auth.backends.ModelBackend')
        self.assertEqual(self.client.session['wizard_login_view']['authentication_time'], 12345)
        # Advance mocked time past the timeout before submitting the token.
        mock_time.time.return_value = 20345.12
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'Token:')
        self.assertContains(response, 'Password:')
        self.assertContains(response, 'Your session has timed out. Please login again.')
        # Check that a message was logged.
        mock_logger.info.assert_called_with(
            "User's authentication flow has timed out. The user "
            "has been redirected to the initial auth form.")
    @override_settings(TWO_FACTOR_LOGIN_TIMEOUT=0)
    @mock.patch('two_factor.views.core.time')
    def test_valid_login_no_timeout(self, mock_time):
        # TWO_FACTOR_LOGIN_TIMEOUT=0 disables the timeout entirely, so the
        # same time jump as in test_valid_login_expired still logs in.
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(
            self.client.session['wizard_login_view']['user_backend'],
            'django.contrib.auth.backends.ModelBackend')
        self.assertEqual(self.client.session['wizard_login_view']['authentication_time'], 12345)
        mock_time.time.return_value = 20345.12
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        self.assertEqual(self.client.session['_auth_user_id'], str(user.pk))
    def test_valid_login_with_redirect_authenticated_user(self):
        # With redirect_authenticated_user enabled, an already logged-in
        # user is redirected away from the login form.
        user = self.create_user()
        response = self.client.get(
            reverse('custom-redirect-authenticated-user-login')
        )
        self.assertEqual(response.status_code, 200)
        self.client.force_login(user)
        response = self.client.get(
            reverse('custom-redirect-authenticated-user-login')
        )
        self.assertRedirects(response, reverse('two_factor:profile'))
    def test_valid_login_with_redirect_authenticated_user_loop(self):
        # A ``next`` pointing back at the login view itself would redirect
        # forever; Django detects this and raises ValueError.
        redirect_url = reverse('custom-redirect-authenticated-user-login')
        user = self.create_user()
        self.client.force_login(user)
        with self.assertRaises(ValueError):
            self.client.get(
                '%s?%s' % (reverse('custom-redirect-authenticated-user-login'), 'next=' + redirect_url),
            )
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_with_generator(self, mock_signal):
        # Full login flow with a TOTP (token generator) device: wrong token
        # is rejected, correct token logs in and fires user_verified.
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertContains(response, 'autofocus="autofocus"')
        self.assertContains(response, 'pattern="[0-9]*"')
        self.assertContains(response, 'autocomplete="one-time-code"')
        response = self._post({'token-otp_token': '<PASSWORD>',
                               'login_view-current_step': 'token'})
        self.assertEqual(response.context_data['wizard']['form'].errors,
                         {'__all__': ['Invalid token. Please make sure you '
                                      'have entered it correctly.']})
        # reset throttle because we're not testing that
        device.throttle_reset()
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        self.assertEqual(device.persistent_id,
                         self.client.session.get(DEVICE_ID_SESSION_KEY))
        # Check that the signal was fired.
        mock_signal.assert_called_with(sender=mock.ANY, request=mock.ANY, user=user, device=device)
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_throttle_with_generator(self, mock_signal):
        # A throttled device rejects even the correct token.
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        self._post({'auth-username': '<EMAIL>',
                    'auth-password': '<PASSWORD>',
                    'login_view-current_step': 'auth'})
        # throttle device
        device.throttle_increment()
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertEqual(response.context_data['wizard']['form'].errors,
                         {'__all__': ['Invalid token. Please make sure you '
                                      'have entered it correctly.']})
    @mock.patch('two_factor.gateways.fake.Fake')
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    @override_settings(
        TWO_FACTOR_SMS_GATEWAY='two_factor.gateways.fake.Fake',
        TWO_FACTOR_CALL_GATEWAY='two_factor.gateways.fake.Fake',
    )
    def test_with_backup_phone(self, mock_signal, fake):
        # Login via a backup phone device, exercised for both 6- and
        # 8-digit TOTP configurations, over SMS and voice call.
        user = self.create_user()
        for no_digits in (6, 8):
            with self.settings(TWO_FACTOR_TOTP_DIGITS=no_digits):
                user.totpdevice_set.create(name='default', key=random_hex(),
                                           digits=no_digits)
                device = user.phonedevice_set.create(name='backup', number='+31101234567',
                                                     method='sms',
                                                     key=random_hex())
                # Backup phones should be listed on the login form
                response = self._post({'auth-username': '<EMAIL>',
                                       'auth-password': '<PASSWORD>',
                                       'login_view-current_step': 'auth'})
                self.assertContains(response, 'Send text message to +31 ** *** **67')
                # Ask for challenge on invalid device
                response = self._post({'auth-username': '<EMAIL>',
                                       'auth-password': '<PASSWORD>',
                                       'challenge_device': 'MALICIOUS/INPUT/666'})
                self.assertContains(response, 'Send text message to +31 ** *** **67')
                # Ask for SMS challenge
                response = self._post({'auth-username': '<EMAIL>',
                                       'auth-password': '<PASSWORD>',
                                       'challenge_device': device.persistent_id})
                self.assertContains(response, 'We sent you a text message')
                test_call_kwargs = fake.return_value.send_sms.call_args[1]
                self.assertEqual(test_call_kwargs['device'], device)
                # Accept a token at drift -1 or 0 to avoid clock-edge flakiness.
                self.assertIn(test_call_kwargs['token'],
                              [str(totp(device.bin_key, digits=no_digits, drift=i)).zfill(no_digits)
                               for i in [-1, 0]])
                # Ask for phone challenge
                device.method = 'call'
                device.save()
                response = self._post({'auth-username': '<EMAIL>',
                                       'auth-password': '<PASSWORD>',
                                       'challenge_device': device.persistent_id})
                self.assertContains(response, 'We are calling your phone right now')
                test_call_kwargs = fake.return_value.make_call.call_args[1]
                self.assertEqual(test_call_kwargs['device'], device)
                self.assertIn(test_call_kwargs['token'],
                              [str(totp(device.bin_key, digits=no_digits, drift=i)).zfill(no_digits)
                               for i in [-1, 0]])
                # Valid token should be accepted.
                response = self._post({'token-otp_token': totp_str(device.bin_key),
                                       'login_view-current_step': 'token'})
                self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
                self.assertEqual(device.persistent_id,
                                 self.client.session.get(DEVICE_ID_SESSION_KEY))
                # Check that the signal was fired.
                mock_signal.assert_called_with(sender=mock.ANY, request=mock.ANY, user=user, device=device)
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_with_backup_token(self, mock_signal):
        # Login via a static backup token instead of the TOTP generator.
        user = self.create_user()
        user.totpdevice_set.create(name='default', key=random_hex())
        device = user.staticdevice_set.create(name='backup')
        device.token_set.create(token='<KEY>')
        # Backup phones should be listed on the login form
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Backup Token')
        # Should be able to go to backup tokens step in wizard
        response = self._post({'wizard_goto_step': 'backup'})
        self.assertContains(response, 'backup tokens')
        # Wrong codes should not be accepted
        response = self._post({'backup-otp_token': 'WRONG',
                               'login_view-current_step': 'backup'})
        self.assertEqual(response.context_data['wizard']['form'].errors,
                         {'__all__': ['Invalid token. Please make sure you '
                                      'have entered it correctly.']})
        # static devices are throttled
        device.throttle_reset()
        # Valid token should be accepted.
        response = self._post({'backup-otp_token': '<KEY>',
                               'login_view-current_step': 'backup'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        # Check that the signal was fired.
        mock_signal.assert_called_with(sender=mock.ANY, request=mock.ANY, user=user, device=device)
    @mock.patch('two_factor.views.utils.logger')
    def test_reset_wizard_state(self, mock_logger):
        # A GET between wizard steps resets the stored state; the token
        # step must then degrade gracefully back to the auth form.
        self.create_user()
        self.enable_otp()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        # A GET request resets the state of the wizard...
        self.client.get(reverse('two_factor:login'))
        # ...so there is no user in this request anymore. As the login flow
        # depends on a user being present, this should be handled gracefully.
        response = self._post({'token-otp_token': '<PASSWORD>',
                               'login_view-current_step': 'token'})
        self.assertContains(response, 'Password:')
        # Check that a message was logged.
        mock_logger.warning.assert_called_with(
            "Requested step '%s' is no longer valid, returning to last valid "
            "step in the wizard.",
            'token')
    @mock.patch('two_factor.views.utils.logger')
    def test_login_different_user_on_existing_session(self, mock_logger):
        """
        This test reproduces the issue where a user is logged in and a different user
        attempts to login.
        """
        self.create_user()
        self.create_user(username='<EMAIL>')
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
    def test_missing_management_data(self):
        # missing management data
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>'})
        # view should return HTTP 400 Bad Request
        self.assertEqual(response.status_code, 400)
    def test_no_password_in_session(self):
        # The session must never hold secret material after the auth step.
        self.create_user()
        self.enable_otp()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        session_contents = json.dumps(list(self.client.session.items()))
        self.assertNotIn('secret', session_contents)
    def test_login_different_user_with_otp_on_existing_session(self):
        # A second (OTP-enabled) user logging in over an existing session
        # must be taken through the token step.
        self.create_user()
        vedran_user = self.create_user(username='<EMAIL>')
        device = vedran_user.totpdevice_set.create(name='default', key=random_hex())
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response,
                             resolve_url(settings.LOGIN_REDIRECT_URL))
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertRedirects(response,
                             resolve_url(settings.LOGIN_REDIRECT_URL))
    def test_login_view_is_step_visible(self):
        # is_step_visible() should hide the token step once that step's
        # data has been validated, and show every step otherwise.
        request = RequestFactory().get(reverse('login'))
        engine = import_module(settings.SESSION_ENGINE)
        request.session = engine.SessionStore(None)
        login_view = LoginView(**LoginView.get_initkwargs())
        login_view.setup(request)
        login_view.dispatch(request)
        # Initially, any step is visible
        for step, form_class in login_view.form_list.items():
            self.assertTrue(login_view.is_step_visible(step, form_class))
        login_view.storage.validated_step_data['auth'] = {'username': 'joe', 'password': '<PASSWORD>'}
        login_view.storage.validated_step_data['token'] = {'otp_token': '<PASSWORD>'}
        # Once token was entered, the token step is no longer visible
        for step, form_class in login_view.form_list.items():
            if step == 'token':
                self.assertFalse(login_view.is_step_visible(step, form_class))
            else:
                self.assertTrue(login_view.is_step_visible(step, form_class))
class BackupTokensTest(UserMixin, TestCase):
    """Tests for the backup-token generation view and the login page's
    cancel-URL context variable."""
    def setUp(self):
        # All tests run as a logged-in, OTP-verified user.
        super().setUp()
        self.create_user()
        self.enable_otp()
        self.login_user()
    def test_empty(self):
        # Before generation, the page reports that no codes exist yet.
        response = self.client.get(reverse('two_factor:backup_tokens'))
        self.assertContains(response, 'You don\'t have any backup codes yet.')
    def test_generate(self):
        # POSTing generates ten tokens; a second POST replaces the set.
        url = reverse('two_factor:backup_tokens')
        response = self.client.post(url)
        self.assertRedirects(response, url)
        response = self.client.get(url)
        first_set = set([token.token for token in
                         response.context_data['device'].token_set.all()])
        self.assertNotContains(response, 'You don\'t have any backup codes '
                                         'yet.')
        self.assertEqual(10, len(first_set))
        # Generating the tokens should give a fresh set
        self.client.post(url)
        response = self.client.get(url)
        second_set = set([token.token for token in
                          response.context_data['device'].token_set.all()])
        self.assertNotEqual(first_set, second_set)
    def test_no_cancel_url(self):
        # Without LOGOUT_REDIRECT_URL/LOGOUT_URL no cancel link is exposed.
        response = self.client.get(reverse('two_factor:login'))
        self.assertEqual(response.status_code, 200)
        self.assertNotIn('cancel_url', response.context.keys())
    @override_settings(LOGOUT_REDIRECT_URL='custom-field-name-login')
    def test_cancel_redirects_to_logout_redirect_url(self):
        # LOGOUT_REDIRECT_URL becomes the cancel link on the login page.
        response = self.client.get(reverse('two_factor:login'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['cancel_url'], reverse('custom-field-name-login'))
    @override_settings(LOGOUT_URL='custom-field-name-login')
    def test_logout_url_warning_raised(self):
        # LOGOUT_URL is still honoured but emits a DeprecationWarning.
        with self.assertWarns(DeprecationWarning):
            response = self.client.get(reverse('two_factor:login'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['cancel_url'], reverse('custom-field-name-login'))
@override_settings(ROOT_URLCONF='tests.urls_admin')
class RememberLoginTest(UserMixin, TestCase):
    """Tests for the "remember this device" cookie: it must skip the token
    step on subsequent logins, expire, survive tampering attempts (by
    falling back to the token step) and respect device throttling."""
    def setUp(self):
        super().setUp()
        self.user = self.create_user()
        self.device = self.user.totpdevice_set.create(name='default', key=random_hex())
    def _post(self, data=None):
        # Convenience wrapper: POST *data* to the two-factor login view.
        return self.client.post(reverse('two_factor:login'), data=data)
    def set_invalid_remember_cookie(self):
        # Corrupt the remember cookie's trailing signature bytes, keeping
        # the original value around so tests can restore it later.
        for cookie in self.client.cookies:
            if cookie.startswith("remember-cookie_"):
                self._restore_remember_cookie_data = dict(name=cookie, value=self.client.cookies[cookie].value)
                self.client.cookies[cookie] = self.client.cookies[cookie].value[:-5] + "0" * 5  # an invalid key
    def restore_remember_cookie(self):
        # Undo set_invalid_remember_cookie().
        self.client.cookies[self._restore_remember_cookie_data['name']] = self._restore_remember_cookie_data['value']
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)
    def test_with_remember(self):
        # Logging in with 'token-remember' sets the cookie; the next login
        # within the cookie's lifetime skips the token step.
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertRedirects(response, reverse('two_factor:profile'), fetch_redirect_response=False)
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))
        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)
        # Login without token
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 200)
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 3)
    def test_with_remember_label_3_min(self):
        # The remember checkbox label reflects the cookie age (minutes).
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'ask again on this device for 3 minutes')
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60 * 4)
    def test_with_remember_label_4_hours(self):
        # The remember checkbox label reflects the cookie age (hours).
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'ask again on this device for 4 hours')
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60 * 24 * 5)
    def test_with_remember_label_5_days(self):
        # The remember checkbox label reflects the cookie age (days).
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'ask again on this device for 5 days')
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)
    def test_without_remember(self):
        # Without ticking 'remember', no cookie is set and the next login
        # asks for a token again.
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertEqual(0, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))
        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=1)
    def test_expired(self):
        # An expired remember cookie no longer skips the token step and is
        # cleared from the client.
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))
        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)
        # Wait to expire
        sleep(1)
        # Login but expired remember cookie
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertFalse(any(
            key.startswith('remember-cookie_') and cookie.value
            for key, cookie in self.client.cookies.items()
        ))
    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)
    def test_wrong_signature(self):
        # A tampered remember cookie must not be trusted: the token step
        # is required again.
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))
        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)
        self.set_invalid_remember_cookie()
        # Login but invalid remember cookie
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
    @override_settings(
        TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60,
        OTP_HOTP_THROTTLE_FACTOR=60 * 60,
        OTP_TOTP_THROTTLE_FACTOR=60 * 60,
    )
    def test_remember_token_throttling(self):
        # An invalid remember cookie counts as a failed verification, so a
        # subsequent *valid* cookie is rejected while the device is
        # throttled, and accepted again after the throttle is reset.
        # Login
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        # Enter token
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))
        # Logout
        self.client.get(reverse('logout'))
        # Login having an invalid remember cookie
        self.set_invalid_remember_cookie()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        # Login with valid remember cookie but throttled
        self.client = self.client_class()
        self.restore_remember_cookie()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        # Reset throttling
        self.device.throttle_reset()
        # Login with valid remember cookie
        self.client = self.client_class()
        self.restore_remember_cookie()
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        # Fix: use resolve_url() like every other redirect assertion in this
        # file — reverse() raises NoReverseMatch when LOGIN_REDIRECT_URL is a
        # plain path (Django's default), while resolve_url() handles both
        # URL names and paths.
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL), fetch_redirect_response=False)
    @mock.patch('two_factor.gateways.fake.Fake')
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    @override_settings(
        TWO_FACTOR_SMS_GATEWAY='two_factor.gateways.fake.Fake',
        TWO_FACTOR_CALL_GATEWAY='two_factor.gateways.fake.Fake',
        TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60,
    )
    def test_phonedevice_with_remember_cookie(self, mock_signal, fake):
        # The remember cookie also works for phone (SMS) devices: with a
        # valid cookie no new SMS challenge is required.
        self.user.totpdevice_set.first().delete()
        device = self.user.phonedevice_set.create(name='default', number='+31101234567', method='sms')
        # Ask for SMS challenge
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'We sent you a text message')
        test_call_kwargs = fake.return_value.send_sms.call_args[1]
        self.assertEqual(test_call_kwargs['device'], device)
        # Valid token should be accepted.
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        # Logout
        self.client.get(reverse('logout'))
        # Ask for SMS challenge
        response = self._post({'auth-username': '<EMAIL>',
                               'auth-password': '<PASSWORD>',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
|
[
"importlib.import_module",
"django.test.RequestFactory",
"django_otp.util.random_hex",
"time.sleep",
"unittest.mock.patch",
"django.urls.reverse",
"two_factor.views.core.LoginView.get_initkwargs",
"django.test.utils.override_settings",
"django.shortcuts.resolve_url",
"django_otp.oath.totp"
] |
[((24337, 24387), 'django.test.utils.override_settings', 'override_settings', ([], {'ROOT_URLCONF': '"""tests.urls_admin"""'}), "(ROOT_URLCONF='tests.urls_admin')\n", (24354, 24387), False, 'from django.test.utils import override_settings\n'), ((1136, 1198), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.signals.user_verified.send"""'], {}), "('two_factor.views.core.signals.user_verified.send')\n", (1146, 1198), False, 'from unittest import mock\n'), ((3996, 4036), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.time"""'], {}), "('two_factor.views.core.time')\n", (4006, 4036), False, 'from unittest import mock\n'), ((4870, 4910), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.time"""'], {}), "('two_factor.views.core.time')\n", (4880, 4910), False, 'from unittest import mock\n'), ((5881, 5923), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.logger"""'], {}), "('two_factor.views.core.logger')\n", (5891, 5923), False, 'from unittest import mock\n'), ((5929, 5969), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.time"""'], {}), "('two_factor.views.core.time')\n", (5939, 5969), False, 'from unittest import mock\n'), ((7475, 7520), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_LOGIN_TIMEOUT': '(0)'}), '(TWO_FACTOR_LOGIN_TIMEOUT=0)\n', (7492, 7520), False, 'from django.test.utils import override_settings\n'), ((7526, 7566), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.time"""'], {}), "('two_factor.views.core.time')\n", (7536, 7566), False, 'from unittest import mock\n'), ((9651, 9713), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.signals.user_verified.send"""'], {}), "('two_factor.views.core.signals.user_verified.send')\n", (9661, 9713), False, 'from unittest import mock\n'), ((11285, 11347), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.signals.user_verified.send"""'], {}), 
"('two_factor.views.core.signals.user_verified.send')\n", (11295, 11347), False, 'from unittest import mock\n'), ((12151, 12194), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.gateways.fake.Fake"""'], {}), "('two_factor.gateways.fake.Fake')\n", (12161, 12194), False, 'from unittest import mock\n'), ((12200, 12262), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.signals.user_verified.send"""'], {}), "('two_factor.views.core.signals.user_verified.send')\n", (12210, 12262), False, 'from unittest import mock\n'), ((12268, 12402), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_SMS_GATEWAY': '"""two_factor.gateways.fake.Fake"""', 'TWO_FACTOR_CALL_GATEWAY': '"""two_factor.gateways.fake.Fake"""'}), "(TWO_FACTOR_SMS_GATEWAY='two_factor.gateways.fake.Fake',\n TWO_FACTOR_CALL_GATEWAY='two_factor.gateways.fake.Fake')\n", (12285, 12402), False, 'from django.test.utils import override_settings\n'), ((15735, 15797), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.signals.user_verified.send"""'], {}), "('two_factor.views.core.signals.user_verified.send')\n", (15745, 15797), False, 'from unittest import mock\n'), ((17412, 17455), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.utils.logger"""'], {}), "('two_factor.views.utils.logger')\n", (17422, 17455), False, 'from unittest import mock\n'), ((18481, 18524), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.utils.logger"""'], {}), "('two_factor.views.utils.logger')\n", (18491, 18524), False, 'from unittest import mock\n'), ((23628, 23692), 'django.test.utils.override_settings', 'override_settings', ([], {'LOGOUT_REDIRECT_URL': '"""custom-field-name-login"""'}), "(LOGOUT_REDIRECT_URL='custom-field-name-login')\n", (23645, 23692), False, 'from django.test.utils import override_settings\n'), ((23968, 24023), 'django.test.utils.override_settings', 'override_settings', ([], {'LOGOUT_URL': '"""custom-field-name-login"""'}), 
"(LOGOUT_URL='custom-field-name-login')\n", (23985, 24023), False, 'from django.test.utils import override_settings\n'), ((25241, 25298), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)\n', (25258, 25298), False, 'from django.test.utils import override_settings\n'), ((26505, 26561), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 3)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 3)\n', (26522, 26561), False, 'from django.test.utils import override_settings\n'), ((26898, 26959), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60 * 4)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60 * 4)\n', (26915, 26959), False, 'from django.test.utils import override_settings\n'), ((27296, 27362), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60 * 24 * 5)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60 * 24 * 5)\n', (27313, 27362), False, 'from django.test.utils import override_settings\n'), ((27697, 27754), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)\n', (27714, 27754), False, 'from django.test.utils import override_settings\n'), ((28736, 28787), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(1)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=1)\n', (28753, 28787), False, 'from django.test.utils import override_settings\n'), ((30050, 30107), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)\n', (30067, 30107), False, 'from django.test.utils import override_settings\n'), ((31214, 31343), 'django.test.utils.override_settings', 'override_settings', ([], 
{'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60)', 'OTP_HOTP_THROTTLE_FACTOR': '(60 * 60)', 'OTP_TOTP_THROTTLE_FACTOR': '(60 * 60)'}), '(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60,\n OTP_HOTP_THROTTLE_FACTOR=60 * 60, OTP_TOTP_THROTTLE_FACTOR=60 * 60)\n', (31231, 31343), False, 'from django.test.utils import override_settings\n'), ((33269, 33312), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.gateways.fake.Fake"""'], {}), "('two_factor.gateways.fake.Fake')\n", (33279, 33312), False, 'from unittest import mock\n'), ((33318, 33380), 'unittest.mock.patch', 'mock.patch', (['"""two_factor.views.core.signals.user_verified.send"""'], {}), "('two_factor.views.core.signals.user_verified.send')\n", (33328, 33380), False, 'from unittest import mock\n'), ((33386, 33564), 'django.test.utils.override_settings', 'override_settings', ([], {'TWO_FACTOR_SMS_GATEWAY': '"""two_factor.gateways.fake.Fake"""', 'TWO_FACTOR_CALL_GATEWAY': '"""two_factor.gateways.fake.Fake"""', 'TWO_FACTOR_REMEMBER_COOKIE_AGE': '(60 * 60)'}), "(TWO_FACTOR_SMS_GATEWAY='two_factor.gateways.fake.Fake',\n TWO_FACTOR_CALL_GATEWAY='two_factor.gateways.fake.Fake',\n TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)\n", (33403, 33564), False, 'from django.test.utils import override_settings\n'), ((1729, 1756), 'django.urls.reverse', 'reverse', (['"""two_factor:setup"""'], {}), "('two_factor:setup')\n", (1736, 1756), False, 'from django.urls import reverse\n'), ((2167, 2194), 'django.urls.reverse', 'reverse', (['"""two_factor:setup"""'], {}), "('two_factor:setup')\n", (2174, 2194), False, 'from django.urls import reverse\n'), ((2597, 2624), 'django.urls.reverse', 'reverse', (['"""two_factor:setup"""'], {}), "('two_factor:setup')\n", (2604, 2624), False, 'from django.urls import reverse\n'), ((9329, 9380), 'django.urls.reverse', 'reverse', (['"""custom-redirect-authenticated-user-login"""'], {}), "('custom-redirect-authenticated-user-login')\n", (9336, 9380), False, 'from django.urls import reverse\n'), ((21323, 21361), 
'importlib.import_module', 'import_module', (['settings.SESSION_ENGINE'], {}), '(settings.SESSION_ENGINE)\n', (21336, 21361), False, 'from importlib import import_module\n'), ((22645, 22680), 'django.urls.reverse', 'reverse', (['"""two_factor:backup_tokens"""'], {}), "('two_factor:backup_tokens')\n", (22652, 22680), False, 'from django.urls import reverse\n'), ((29589, 29597), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (29594, 29597), False, 'from time import sleep\n'), ((607, 634), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (614, 634), False, 'from django.urls import reverse\n'), ((708, 735), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (715, 735), False, 'from django.urls import reverse\n'), ((1498, 1538), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (1509, 1538), False, 'from django.shortcuts import resolve_url\n'), ((3928, 3957), 'django.urls.reverse', 'reverse', (['"""two_factor:profile"""'], {}), "('two_factor:profile')\n", (3935, 3957), False, 'from django.urls import reverse\n'), ((8636, 8676), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (8647, 8676), False, 'from django.shortcuts import resolve_url\n'), ((8903, 8954), 'django.urls.reverse', 'reverse', (['"""custom-redirect-authenticated-user-login"""'], {}), "('custom-redirect-authenticated-user-login')\n", (8910, 8954), False, 'from django.urls import reverse\n'), ((9103, 9154), 'django.urls.reverse', 'reverse', (['"""custom-redirect-authenticated-user-login"""'], {}), "('custom-redirect-authenticated-user-login')\n", (9110, 9154), False, 'from django.urls import reverse\n'), ((9204, 9233), 'django.urls.reverse', 'reverse', (['"""two_factor:profile"""'], {}), "('two_factor:profile')\n", (9211, 9233), False, 'from django.urls import reverse\n'), ((10972, 
11012), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (10983, 11012), False, 'from django.shortcuts import resolve_url\n'), ((17220, 17260), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (17231, 17260), False, 'from django.shortcuts import resolve_url\n'), ((17881, 17908), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (17888, 17908), False, 'from django.urls import reverse\n'), ((19036, 19076), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (19047, 19076), False, 'from django.shortcuts import resolve_url\n'), ((19306, 19346), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (19317, 19346), False, 'from django.shortcuts import resolve_url\n'), ((20613, 20653), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (20624, 20653), False, 'from django.shortcuts import resolve_url\n'), ((21159, 21199), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (21170, 21199), False, 'from django.shortcuts import resolve_url\n'), ((21288, 21304), 'django.urls.reverse', 'reverse', (['"""login"""'], {}), "('login')\n", (21295, 21304), False, 'from django.urls import reverse\n'), ((22485, 22520), 'django.urls.reverse', 'reverse', (['"""two_factor:backup_tokens"""'], {}), "('two_factor:backup_tokens')\n", (22492, 22520), False, 'from django.urls import reverse\n'), ((23477, 23504), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (23484, 23504), False, 'from django.urls import reverse\n'), ((23788, 23815), 'django.urls.reverse', 'reverse', 
(['"""two_factor:login"""'], {}), "('two_factor:login')\n", (23795, 23815), False, 'from django.urls import reverse\n'), ((23926, 23960), 'django.urls.reverse', 'reverse', (['"""custom-field-name-login"""'], {}), "('custom-field-name-login')\n", (23933, 23960), False, 'from django.urls import reverse\n'), ((24298, 24332), 'django.urls.reverse', 'reverse', (['"""custom-field-name-login"""'], {}), "('custom-field-name-login')\n", (24305, 24332), False, 'from django.urls import reverse\n'), ((24671, 24698), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (24678, 24698), False, 'from django.urls import reverse\n'), ((25829, 25858), 'django.urls.reverse', 'reverse', (['"""two_factor:profile"""'], {}), "('two_factor:profile')\n", (25836, 25858), False, 'from django.urls import reverse\n'), ((26048, 26065), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (26055, 26065), False, 'from django.urls import reverse\n'), ((28351, 28368), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (28358, 28368), False, 'from django.urls import reverse\n'), ((29430, 29447), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (29437, 29447), False, 'from django.urls import reverse\n'), ((30758, 30775), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (30765, 30775), False, 'from django.urls import reverse\n'), ((32053, 32070), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (32060, 32070), False, 'from django.urls import reverse\n'), ((33194, 33230), 'django.urls.reverse', 'reverse', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (33201, 33230), False, 'from django.urls import reverse\n'), ((34512, 34552), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (34523, 34552), False, 'from django.shortcuts import resolve_url\n'), 
((34596, 34613), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (34603, 34613), False, 'from django.urls import reverse\n'), ((34876, 34916), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (34887, 34916), False, 'from django.shortcuts import resolve_url\n'), ((4270, 4282), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (4280, 4282), False, 'from django_otp.util import random_hex\n'), ((5162, 5174), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (5172, 5174), False, 'from django_otp.util import random_hex\n'), ((6223, 6235), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (6233, 6235), False, 'from django_otp.util import random_hex\n'), ((7810, 7822), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (7820, 7822), False, 'from django_otp.util import random_hex\n'), ((9904, 9916), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (9914, 9916), False, 'from django_otp.util import random_hex\n'), ((11547, 11559), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (11557, 11559), False, 'from django_otp.util import random_hex\n'), ((15407, 15447), 'django.shortcuts.resolve_url', 'resolve_url', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (15418, 15447), False, 'from django.shortcuts import resolve_url\n'), ((15938, 15950), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (15948, 15950), False, 'from django_otp.util import random_hex\n'), ((20342, 20354), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (20352, 20354), False, 'from django_otp.util import random_hex\n'), ((21267, 21283), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (21281, 21283), False, 'from django.test import RequestFactory, TestCase\n'), ((21447, 21473), 'two_factor.views.core.LoginView.get_initkwargs', 'LoginView.get_initkwargs', ([], {}), '()\n', 
(21471, 21473), False, 'from two_factor.views.core import LoginView\n'), ((24160, 24187), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (24167, 24187), False, 'from django.urls import reverse\n'), ((24592, 24604), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (24602, 24604), False, 'from django_otp.util import random_hex\n'), ((1844, 1871), 'django.urls.reverse', 'reverse', (['"""two_factor:login"""'], {}), "('two_factor:login')\n", (1851, 1871), False, 'from django.urls import reverse\n'), ((2712, 2746), 'django.urls.reverse', 'reverse', (['"""custom-field-name-login"""'], {}), "('custom-field-name-login')\n", (2719, 2746), False, 'from django.urls import reverse\n'), ((3178, 3221), 'django.urls.reverse', 'reverse', (['"""custom-allowed-success-url-login"""'], {}), "('custom-allowed-success-url-login')\n", (3185, 3221), False, 'from django.urls import reverse\n'), ((3685, 3728), 'django.urls.reverse', 'reverse', (['"""custom-allowed-success-url-login"""'], {}), "('custom-allowed-success-url-login')\n", (3692, 3728), False, 'from django.urls import reverse\n'), ((9553, 9604), 'django.urls.reverse', 'reverse', (['"""custom-redirect-authenticated-user-login"""'], {}), "('custom-redirect-authenticated-user-login')\n", (9560, 9604), False, 'from django.urls import reverse\n'), ((12675, 12687), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (12685, 12687), False, 'from django_otp.util import random_hex\n'), ((12965, 12977), 'django_otp.util.random_hex', 'random_hex', ([], {}), '()\n', (12975, 12977), False, 'from django_otp.util import random_hex\n'), ((14279, 14326), 'django_otp.oath.totp', 'totp', (['device.bin_key'], {'digits': 'no_digits', 'drift': 'i'}), '(device.bin_key, digits=no_digits, drift=i)\n', (14283, 14326), False, 'from django_otp.oath import totp\n'), ((15049, 15096), 'django_otp.oath.totp', 'totp', (['device.bin_key'], {'digits': 'no_digits', 'drift': 'i'}), 
'(device.bin_key, digits=no_digits, drift=i)\n', (15053, 15096), False, 'from django_otp.oath import totp\n')]
|
# Copyright (C) 2014 <NAME>
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import warnings
import numpy as np
from spglib import (
get_stabilized_reciprocal_mesh, relocate_BZ_grid_address,
get_symmetry_dataset, get_pointgroup)
from phonopy.structure.brillouin_zone import get_qpoints_in_Brillouin_zone
from phonopy.structure.symmetry import (
get_lattice_vector_equivalence, get_pointgroup_operations,
collect_unique_rotations)
from phonopy.structure.cells import (
get_primitive_matrix_by_centring, estimate_supercell_matrix,
estimate_supercell_matrix_from_pointgroup, determinant)
from phonopy.structure.snf import SNF3x3
from phonopy.harmonic.force_constants import similarity_transformation
def length2mesh(length, lattice, rotations=None):
    """Convert length to mesh for q-point sampling

    This conversion for each reciprocal axis follows VASP convention by
        N = max(1, int(l * |a|^* + 0.5))
    'int' means rounding down, not rounding to nearest integer.

    Parameters
    ----------
    length : float
        Length having the unit of direct space length.
    lattice : array_like
        Basis vectors of primitive cell in row vectors.
        dtype='double', shape=(3, 3)
    rotations: array_like, optional
        Rotation matrices in real space. When given, mesh numbers that are
        symmetrically reasonable are returned. Default is None.
        dtype='intc', shape=(rotations, 3, 3)

    Returns
    -------
    array_like
        dtype=int, shape=(3,)

    """
    rec_lattice = np.linalg.inv(lattice)
    # |a*|, |b*|, |c*| from the metric of the reciprocal lattice.
    rec_lat_lengths = np.sqrt(np.diagonal(np.dot(rec_lattice.T, rec_lattice)))
    # NOTE(review): np.rint rounds half to even, which can differ from
    # the documented int(x + 0.5) exactly at half-integer values.
    mesh_numbers = np.rint(rec_lat_lengths * length).astype(int)

    if rotations is not None:
        reclat_equiv = get_lattice_vector_equivalence(
            [r.T for r in np.array(rotations)])
        m = mesh_numbers
        mesh_equiv = [m[1] == m[2], m[2] == m[0], m[0] == m[1]]
        for i, pair in enumerate(([1, 2], [2, 0], [0, 1])):
            # Bug fix: test the per-pair flag mesh_equiv[i].  The original
            # tested the whole list (`not mesh_equiv`), which is always
            # falsy-negated to False, so equivalent axes were never
            # equalized.
            if reclat_equiv[i] and not mesh_equiv[i]:
                m[pair] = max(m[pair])

    return np.maximum(mesh_numbers, [1, 1, 1])
def get_qpoints(mesh_numbers,
                reciprocal_lattice,  # column vectors
                q_mesh_shift=None,  # Monkhorst-Pack style grid shift
                is_gamma_center=True,
                is_time_reversal=True,
                fit_in_BZ=True,
                rotations=None,  # Point group operations in real space
                is_mesh_symmetry=True):
    """Return ``(qpoints, weights)`` sampled on a uniform mesh.

    Thin convenience wrapper around ``GridPoints``; see that class for
    the meaning of each argument.
    """
    grid = GridPoints(mesh_numbers,
                      reciprocal_lattice,
                      q_mesh_shift=q_mesh_shift,
                      is_gamma_center=is_gamma_center,
                      is_time_reversal=is_time_reversal,
                      fit_in_BZ=fit_in_BZ,
                      rotations=rotations,
                      is_mesh_symmetry=is_mesh_symmetry)
    return grid.qpoints, grid.weights
def extract_ir_grid_points(grid_mapping_table):
    """Return irreducible grid point indices and their weights.

    Parameters
    ----------
    grid_mapping_table : ndarray
        Index mapping from every grid point to its irreducible
        representative (as produced by spglib).
        shape=(num_grid_points,)

    Returns
    -------
    ir_grid_points : ndarray
        Sorted unique irreducible grid point indices.
        dtype is that of grid_mapping_table.
    ir_weights : ndarray
        Multiplicity of each irreducible grid point; sums to the total
        number of grid points.
        dtype='intc'

    """
    # np.unique with return_counts replaces the original Python counting
    # loop: the count of each unique value is exactly its weight.
    unique_gps, counts = np.unique(grid_mapping_table, return_counts=True)
    ir_grid_points = np.array(unique_gps, dtype=grid_mapping_table.dtype)
    ir_weights = np.array(counts, dtype='intc')
    return ir_grid_points, ir_weights
class GridPoints(object):
    """Class to generate irreducible grid points on uniform mesh grids
    Attributes
    ----------
    mesh_numbers: ndarray
        Mesh numbers along a, b, c axes.
        dtype='intc'
        shape=(3,)
    reciprocal_lattice: array_like
        Basis vectors in reciprocal space. a*, b*, c* are given in column
        vectors.
        dtype='double'
        shape=(3, 3)
    qpoints: ndarray
        q-points in reduced coordinates of reciprocal lattice
        dtype='double'
        shape=(ir-grid points, 3)
    weights: ndarray
        Geometric q-point weights. Its sum is the number of grid points.
        dtype='intc'
        shape=(ir-grid points,)
    grid_address: ndarray
        Addresses of all grid points represented by integers.
        dtype='intc'
        shape=(prod(mesh_numbers), 3)
    ir_grid_points: ndarray
        Indices of irreducible grid points in grid_address.
        dtype='uintp', shape=(ir-grid points,)
    grid_mapping_table: ndarray
        Index mapping table from all grid points to ir-grid points.
        dtype='uintp', shape=(prod(mesh_numbers),)
    """
    def __init__(self,
                 mesh_numbers,
                 reciprocal_lattice,
                 q_mesh_shift=None,  # Monkhorst-Pack style grid shift
                 is_gamma_center=True,
                 is_time_reversal=True,
                 fit_in_BZ=True,
                 rotations=None,  # Point group operations in real space
                 is_mesh_symmetry=True):  # Except for time reversal symmetry
        """
        Note
        ----
        Uniform mesh grids are made according to Monkhorst-Pack scheme, i.e.,
        for odd (even) numbers, centre are (are not) sampled. The Gamma-centre
        sampling is supported by ``is_gamma_center=True``.
        Parameters
        ----------
        mesh_numbers: array_like
            Mesh numbers along a, b, c axes.
            dtype='intc'
            shape=(3, )
        reciprocal_lattice: array_like
            Basis vectors in reciprocal space. a*, b*, c* are given in column
            vectors.
            dtype='double'
            shape=(3, 3)
        q_mesh_shift: array_like, optional, default None (no shift)
            Mesh shifts along a*, b*, c* axes with respect to neighboring grid
            points from the original mesh (Monkhorst-Pack or Gamma center).
            0.5 gives half grid shift. Normally 0 or 0.5 is given.
            Otherwise q-points symmetry search is not performed.
            dtype='double'
            shape=(3, )
        is_gamma_center: bool, default True
            Uniform mesh grids are generated centring at Gamma point but not
            the Monkhorst-Pack scheme.
        is_time_reversal: bool, optional, default True
            Time reversal symmetry is considered in symmetry search. By this,
            inversion symmetry is always included.
        fit_in_BZ: bool, optional, default True
        rotations: array_like, default None (only unitary operation)
            Rotation matrices in direct space. For each rotation matrix R,
            a point in crystallographic coordinates, x, is sent as x' = Rx.
            dtype='intc'
            shape=(rotations, 3, 3)
        is_mesh_symmetry: bool, optional, default True
            Whether symmetry search is done or not.
        """
        self._mesh = np.array(mesh_numbers, dtype='intc')
        self._rec_lat = reciprocal_lattice
        # _is_shift is a per-axis list of flags for zero/half grid shift,
        # or None when q_mesh_shift is an arbitrary (non 0/0.5) shift.
        self._is_shift = self._shift2boolean(q_mesh_shift,
                                             is_gamma_center=is_gamma_center)
        self._is_time_reversal = is_time_reversal
        self._fit_in_BZ = fit_in_BZ
        self._rotations = rotations
        self._is_mesh_symmetry = is_mesh_symmetry
        self._ir_qpoints = None
        self._grid_address = None
        self._ir_grid_points = None
        self._ir_weights = None
        self._grid_mapping_table = None
        if self._is_shift is None:
            # Arbitrary shift: symmetry reduction is not applicable, so
            # an unshifted grid is generated and shifted afterwards.
            self._is_mesh_symmetry = False
            self._is_shift = self._shift2boolean(None)
            self._set_grid_points()
            self._ir_qpoints += q_mesh_shift / self._mesh
            self._fit_qpoints_in_BZ()
        else:  # zero or half shift
            self._set_grid_points()
    @property
    def mesh_numbers(self):
        """Mesh numbers along a, b, c axes (ndarray, dtype='intc')."""
        return self._mesh
    @property
    def reciprocal_lattice(self):
        """Reciprocal basis vectors given as column vectors."""
        return self._rec_lat
    @property
    def grid_address(self):
        """Integer addresses of all grid points."""
        return self._grid_address
    def get_grid_address(self):
        """Deprecated; use the ``grid_address`` attribute."""
        warnings.warn("GridPoints.get_grid_address is deprecated."
                      "Use grid_address attribute.",
                      DeprecationWarning)
        return self.grid_address
    @property
    def ir_grid_points(self):
        """Indices of irreducible grid points in grid_address."""
        return self._ir_grid_points
    def get_ir_grid_points(self):
        """Deprecated; use the ``ir_grid_points`` attribute."""
        warnings.warn("GridPoints.get_ir_grid_points is deprecated."
                      "Use ir_grid_points attribute.",
                      DeprecationWarning)
        return self.ir_grid_points
    @property
    def qpoints(self):
        """Irreducible q-points in reduced coordinates."""
        return self._ir_qpoints
    def get_ir_qpoints(self):
        """Deprecated; use the ``qpoints`` attribute."""
        # NOTE(review): the message says "points attribute" but the
        # property is named ``qpoints`` -- looks like a typo in the
        # message text (left unchanged here).
        warnings.warn("GridPoints.get_ir_qpoints is deprecated."
                      "Use points attribute.",
                      DeprecationWarning)
        return self.qpoints
    @property
    def weights(self):
        """Geometric q-point weights; sums to the number of grid points."""
        return self._ir_weights
    def get_ir_grid_weights(self):
        """Deprecated; use the ``weights`` attribute."""
        warnings.warn("GridPoints.get_ir_grid_weights is deprecated."
                      "Use weights attribute.",
                      DeprecationWarning)
        return self.weights
    @property
    def grid_mapping_table(self):
        """Mapping from every grid point to its irreducible grid point."""
        return self._grid_mapping_table
    def get_grid_mapping_table(self):
        """Deprecated; use the ``grid_mapping_table`` attribute."""
        warnings.warn("GridPoints.get_grid_mapping_table is deprecated."
                      "Use grid_mapping_table attribute.",
                      DeprecationWarning)
        return self.grid_mapping_table
    def _set_grid_points(self):
        """Search ir-qpoints, using mesh symmetry when it is applicable."""
        if self._is_mesh_symmetry and self._has_mesh_symmetry():
            self._set_ir_qpoints(self._rotations,
                                 is_time_reversal=self._is_time_reversal)
        else:
            # No usable mesh symmetry: reduce with the identity operation
            # only (time reversal may still be applied).
            self._set_ir_qpoints([np.eye(3, dtype='intc')],
                                 is_time_reversal=self._is_time_reversal)
    def _shift2boolean(self,
                       q_mesh_shift,
                       is_gamma_center=False,
                       tolerance=1e-5):
        """Convert a mesh shift into per-axis flags for spglib.

        Returns a per-axis list of flags (``is_shift`` for spglib) when
        the shift is zero or half on every axis, or None otherwise.
        Tolerance is used to judge zero/half grid shift.
        This value is not necessary to be changed usually.
        """
        if q_mesh_shift is None:
            shift = np.zeros(3, dtype='double')
        else:
            shift = np.array(q_mesh_shift, dtype='double')
        # Distance of 2*shift from the nearest integer: small iff each
        # component of shift is (close to) a multiple of 0.5.
        diffby2 = np.abs(shift * 2 - np.rint(shift * 2))
        if (diffby2 < 0.01).all():  # zero or half shift
            diff = np.abs(shift - np.rint(shift))
            if is_gamma_center:
                is_shift = list(diff > 0.1)
            else:  # Monkhorst-pack
                # NOTE(review): the flag is XOR-ed with "mesh number is
                # even" -- presumably because even MP meshes are already
                # half-shifted relative to Gamma; confirm against spglib.
                is_shift = list(np.logical_xor((diff > 0.1),
                                               (self._mesh % 2 == 0)) * 1)
        else:
            is_shift = None
        return is_shift
    def _has_mesh_symmetry(self):
        """Return True if mesh numbers agree with lattice-vector equivalence."""
        if self._rotations is None:
            return False
        m = self._mesh
        mesh_equiv = [m[1] == m[2], m[2] == m[0], m[0] == m[1]]
        lattice_equiv = get_lattice_vector_equivalence(
            [r.T for r in self._rotations])
        # Every symmetry-equivalent axis pair must have equal mesh numbers.
        return np.extract(lattice_equiv, mesh_equiv).all()
    def _fit_qpoints_in_BZ(self):
        """Move the ir-qpoints into the first Brillouin zone."""
        qpoint_set_in_BZ = get_qpoints_in_Brillouin_zone(self._rec_lat,
                                                      self._ir_qpoints)
        # Each returned set may hold several equivalent points; keep the
        # first representative of each.
        qpoints_in_BZ = np.array([q_set[0] for q_set in qpoint_set_in_BZ],
                                 dtype='double', order='C')
        self._ir_qpoints = qpoints_in_BZ
    def _set_ir_qpoints(self,
                        rotations,
                        is_time_reversal=True):
        """Compute grid addresses, ir-grid points, weights and q-points.

        Uses spglib's stabilized reciprocal mesh; results are stored on
        the instance attributes.
        """
        grid_mapping_table, grid_address = get_stabilized_reciprocal_mesh(
            self._mesh,
            rotations,
            is_shift=self._is_shift,
            is_time_reversal=is_time_reversal)
        # Half-shift per axis in units of the grid spacing (0 or 0.5).
        shift = np.array(self._is_shift, dtype='intc') * 0.5
        if self._fit_in_BZ:
            grid_address, _ = relocate_BZ_grid_address(
                grid_address,
                self._mesh,
                self._rec_lat,
                is_shift=self._is_shift)
            # relocate_BZ_grid_address appears to return additional
            # entries beyond prod(mesh); only the first prod(mesh)
            # addresses are kept -- confirm against spglib docs.
            self._grid_address = grid_address[:np.prod(self._mesh)]
        else:
            self._grid_address = grid_address
        (self._ir_grid_points,
         self._ir_weights) = extract_ir_grid_points(grid_mapping_table)
        # q-points in reduced coordinates: (address + shift) / mesh.
        self._ir_qpoints = np.array(
            (self._grid_address[self._ir_grid_points] + shift) / self._mesh,
            dtype='double', order='C')
        self._grid_mapping_table = grid_mapping_table
class GeneralizedRegularGridPoints(object):
"""Generalized regular grid points
Method strategy in suggest mode
-------------------------------
1. Create conventional unit cell using spglib.
2. Sample regular grid points for the conventional unit cell (mesh_numbers)
3. Transformation matrix from primitive to conventinal unit cell (inv_pmat)
4. Get supercell multiplicities (mesh_numbers) from the conventional unit
cell considering the lattice shape.
5. mmat = (inv_pmat * mesh_numbers).T, which is related to the
transformation from primitive cell to supercell.
6. D = P.mmat.Q, where D = diag([n1, n2, n3])
7. Grid points for primitive cell are
[np.dot(Q, g) for g in ndindex((n1, n2, n3))].
Method strategy in non-suggest mode
-----------------------------------
1. Find symmetry operations
2. Determine point group and transformation matrix (tmat) from input cell
3. Get supercell multiplicities (mesh_numbers) from the transformed cell
considering the lattice shape.
4. mmat = (tmat * mesh_numbers).T
5. D = P.mmat.Q, where D = diag([n1, n2, n3])
6. Grid points for primitive cell are
[np.dot(Q, g) for g in ndindex((n1, n2, n3))].
Attributes
----------
grid_address : ndarray
Grid addresses in integers.
shape=(num_grid_points, 3), dtype='intc', order='C'
qpoints : ndarray
q-points with respect to basis vectors of input or standardized
primitive cell.
shape=(num_grid_points, 3), dtype='intc', order='C'
grid_matrix : ndarray
Grid generating matrix.
shape=(3,3), dtype='intc', order='C'
matrix_to_primitive : ndarray or None
None when ``suggest`` is False. Otherwise, transformation matrix from
input cell to the suggested primitive cell.
shape=(3,3), dtype='double', order='C'
snf : SNF3x3
SNF3x3 instance of grid generating matrix.
"""
def __init__(self,
cell,
length,
suggest=True,
is_time_reversal=True,
x_fastest=True,
symprec=1e-5):
"""
Parameters
----------
cell : PhonopyAtoms
Input cell.
length : float
Length having the unit of direct space length.
suggest : bool, optional, default True
With True, a standardized primitive cell is suggested and the grids
are generated for it. With False, input cell is used.
is_time_reversal: bool, optional, default True
Time reversal symmetry is considered in symmetry search. By this,
inversion symmetry is always included.
x_fastest : bool, optional, default=True
In grid generation, [[x, y, z], ...], x runs fastest when True,
otherwise z runs fastest.
"""
self._cell = cell
self._length = length
self._suggest = suggest
self._is_time_reversal = is_time_reversal
self._x_fastest = x_fastest
self._grid_address = None
self._snf = None
self._transformation_matrix = None
self._grid_matrix = None
self._reciprocal_operations = None
self._prepare(cell, length, symprec)
self._generate_grid_points()
self._generate_q_points()
self._reciprocal_operations = get_reciprocal_operations(
self._sym_dataset['rotations'],
self._transformation_matrix,
self._snf.D,
self._snf.Q,
is_time_reversal=self._is_time_reversal)
@property
def grid_address(self):
return self._grid_address
@property
def qpoints(self):
return self._qpoints
@property
def grid_matrix(self):
"""Grid generating matrix"""
return self._grid_matrix
@property
def transformation_matrix(self):
"""Transformation matrix"""
return self._transformation_matrix
@property
def snf(self):
"""SNF3x3 instance of grid generating matrix"""
return self._snf
@property
def reciprocal_operations(self):
return self._reciprocal_operations
def _prepare(self, cell, length, symprec):
"""Define grid generating matrix and run the SNF"""
self._sym_dataset = get_symmetry_dataset(
cell.totuple(), symprec=symprec)
if self._suggest:
self._set_grid_matrix_by_std_primitive_cell(cell, length)
else:
self._set_grid_matrix_by_input_cell(cell, length)
self._snf = SNF3x3(self._grid_matrix)
self._snf.run()
def _set_grid_matrix_by_std_primitive_cell(self, cell, length):
"""Grid generating matrix based on standeardized primitive cell"""
tmat = self._sym_dataset['transformation_matrix']
centring = self._sym_dataset['international'][0]
pmat = get_primitive_matrix_by_centring(centring)
conv_lat = np.dot(np.linalg.inv(tmat).T, cell.cell)
num_cells = np.prod(length2mesh(length, conv_lat))
mesh_numbers = estimate_supercell_matrix(
self._sym_dataset,
max_num_atoms=num_cells * len(self._sym_dataset['std_types']))
inv_pmat = np.linalg.inv(pmat)
inv_pmat_int = np.rint(inv_pmat).astype(int)
assert (np.abs(inv_pmat - inv_pmat_int) < 1e-5).all()
# transpose in reciprocal space
self._grid_matrix = np.array(
(inv_pmat_int * mesh_numbers).T, dtype='intc', order='C')
# From input lattice to the primitive lattice in real space
self._transformation_matrix = np.array(
np.dot(np.linalg.inv(tmat), pmat), dtype='double', order='C')
def _set_grid_matrix_by_input_cell(self, input_cell, length):
"""Grid generating matrix based on input cell"""
pointgroup = get_pointgroup(self._sym_dataset['rotations'])
# tmat: From input lattice to point group preserving lattice
tmat = pointgroup[2]
lattice = np.dot(input_cell.cell.T, tmat).T
num_cells = np.prod(length2mesh(length, lattice))
mesh_numbers = estimate_supercell_matrix_from_pointgroup(
pointgroup[1], lattice, num_cells)
# transpose in reciprocal space
self._grid_matrix = np.array(
np.multiply(tmat, mesh_numbers).T, dtype='intc', order='C')
self._transformation_matrix = np.eye(3, dtype='double', order='C')
def _generate_grid_points(self):
d = np.diagonal(self._snf.D)
if self._x_fastest:
# x runs fastest.
z, y, x = np.meshgrid(range(d[2]), range(d[1]), range(d[0]),
indexing='ij')
else:
# z runs fastest.
x, y, z = np.meshgrid(range(d[0]), range(d[1]), range(d[2]),
indexing='ij')
self._grid_address = np.array(np.c_[x.ravel(), y.ravel(), z.ravel()],
dtype='intc', order='C')
def _generate_q_points(self):
D_inv = np.linalg.inv(self._snf.D)
qpoints = np.dot(
self._grid_address, np.dot(self._snf.Q, D_inv).T)
qpoints -= np.rint(qpoints)
self._qpoints = qpoints
def get_reciprocal_operations(rotations,
transformation_matrix,
D,
Q,
is_time_reversal=True):
"""Generate reciprocal rotation matrices
Collect unique real space rotation matrices and transpose them.
When is_time_reversal=True, inversion is added if it is not in the
list of the rotation matrices.
Parameters
----------
rotations : ndarray
Rotation matrices in real space. x' = Rx.
shape=(rotations, 3, 3), dtype='intc'
transformation_matrxi : array_like
Transformation matrix of basis vectors in real space. Using this
rotation matrices are transformed.
D : array_like
D of smith normal form 3x3.
shape=(3, 3)
Q : array_like
Q of smith normal form 3x3.
shape=(3, 3)
is_time_reversal : bool
When True, inversion operation is added.
Returns
-------
rotations_for_Q : ndarray
Rotation matrices in reciprocal space. Grid points are sent by the
symmetrically equivalent grid points as follows:
g' = (R_Q g) % diagonal(D)
shape=(rotations, 3, 3), dtype='intc', order='C'
"""
unique_rots = []
tmat_inv = np.linalg.inv(transformation_matrix)
for r in collect_unique_rotations(rotations):
_r = similarity_transformation(tmat_inv, r)
_r_int = np.rint(_r).astype(int)
assert (np.abs(_r - _r_int) < 1e-5).all()
unique_rots.append(_r_int)
ptg_ops, rec_ops = get_pointgroup_operations(
unique_rots, is_time_reversal=is_time_reversal)
Q_inv = np.linalg.inv(Q)
rec_ops_Q = []
for r in rec_ops:
_r = similarity_transformation(Q_inv, r)
_r = similarity_transformation(D, _r)
_r_int = np.rint(_r).astype(int)
assert (np.abs(_r - _r_int) < 1e-5).all()
assert abs(determinant(_r_int)) == 1
rec_ops_Q.append(_r_int)
return np.array(rec_ops_Q, dtype='intc', order='C')
|
[
"numpy.maximum",
"numpy.abs",
"phonopy.structure.snf.SNF3x3",
"spglib.relocate_BZ_grid_address",
"phonopy.structure.cells.determinant",
"numpy.unique",
"numpy.prod",
"phonopy.structure.symmetry.get_pointgroup_operations",
"numpy.zeros_like",
"numpy.multiply",
"numpy.extract",
"numpy.logical_xor",
"spglib.get_stabilized_reciprocal_mesh",
"phonopy.structure.cells.estimate_supercell_matrix_from_pointgroup",
"numpy.diagonal",
"phonopy.structure.cells.get_primitive_matrix_by_centring",
"phonopy.structure.symmetry.collect_unique_rotations",
"spglib.get_pointgroup",
"numpy.linalg.inv",
"phonopy.structure.brillouin_zone.get_qpoints_in_Brillouin_zone",
"phonopy.structure.symmetry.get_lattice_vector_equivalence",
"numpy.dot",
"numpy.zeros",
"phonopy.harmonic.force_constants.similarity_transformation",
"numpy.rint",
"numpy.array",
"numpy.eye",
"warnings.warn"
] |
[((3043, 3065), 'numpy.linalg.inv', 'np.linalg.inv', (['lattice'], {}), '(lattice)\n', (3056, 3065), True, 'import numpy as np\n'), ((3595, 3630), 'numpy.maximum', 'np.maximum', (['mesh_numbers', '[1, 1, 1]'], {}), '(mesh_numbers, [1, 1, 1])\n', (3605, 3630), True, 'import numpy as np\n'), ((4596, 4629), 'numpy.zeros_like', 'np.zeros_like', (['grid_mapping_table'], {}), '(grid_mapping_table)\n', (4609, 4629), True, 'import numpy as np\n'), ((4720, 4767), 'numpy.array', 'np.array', (['weights[ir_grid_points]'], {'dtype': '"""intc"""'}), "(weights[ir_grid_points], dtype='intc')\n", (4728, 4767), True, 'import numpy as np\n'), ((22423, 22459), 'numpy.linalg.inv', 'np.linalg.inv', (['transformation_matrix'], {}), '(transformation_matrix)\n', (22436, 22459), True, 'import numpy as np\n'), ((22473, 22508), 'phonopy.structure.symmetry.collect_unique_rotations', 'collect_unique_rotations', (['rotations'], {}), '(rotations)\n', (22497, 22508), False, 'from phonopy.structure.symmetry import get_lattice_vector_equivalence, get_pointgroup_operations, collect_unique_rotations\n'), ((22712, 22785), 'phonopy.structure.symmetry.get_pointgroup_operations', 'get_pointgroup_operations', (['unique_rots'], {'is_time_reversal': 'is_time_reversal'}), '(unique_rots, is_time_reversal=is_time_reversal)\n', (22737, 22785), False, 'from phonopy.structure.symmetry import get_lattice_vector_equivalence, get_pointgroup_operations, collect_unique_rotations\n'), ((22808, 22824), 'numpy.linalg.inv', 'np.linalg.inv', (['Q'], {}), '(Q)\n', (22821, 22824), True, 'import numpy as np\n'), ((23142, 23186), 'numpy.array', 'np.array', (['rec_ops_Q'], {'dtype': '"""intc"""', 'order': '"""C"""'}), "(rec_ops_Q, dtype='intc', order='C')\n", (23150, 23186), True, 'import numpy as np\n'), ((4489, 4518), 'numpy.unique', 'np.unique', (['grid_mapping_table'], {}), '(grid_mapping_table)\n', (4498, 4518), True, 'import numpy as np\n'), ((8191, 8227), 'numpy.array', 'np.array', (['mesh_numbers'], {'dtype': 
'"""intc"""'}), "(mesh_numbers, dtype='intc')\n", (8199, 8227), True, 'import numpy as np\n'), ((9358, 9473), 'warnings.warn', 'warnings.warn', (['"""GridPoints.get_grid_address is deprecated.Use grid_address attribute."""', 'DeprecationWarning'], {}), "(\n 'GridPoints.get_grid_address is deprecated.Use grid_address attribute.',\n DeprecationWarning)\n", (9371, 9473), False, 'import warnings\n'), ((9669, 9789), 'warnings.warn', 'warnings.warn', (['"""GridPoints.get_ir_grid_points is deprecated.Use ir_grid_points attribute."""', 'DeprecationWarning'], {}), "(\n 'GridPoints.get_ir_grid_points is deprecated.Use ir_grid_points attribute.'\n , DeprecationWarning)\n", (9682, 9789), False, 'import warnings\n'), ((9971, 10073), 'warnings.warn', 'warnings.warn', (['"""GridPoints.get_ir_qpoints is deprecated.Use points attribute."""', 'DeprecationWarning'], {}), "('GridPoints.get_ir_qpoints is deprecated.Use points attribute.',\n DeprecationWarning)\n", (9984, 10073), False, 'import warnings\n'), ((10259, 10372), 'warnings.warn', 'warnings.warn', (['"""GridPoints.get_ir_grid_weights is deprecated.Use weights attribute."""', 'DeprecationWarning'], {}), "(\n 'GridPoints.get_ir_grid_weights is deprecated.Use weights attribute.',\n DeprecationWarning)\n", (10272, 10372), False, 'import warnings\n'), ((10575, 10703), 'warnings.warn', 'warnings.warn', (['"""GridPoints.get_grid_mapping_table is deprecated.Use grid_mapping_table attribute."""', 'DeprecationWarning'], {}), "(\n 'GridPoints.get_grid_mapping_table is deprecated.Use grid_mapping_table attribute.'\n , DeprecationWarning)\n", (10588, 10703), False, 'import warnings\n'), ((12285, 12347), 'phonopy.structure.symmetry.get_lattice_vector_equivalence', 'get_lattice_vector_equivalence', (['[r.T for r in self._rotations]'], {}), '([r.T for r in self._rotations])\n', (12315, 12347), False, 'from phonopy.structure.symmetry import get_lattice_vector_equivalence, get_pointgroup_operations, collect_unique_rotations\n'), ((12482, 
12544), 'phonopy.structure.brillouin_zone.get_qpoints_in_Brillouin_zone', 'get_qpoints_in_Brillouin_zone', (['self._rec_lat', 'self._ir_qpoints'], {}), '(self._rec_lat, self._ir_qpoints)\n', (12511, 12544), False, 'from phonopy.structure.brillouin_zone import get_qpoints_in_Brillouin_zone\n'), ((12626, 12703), 'numpy.array', 'np.array', (['[q_set[0] for q_set in qpoint_set_in_BZ]'], {'dtype': '"""double"""', 'order': '"""C"""'}), "([q_set[0] for q_set in qpoint_set_in_BZ], dtype='double', order='C')\n", (12634, 12703), True, 'import numpy as np\n'), ((12935, 13053), 'spglib.get_stabilized_reciprocal_mesh', 'get_stabilized_reciprocal_mesh', (['self._mesh', 'rotations'], {'is_shift': 'self._is_shift', 'is_time_reversal': 'is_time_reversal'}), '(self._mesh, rotations, is_shift=self.\n _is_shift, is_time_reversal=is_time_reversal)\n', (12965, 13053), False, 'from spglib import get_stabilized_reciprocal_mesh, relocate_BZ_grid_address, get_symmetry_dataset, get_pointgroup\n'), ((13635, 13739), 'numpy.array', 'np.array', (['((self._grid_address[self._ir_grid_points] + shift) / self._mesh)'], {'dtype': '"""double"""', 'order': '"""C"""'}), "((self._grid_address[self._ir_grid_points] + shift) / self._mesh,\n dtype='double', order='C')\n", (13643, 13739), True, 'import numpy as np\n'), ((18456, 18481), 'phonopy.structure.snf.SNF3x3', 'SNF3x3', (['self._grid_matrix'], {}), '(self._grid_matrix)\n', (18462, 18481), False, 'from phonopy.structure.snf import SNF3x3\n'), ((18781, 18823), 'phonopy.structure.cells.get_primitive_matrix_by_centring', 'get_primitive_matrix_by_centring', (['centring'], {}), '(centring)\n', (18813, 18823), False, 'from phonopy.structure.cells import get_primitive_matrix_by_centring, estimate_supercell_matrix, estimate_supercell_matrix_from_pointgroup, determinant\n'), ((19118, 19137), 'numpy.linalg.inv', 'np.linalg.inv', (['pmat'], {}), '(pmat)\n', (19131, 19137), True, 'import numpy as np\n'), ((19321, 19387), 'numpy.array', 'np.array', (['(inv_pmat_int 
* mesh_numbers).T'], {'dtype': '"""intc"""', 'order': '"""C"""'}), "((inv_pmat_int * mesh_numbers).T, dtype='intc', order='C')\n", (19329, 19387), True, 'import numpy as np\n'), ((19737, 19783), 'spglib.get_pointgroup', 'get_pointgroup', (["self._sym_dataset['rotations']"], {}), "(self._sym_dataset['rotations'])\n", (19751, 19783), False, 'from spglib import get_stabilized_reciprocal_mesh, relocate_BZ_grid_address, get_symmetry_dataset, get_pointgroup\n'), ((20015, 20091), 'phonopy.structure.cells.estimate_supercell_matrix_from_pointgroup', 'estimate_supercell_matrix_from_pointgroup', (['pointgroup[1]', 'lattice', 'num_cells'], {}), '(pointgroup[1], lattice, num_cells)\n', (20056, 20091), False, 'from phonopy.structure.cells import get_primitive_matrix_by_centring, estimate_supercell_matrix, estimate_supercell_matrix_from_pointgroup, determinant\n'), ((20293, 20329), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': '"""double"""', 'order': '"""C"""'}), "(3, dtype='double', order='C')\n", (20299, 20329), True, 'import numpy as np\n'), ((20380, 20404), 'numpy.diagonal', 'np.diagonal', (['self._snf.D'], {}), '(self._snf.D)\n', (20391, 20404), True, 'import numpy as np\n'), ((20943, 20969), 'numpy.linalg.inv', 'np.linalg.inv', (['self._snf.D'], {}), '(self._snf.D)\n', (20956, 20969), True, 'import numpy as np\n'), ((21077, 21093), 'numpy.rint', 'np.rint', (['qpoints'], {}), '(qpoints)\n', (21084, 21093), True, 'import numpy as np\n'), ((22523, 22561), 'phonopy.harmonic.force_constants.similarity_transformation', 'similarity_transformation', (['tmat_inv', 'r'], {}), '(tmat_inv, r)\n', (22548, 22561), False, 'from phonopy.harmonic.force_constants import similarity_transformation\n'), ((22879, 22914), 'phonopy.harmonic.force_constants.similarity_transformation', 'similarity_transformation', (['Q_inv', 'r'], {}), '(Q_inv, r)\n', (22904, 22914), False, 'from phonopy.harmonic.force_constants import similarity_transformation\n'), ((22928, 22960), 
'phonopy.harmonic.force_constants.similarity_transformation', 'similarity_transformation', (['D', '_r'], {}), '(D, _r)\n', (22953, 22960), False, 'from phonopy.harmonic.force_constants import similarity_transformation\n'), ((3108, 3142), 'numpy.dot', 'np.dot', (['rec_lattice.T', 'rec_lattice'], {}), '(rec_lattice.T, rec_lattice)\n', (3114, 3142), True, 'import numpy as np\n'), ((3164, 3197), 'numpy.rint', 'np.rint', (['(rec_lat_lengths * length)'], {}), '(rec_lat_lengths * length)\n', (3171, 3197), True, 'import numpy as np\n'), ((11496, 11523), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': '"""double"""'}), "(3, dtype='double')\n", (11504, 11523), True, 'import numpy as np\n'), ((11558, 11596), 'numpy.array', 'np.array', (['q_mesh_shift'], {'dtype': '"""double"""'}), "(q_mesh_shift, dtype='double')\n", (11566, 11596), True, 'import numpy as np\n'), ((13115, 13153), 'numpy.array', 'np.array', (['self._is_shift'], {'dtype': '"""intc"""'}), "(self._is_shift, dtype='intc')\n", (13123, 13153), True, 'import numpy as np\n'), ((13219, 13314), 'spglib.relocate_BZ_grid_address', 'relocate_BZ_grid_address', (['grid_address', 'self._mesh', 'self._rec_lat'], {'is_shift': 'self._is_shift'}), '(grid_address, self._mesh, self._rec_lat, is_shift=\n self._is_shift)\n', (13243, 13314), False, 'from spglib import get_stabilized_reciprocal_mesh, relocate_BZ_grid_address, get_symmetry_dataset, get_pointgroup\n'), ((19900, 19931), 'numpy.dot', 'np.dot', (['input_cell.cell.T', 'tmat'], {}), '(input_cell.cell.T, tmat)\n', (19906, 19931), True, 'import numpy as np\n'), ((11635, 11653), 'numpy.rint', 'np.rint', (['(shift * 2)'], {}), '(shift * 2)\n', (11642, 11653), True, 'import numpy as np\n'), ((12376, 12413), 'numpy.extract', 'np.extract', (['lattice_equiv', 'mesh_equiv'], {}), '(lattice_equiv, mesh_equiv)\n', (12386, 12413), True, 'import numpy as np\n'), ((18850, 18869), 'numpy.linalg.inv', 'np.linalg.inv', (['tmat'], {}), '(tmat)\n', (18863, 18869), True, 'import numpy as np\n'), 
((19161, 19178), 'numpy.rint', 'np.rint', (['inv_pmat'], {}), '(inv_pmat)\n', (19168, 19178), True, 'import numpy as np\n'), ((19536, 19555), 'numpy.linalg.inv', 'np.linalg.inv', (['tmat'], {}), '(tmat)\n', (19549, 19555), True, 'import numpy as np\n'), ((20195, 20226), 'numpy.multiply', 'np.multiply', (['tmat', 'mesh_numbers'], {}), '(tmat, mesh_numbers)\n', (20206, 20226), True, 'import numpy as np\n'), ((21028, 21054), 'numpy.dot', 'np.dot', (['self._snf.Q', 'D_inv'], {}), '(self._snf.Q, D_inv)\n', (21034, 21054), True, 'import numpy as np\n'), ((22579, 22590), 'numpy.rint', 'np.rint', (['_r'], {}), '(_r)\n', (22586, 22590), True, 'import numpy as np\n'), ((22978, 22989), 'numpy.rint', 'np.rint', (['_r'], {}), '(_r)\n', (22985, 22989), True, 'import numpy as np\n'), ((23071, 23090), 'phonopy.structure.cells.determinant', 'determinant', (['_r_int'], {}), '(_r_int)\n', (23082, 23090), False, 'from phonopy.structure.cells import get_primitive_matrix_by_centring, estimate_supercell_matrix, estimate_supercell_matrix_from_pointgroup, determinant\n'), ((3322, 3341), 'numpy.array', 'np.array', (['rotations'], {}), '(rotations)\n', (3330, 3341), True, 'import numpy as np\n'), ((11050, 11073), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': '"""intc"""'}), "(3, dtype='intc')\n", (11056, 11073), True, 'import numpy as np\n'), ((11747, 11761), 'numpy.rint', 'np.rint', (['shift'], {}), '(shift)\n', (11754, 11761), True, 'import numpy as np\n'), ((13422, 13441), 'numpy.prod', 'np.prod', (['self._mesh'], {}), '(self._mesh)\n', (13429, 13441), True, 'import numpy as np\n'), ((19207, 19238), 'numpy.abs', 'np.abs', (['(inv_pmat - inv_pmat_int)'], {}), '(inv_pmat - inv_pmat_int)\n', (19213, 19238), True, 'import numpy as np\n'), ((22619, 22638), 'numpy.abs', 'np.abs', (['(_r - _r_int)'], {}), '(_r - _r_int)\n', (22625, 22638), True, 'import numpy as np\n'), ((23018, 23037), 'numpy.abs', 'np.abs', (['(_r - _r_int)'], {}), '(_r - _r_int)\n', (23024, 23037), True, 'import numpy as np\n'), 
((11907, 11954), 'numpy.logical_xor', 'np.logical_xor', (['(diff > 0.1)', '(self._mesh % 2 == 0)'], {}), '(diff > 0.1, self._mesh % 2 == 0)\n', (11921, 11954), True, 'import numpy as np\n')]
|
import os
import re
import torch
from torchtext.data import Field, TabularDataset
from torchtext.data.iterator import BucketIterator
from torchtext.vocab import Vectors
from datasets.reuters import process_labels
from datasets.ag_news import process_labels
def clean_string(string):
"""
Performs tokenization and string cleaning for the YelpReviewPolarity dataset
"""
string = re.sub(r"\\n", " ", string)
# string = re.sub(r"[^A-Za-z0-9(),!?\'`]", " ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.lower().strip().split()
class YelpReviewPolarity(TabularDataset):
NAME = 'YelpReviewPolarity'
NUM_CLASSES = 2
IS_MULTILABEL = False
TEXT_FIELD = Field(batch_first=True, tokenize=clean_string, include_lengths=True)
LABEL_FIELD = Field(sequential=False, use_vocab=False, batch_first=True,
preprocessing=lambda s: process_labels(s, YelpReviewPolarity.NUM_CLASSES))
@staticmethod
def sort_key(ex):
return len(ex.text)
@classmethod
def splits(cls, path, train=os.path.join('.local_data', 'YelpReviewPolarity', 'train.csv'),
test=os.path.join('.local_data', 'YelpReviewPolarity', 'test.csv'), **kwargs):
return super(YelpReviewPolarity, cls).splits(
path, train=train, test=test, format='csv', fields=[('label', cls.LABEL_FIELD), ('text', cls.TEXT_FIELD)]
)
@classmethod
def iters(cls, path, vectors_name, vectors_cache, batch_size=64, shuffle=True, device=0, vectors=None,
unk_init=torch.Tensor.zero_):
"""
:param path: directory containing train, test, dev files
:param vectors_name: name of word vectors file
:param vectors_cache: path to directory containing word vectors file
:param batch_size: batch size
:param device: GPU device
:param vectors: custom vectors - either predefined torchtext vectors or your own custom Vector classes
:param unk_init: function used to generate vector for OOV words
:return:
"""
if vectors is None:
vectors = Vectors(name=vectors_name, cache=vectors_cache, unk_init=unk_init)
train, test = cls.splits(path)
cls.TEXT_FIELD.build_vocab(train, test, vectors=vectors)
return BucketIterator.splits((train, test), batch_size=batch_size, repeat=False, shuffle=shuffle,
sort_within_batch=True, device=device)
|
[
"os.path.join",
"datasets.ag_news.process_labels",
"torchtext.data.iterator.BucketIterator.splits",
"torchtext.vocab.Vectors",
"re.sub",
"torchtext.data.Field"
] |
[((397, 425), 're.sub', 're.sub', (['"""\\\\\\\\n"""', '""" """', 'string'], {}), "('\\\\\\\\n', ' ', string)\n", (403, 425), False, 'import re\n'), ((498, 528), 're.sub', 're.sub', (['"""\\\\s{2,}"""', '""" """', 'string'], {}), "('\\\\s{2,}', ' ', string)\n", (504, 528), False, 'import re\n'), ((710, 778), 'torchtext.data.Field', 'Field', ([], {'batch_first': '(True)', 'tokenize': 'clean_string', 'include_lengths': '(True)'}), '(batch_first=True, tokenize=clean_string, include_lengths=True)\n', (715, 778), False, 'from torchtext.data import Field, TabularDataset\n'), ((1074, 1136), 'os.path.join', 'os.path.join', (['""".local_data"""', '"""YelpReviewPolarity"""', '"""train.csv"""'], {}), "('.local_data', 'YelpReviewPolarity', 'train.csv')\n", (1086, 1136), False, 'import os\n'), ((1159, 1220), 'os.path.join', 'os.path.join', (['""".local_data"""', '"""YelpReviewPolarity"""', '"""test.csv"""'], {}), "('.local_data', 'YelpReviewPolarity', 'test.csv')\n", (1171, 1220), False, 'import os\n'), ((2315, 2448), 'torchtext.data.iterator.BucketIterator.splits', 'BucketIterator.splits', (['(train, test)'], {'batch_size': 'batch_size', 'repeat': '(False)', 'shuffle': 'shuffle', 'sort_within_batch': '(True)', 'device': 'device'}), '((train, test), batch_size=batch_size, repeat=False,\n shuffle=shuffle, sort_within_batch=True, device=device)\n', (2336, 2448), False, 'from torchtext.data.iterator import BucketIterator\n'), ((2128, 2194), 'torchtext.vocab.Vectors', 'Vectors', ([], {'name': 'vectors_name', 'cache': 'vectors_cache', 'unk_init': 'unk_init'}), '(name=vectors_name, cache=vectors_cache, unk_init=unk_init)\n', (2135, 2194), False, 'from torchtext.vocab import Vectors\n'), ((904, 953), 'datasets.ag_news.process_labels', 'process_labels', (['s', 'YelpReviewPolarity.NUM_CLASSES'], {}), '(s, YelpReviewPolarity.NUM_CLASSES)\n', (918, 953), False, 'from datasets.ag_news import process_labels\n')]
|
from flask import Blueprint
from marshmallow import pprint
from api.utils import catch_exception
from api.resources import LIMIT_ROWS
from api.database import get_session
import api.models.post
post_bp = Blueprint('post', __name__, url_prefix='/post')
|
[
"flask.Blueprint"
] |
[((206, 253), 'flask.Blueprint', 'Blueprint', (['"""post"""', '__name__'], {'url_prefix': '"""/post"""'}), "('post', __name__, url_prefix='/post')\n", (215, 253), False, 'from flask import Blueprint\n')]
|
import streamlit as st
def draw_tot_cases_graph(df):
tot_cases_by_day = df.groupby("date")["cases"].sum()
st.write("Total cases(US):")
st.line_chart(tot_cases_by_day)
def draw_daily_cases_graph(df):
cases_by_day = df.groupby("date")["cases"].sum().reset_index(name="cases")
shifted = cases_by_day["cases"].shift(1)
cases_by_day["daily_cases"] = cases_by_day["cases"] - shifted
cases_by_day.drop(columns=["cases"], axis=1, inplace=True)
cases_by_day.set_index("date", inplace=True)
st.write("Daily cases(US):")
st.bar_chart(cases_by_day)
def draw_tot_deaths_graph(df):
tot_cases_by_day = df.groupby("date")["deaths"].sum()
st.write("Total deaths(US):")
st.line_chart(tot_cases_by_day)
def draw_daily_deaths_graph(df):
cases_by_day = df.groupby("date")["deaths"].sum().reset_index(name="deaths")
shifted = cases_by_day["deaths"].shift(1)
cases_by_day["daily_deaths"] = cases_by_day["deaths"] - shifted
cases_by_day.drop(columns=["deaths"], axis=1, inplace=True)
cases_by_day.set_index("date", inplace=True)
st.write("Daily deaths(US):")
st.bar_chart(cases_by_day)
def draw_county_state_cases_graph(df, co, state):
county_state_cases_by_day = df.groupby("date")["cases"].sum()
st.write(f"Total cases({co}, {state}):")
st.line_chart(county_state_cases_by_day)
def draw_daily_county_state_cases_graph(df, co, state):
cases_by_day = df.groupby("date")["cases"].sum().reset_index(name="cases")
shifted = cases_by_day["cases"].shift(1)
cases_by_day["daily_cases"] = cases_by_day["cases"] - shifted
cases_by_day.loc[cases_by_day.daily_cases < 0, "daily_cases"] = 0
cases_by_day.drop(columns=["cases"], axis=1, inplace=True)
cases_by_day.set_index("date", inplace=True)
st.write(f"Daily cases({co}, {state}):")
st.bar_chart(cases_by_day)
def draw_county_state_deaths_graph(df, co, state):
county_state_deaths_by_day = df.groupby("date")["deaths"].sum()
st.write(f"Total deaths({co}, {state}):")
st.line_chart(county_state_deaths_by_day)
def draw_daily_county_state_deaths_graph(df, co, state):
cases_by_day = df.groupby("date")["deaths"].sum().reset_index(name="deaths")
shifted = cases_by_day["deaths"].shift(1)
cases_by_day["daily_deaths"] = cases_by_day["deaths"] - shifted
cases_by_day.loc[cases_by_day.daily_deaths < 0, "daily_deaths"] = 0
cases_by_day.drop(columns=["deaths"], axis=1, inplace=True)
cases_by_day.set_index("date", inplace=True)
st.write(f"Daily deaths({co}, {state}):")
st.bar_chart(cases_by_day)
|
[
"streamlit.line_chart",
"streamlit.write",
"streamlit.bar_chart"
] |
[((116, 144), 'streamlit.write', 'st.write', (['"""Total cases(US):"""'], {}), "('Total cases(US):')\n", (124, 144), True, 'import streamlit as st\n'), ((149, 180), 'streamlit.line_chart', 'st.line_chart', (['tot_cases_by_day'], {}), '(tot_cases_by_day)\n', (162, 180), True, 'import streamlit as st\n'), ((521, 549), 'streamlit.write', 'st.write', (['"""Daily cases(US):"""'], {}), "('Daily cases(US):')\n", (529, 549), True, 'import streamlit as st\n'), ((554, 580), 'streamlit.bar_chart', 'st.bar_chart', (['cases_by_day'], {}), '(cases_by_day)\n', (566, 580), True, 'import streamlit as st\n'), ((676, 705), 'streamlit.write', 'st.write', (['"""Total deaths(US):"""'], {}), "('Total deaths(US):')\n", (684, 705), True, 'import streamlit as st\n'), ((710, 741), 'streamlit.line_chart', 'st.line_chart', (['tot_cases_by_day'], {}), '(tot_cases_by_day)\n', (723, 741), True, 'import streamlit as st\n'), ((1089, 1118), 'streamlit.write', 'st.write', (['"""Daily deaths(US):"""'], {}), "('Daily deaths(US):')\n", (1097, 1118), True, 'import streamlit as st\n'), ((1123, 1149), 'streamlit.bar_chart', 'st.bar_chart', (['cases_by_day'], {}), '(cases_by_day)\n', (1135, 1149), True, 'import streamlit as st\n'), ((1272, 1312), 'streamlit.write', 'st.write', (['f"""Total cases({co}, {state}):"""'], {}), "(f'Total cases({co}, {state}):')\n", (1280, 1312), True, 'import streamlit as st\n'), ((1317, 1357), 'streamlit.line_chart', 'st.line_chart', (['county_state_cases_by_day'], {}), '(county_state_cases_by_day)\n', (1330, 1357), True, 'import streamlit as st\n'), ((1792, 1832), 'streamlit.write', 'st.write', (['f"""Daily cases({co}, {state}):"""'], {}), "(f'Daily cases({co}, {state}):')\n", (1800, 1832), True, 'import streamlit as st\n'), ((1837, 1863), 'streamlit.bar_chart', 'st.bar_chart', (['cases_by_day'], {}), '(cases_by_day)\n', (1849, 1863), True, 'import streamlit as st\n'), ((1989, 2030), 'streamlit.write', 'st.write', (['f"""Total deaths({co}, {state}):"""'], {}), "(f'Total 
deaths({co}, {state}):')\n", (1997, 2030), True, 'import streamlit as st\n'), ((2035, 2076), 'streamlit.line_chart', 'st.line_chart', (['county_state_deaths_by_day'], {}), '(county_state_deaths_by_day)\n', (2048, 2076), True, 'import streamlit as st\n'), ((2520, 2561), 'streamlit.write', 'st.write', (['f"""Daily deaths({co}, {state}):"""'], {}), "(f'Daily deaths({co}, {state}):')\n", (2528, 2561), True, 'import streamlit as st\n'), ((2566, 2592), 'streamlit.bar_chart', 'st.bar_chart', (['cases_by_day'], {}), '(cases_by_day)\n', (2578, 2592), True, 'import streamlit as st\n')]
|
from setuptools import setup
with open("README.md", "r") as f:
long_description = f.read()
setup(
name='JSONifier',
version='0.0.7',
description="A package for reading, writing, and editing of JSON files, Python dictionaries, and such.",
py_modules=["jsonify"],
package_dir={"": "src"},
long_description=long_description,
long_description_content_type="text/markdown",
author="minecraftpr03",
url="https://github.com/MasterCoder21/JSONifier",
author_email=None,
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent"
]
)
|
[
"setuptools.setup"
] |
[((95, 809), 'setuptools.setup', 'setup', ([], {'name': '"""JSONifier"""', 'version': '"""0.0.7"""', 'description': '"""A package for reading, writing, and editing of JSON files, Python dictionaries, and such."""', 'py_modules': "['jsonify']", 'package_dir': "{'': 'src'}", 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'author': '"""minecraftpr03"""', 'url': '"""https://github.com/MasterCoder21/JSONifier"""', 'author_email': 'None', 'classifiers': "['Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent']"}), "(name='JSONifier', version='0.0.7', description=\n 'A package for reading, writing, and editing of JSON files, Python dictionaries, and such.'\n , py_modules=['jsonify'], package_dir={'': 'src'}, long_description=\n long_description, long_description_content_type='text/markdown', author\n ='minecraftpr03', url='https://github.com/MasterCoder21/JSONifier',\n author_email=None, classifiers=['Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent'])\n", (100, 809), False, 'from setuptools import setup\n')]
|
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
# pylint: disable=import-error,no-name-in-module
"""Test object relationships in the database."""
import warnings
from sqlalchemy import exc as sa_exc
import aiida
from aiida.backends.sqlalchemy.models.node import DbNode
from aiida.backends.sqlalchemy.models.user import DbUser
from aiida.backends.testbase import AiidaTestCase
from aiida.common.links import LinkType
from aiida.common.utils import get_new_uuid
from aiida.orm import CalculationNode, Data
class TestRelationshipsSQLA(AiidaTestCase):
    """Class of tests concerning the schema and the correct
    implementation of relationships within the AiiDA ORM
    The general naming convention is the following:
    1)tests on one-to-many relationships: test_<Parent>_<child> (Parent class is capitalized).
    2)tests on many-to-many relationships: test_<peer>_<peer> (none is
    capitalized)."""

    def test_outputs_children_relationship(self):
        """This test checks that the outputs_q, children_q relationship and the
        corresponding properties work as expected."""
        # Build a Data -> Calculation -> Data chain of stored nodes.
        n_1 = Data().store()
        n_2 = CalculationNode()
        n_3 = Data().store()

        # Create a link between these 2 nodes
        n_2.add_incoming(n_1, link_type=LinkType.INPUT_CALC, link_label='N1')
        n_2.store()

        n_3.add_incoming(n_2, link_type=LinkType.CREATE, link_label='N2')

        # Check that the result of outputs is a list
        self.assertIsInstance(n_1.backend_entity.dbmodel.outputs, list, 'This is expected to be a list')

        # Check that the result of outputs_q is a query
        from sqlalchemy.orm.dynamic import AppenderQuery
        self.assertIsInstance(
            n_1.backend_entity.dbmodel.outputs_q, AppenderQuery, 'This is expected to be an AppenderQuery'
        )

        # Check that the result of outputs is correct: only n_2 is an
        # output of n_1 in the chain built above.
        out = {_.pk for _ in n_1.backend_entity.dbmodel.outputs}
        self.assertEqual(out, set([n_2.pk]))

    def test_inputs_parents_relationship(self):
        """This test checks that the inputs_q, parents_q relationship and the
        corresponding properties work as expected."""
        # Same Data -> Calculation -> Data chain, checked from the input side.
        n_1 = Data().store()
        n_2 = CalculationNode()
        n_3 = Data().store()

        # Create a link between these 2 nodes
        n_2.add_incoming(n_1, link_type=LinkType.INPUT_CALC, link_label='N1')
        n_2.store()

        n_3.add_incoming(n_2, link_type=LinkType.CREATE, link_label='N2')

        # Check that the result of outputs is a list
        self.assertIsInstance(n_1.backend_entity.dbmodel.inputs, list, 'This is expected to be a list')

        # Check that the result of outputs_q is a query
        from sqlalchemy.orm.dynamic import AppenderQuery
        self.assertIsInstance(
            n_1.backend_entity.dbmodel.inputs_q, AppenderQuery, 'This is expected to be an AppenderQuery'
        )

        # Check that the result of inputs is correct: only n_2 is an
        # input of n_3.
        out = {_.pk for _ in n_3.backend_entity.dbmodel.inputs}
        self.assertEqual(out, set([n_2.pk]))

    def test_user_node_1(self):
        """Test that when a user and a node having that user are created,
        storing NODE induces storage of the USER

        Assert the correct storage of user and node."""

        # Create user
        dbu1 = DbUser('<EMAIL>', 'spam', 'eggs', 'monty')

        # Creat node
        node_dict = dict(user=dbu1)
        dbn_1 = DbNode(**node_dict)

        # Check that the two are neither flushed nor committed
        # (no primary key has been assigned yet).
        self.assertIsNone(dbu1.id)
        self.assertIsNone(dbn_1.id)

        session = aiida.backends.sqlalchemy.get_scoped_session()

        # Add only the node and commit
        session.add(dbn_1)
        session.commit()

        # Check that a pk has been assigned, which means that things have
        # been flushed into the database
        self.assertIsNotNone(dbn_1.id)
        self.assertIsNotNone(dbu1.id)

    def test_user_node_2(self):
        """Test that when a user and a node having that user are created,
        storing USER does NOT induce storage of the NODE

        Assert the correct storage of user and node."""

        # Create user
        dbu1 = DbUser('tests2<EMAIL>', 'spam', 'eggs', 'monty')

        # Creat node
        node_dict = dict(user=dbu1)
        dbn_1 = DbNode(**node_dict)

        # Check that the two are neither flushed nor committed
        self.assertIsNone(dbu1.id)
        self.assertIsNone(dbn_1.id)

        session = aiida.backends.sqlalchemy.get_scoped_session()

        # Catch all the SQLAlchemy warnings generated by the following code
        with warnings.catch_warnings():  # pylint: disable=no-member
            warnings.simplefilter('ignore', category=sa_exc.SAWarning)  # pylint: disable=no-member

            # Add only the user and commit
            session.add(dbu1)
            session.commit()

        # Check that a pk has been assigned (or not), which means that things
        # have been flushed into the database: the user is stored, the node
        # that references it is not.
        self.assertIsNotNone(dbu1.id)
        self.assertIsNone(dbn_1.id)

    def test_user_node_3(self):
        """Test that when a user and two nodes having that user are created,
        storing only ONE NODE induces storage of that node, of the user but
        not of the other node

        Assert the correct storage of the user and node. Assert the
        non-storage of the other node."""
        # Create user
        dbu1 = DbUser('tests3@schema', 'spam', 'eggs', 'monty')

        # Creat node
        node_dict = dict(user=dbu1)
        dbn_1 = DbNode(**node_dict)
        dbn_2 = DbNode(**node_dict)

        # Check that the two are neither flushed nor committed
        self.assertIsNone(dbu1.id)
        self.assertIsNone(dbn_1.id)
        self.assertIsNone(dbn_2.id)

        session = aiida.backends.sqlalchemy.get_scoped_session()

        # Add only first node and commit
        session.add(dbn_1)
        with warnings.catch_warnings():
            # suppress known SAWarning that we have not added dbn_2
            warnings.simplefilter('ignore', category=sa_exc.SAWarning)
            session.commit()

        # Check for which object a pk has been assigned, which means that
        # things have been at least flushed into the database
        self.assertIsNotNone(dbu1.id)
        self.assertIsNotNone(dbn_1.id)
        self.assertIsNone(dbn_2.id)

    def test_user_node_4(self):
        """Test that when several nodes are created with the same user and each
        of them is assigned to the same name, storage of last node object
        associated to that node does not trigger storage of all objects.

        Assert the correct storage of the user and node. Assert the
        non-storage of the other nodes."""
        # Create user
        dbu1 = DbUser('tests4@schema', 'spam', 'eggs', 'monty')

        # Creat node objects assigningd them to the same name
        # Check https://docs.python.org/2/tutorial/classes.html subsec. 9.1
        for _ in range(5):
            # It is important to change the uuid each time (or any other
            # variable) so that a different objects (with a different pointer)
            # is actually created in this scope.
            dbn_1 = DbNode(user=dbu1, uuid=get_new_uuid())

        # Check that the two are neither flushed nor committed
        # (only the object from the last loop iteration is bound to dbn_1).
        self.assertIsNone(dbu1.id)
        self.assertIsNone(dbn_1.id)

        session = aiida.backends.sqlalchemy.get_scoped_session()

        # Add only first node and commit
        session.add(dbn_1)
        with warnings.catch_warnings():
            # suppress known SAWarning that we have not add the other nodes
            warnings.simplefilter('ignore', category=sa_exc.SAWarning)
            session.commit()

        # Check for which object a pk has been assigned, which means that
        # things have been at least flushed into the database
        self.assertIsNotNone(dbu1.id)
        self.assertIsNotNone(dbn_1.id)
|
[
"aiida.backends.sqlalchemy.models.user.DbUser",
"warnings.simplefilter",
"aiida.common.utils.get_new_uuid",
"aiida.orm.CalculationNode",
"aiida.backends.sqlalchemy.models.node.DbNode",
"warnings.catch_warnings",
"aiida.backends.sqlalchemy.get_scoped_session",
"aiida.orm.Data"
] |
[((1722, 1739), 'aiida.orm.CalculationNode', 'CalculationNode', ([], {}), '()\n', (1737, 1739), False, 'from aiida.orm import CalculationNode, Data\n'), ((2798, 2815), 'aiida.orm.CalculationNode', 'CalculationNode', ([], {}), '()\n', (2813, 2815), False, 'from aiida.orm import CalculationNode, Data\n'), ((3897, 3939), 'aiida.backends.sqlalchemy.models.user.DbUser', 'DbUser', (['"""<EMAIL>"""', '"""spam"""', '"""eggs"""', '"""monty"""'], {}), "('<EMAIL>', 'spam', 'eggs', 'monty')\n", (3903, 3939), False, 'from aiida.backends.sqlalchemy.models.user import DbUser\n'), ((4014, 4033), 'aiida.backends.sqlalchemy.models.node.DbNode', 'DbNode', ([], {}), '(**node_dict)\n', (4020, 4033), False, 'from aiida.backends.sqlalchemy.models.node import DbNode\n'), ((4188, 4234), 'aiida.backends.sqlalchemy.get_scoped_session', 'aiida.backends.sqlalchemy.get_scoped_session', ([], {}), '()\n', (4232, 4234), False, 'import aiida\n'), ((4777, 4825), 'aiida.backends.sqlalchemy.models.user.DbUser', 'DbUser', (['"""tests2<EMAIL>"""', '"""spam"""', '"""eggs"""', '"""monty"""'], {}), "('tests2<EMAIL>', 'spam', 'eggs', 'monty')\n", (4783, 4825), False, 'from aiida.backends.sqlalchemy.models.user import DbUser\n'), ((4900, 4919), 'aiida.backends.sqlalchemy.models.node.DbNode', 'DbNode', ([], {}), '(**node_dict)\n', (4906, 4919), False, 'from aiida.backends.sqlalchemy.models.node import DbNode\n'), ((5074, 5120), 'aiida.backends.sqlalchemy.get_scoped_session', 'aiida.backends.sqlalchemy.get_scoped_session', ([], {}), '()\n', (5118, 5120), False, 'import aiida\n'), ((6033, 6081), 'aiida.backends.sqlalchemy.models.user.DbUser', 'DbUser', (['"""tests3@schema"""', '"""spam"""', '"""eggs"""', '"""monty"""'], {}), "('tests3@schema', 'spam', 'eggs', 'monty')\n", (6039, 6081), False, 'from aiida.backends.sqlalchemy.models.user import DbUser\n'), ((6156, 6175), 'aiida.backends.sqlalchemy.models.node.DbNode', 'DbNode', ([], {}), '(**node_dict)\n', (6162, 6175), False, 'from 
aiida.backends.sqlalchemy.models.node import DbNode\n'), ((6192, 6211), 'aiida.backends.sqlalchemy.models.node.DbNode', 'DbNode', ([], {}), '(**node_dict)\n', (6198, 6211), False, 'from aiida.backends.sqlalchemy.models.node import DbNode\n'), ((6402, 6448), 'aiida.backends.sqlalchemy.get_scoped_session', 'aiida.backends.sqlalchemy.get_scoped_session', ([], {}), '()\n', (6446, 6448), False, 'import aiida\n'), ((7386, 7434), 'aiida.backends.sqlalchemy.models.user.DbUser', 'DbUser', (['"""tests4@schema"""', '"""spam"""', '"""eggs"""', '"""monty"""'], {}), "('tests4@schema', 'spam', 'eggs', 'monty')\n", (7392, 7434), False, 'from aiida.backends.sqlalchemy.models.user import DbUser\n'), ((8016, 8062), 'aiida.backends.sqlalchemy.get_scoped_session', 'aiida.backends.sqlalchemy.get_scoped_session', ([], {}), '()\n', (8060, 8062), False, 'import aiida\n'), ((5211, 5236), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (5234, 5236), False, 'import warnings\n'), ((5279, 5337), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'sa_exc.SAWarning'}), "('ignore', category=sa_exc.SAWarning)\n", (5300, 5337), False, 'import warnings\n'), ((6531, 6556), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (6554, 6556), False, 'import warnings\n'), ((6638, 6696), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'sa_exc.SAWarning'}), "('ignore', category=sa_exc.SAWarning)\n", (6659, 6696), False, 'import warnings\n'), ((8145, 8170), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (8168, 8170), False, 'import warnings\n'), ((8260, 8318), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'sa_exc.SAWarning'}), "('ignore', category=sa_exc.SAWarning)\n", (8281, 8318), False, 'import warnings\n'), ((1693, 1699), 'aiida.orm.Data', 'Data', ([], {}), '()\n', (1697, 1699), False, 'from aiida.orm import CalculationNode, Data\n'), 
((1754, 1760), 'aiida.orm.Data', 'Data', ([], {}), '()\n', (1758, 1760), False, 'from aiida.orm import CalculationNode, Data\n'), ((2769, 2775), 'aiida.orm.Data', 'Data', ([], {}), '()\n', (2773, 2775), False, 'from aiida.orm import CalculationNode, Data\n'), ((2830, 2836), 'aiida.orm.Data', 'Data', ([], {}), '()\n', (2834, 2836), False, 'from aiida.orm import CalculationNode, Data\n'), ((7846, 7860), 'aiida.common.utils.get_new_uuid', 'get_new_uuid', ([], {}), '()\n', (7858, 7860), False, 'from aiida.common.utils import get_new_uuid\n')]
|
import asyncio
import json
import logging
import websockets
from django.contrib.auth import get_user_model
from . import models, router
from .utils import get_user_from_session, get_dialogs_with_user
# Module-wide logger for the private-dialog websocket server.
logger = logging.getLogger('django-private-dialog')

# Registry of open sockets, keyed by a (owner_username, opponent_username)
# tuple (see main_handler, which adds and removes entries).
ws_connections = {}
@asyncio.coroutine
def target_message(conn, payload):
    """
    Distribute a payload (message) to one connection.

    :param conn: websocket connection
    :param payload: payload (json dumpable)
    :return: None
    """
    try:
        yield from conn.send(json.dumps(payload))
    except Exception as e:
        # Lazy %-formatting: the original passed ``e`` as a stray positional
        # argument with no placeholder in the message, which the logging
        # module rejects at format time.
        logger.debug('could not send: %s', e)
@asyncio.coroutine
def fanout_message(connections, payload):
    """
    Distributes payload (message) to all connected ws clients.

    Each connection failure is logged and skipped so one dead socket
    does not prevent delivery to the others.
    """
    for conn in connections:
        try:
            yield from conn.send(json.dumps(payload))
        except Exception as e:
            # Lazy %-formatting: the original passed ``e`` as a stray
            # positional argument with no placeholder in the message.
            logger.debug('could not send: %s', e)
@asyncio.coroutine
def gone_online(stream):
    """
    Distributes the users online status to everyone he has dialog with.

    Consumes packets from ``stream`` forever; each packet must carry a
    'session_key' identifying the user who just connected.
    """
    while True:
        packet = yield from stream.get()
        session_id = packet.get('session_key')
        if session_id:
            # Resolve the Django session key to the owning user.
            user_owner = get_user_from_session(session_id)
            if user_owner:
                logger.debug('User ' + user_owner.username + ' gone online')
                # find all connections including user_owner as opponent,
                # send them a message that the user has gone online
                online_opponents = list(
                    filter(lambda x: x[1] == user_owner.username, ws_connections))
                online_opponents_sockets = [
                    ws_connections[i] for i in online_opponents]
                yield from fanout_message(online_opponents_sockets,
                                          {'type': 'gone-online', 'usernames': [user_owner.username]})
            else:
                pass  # invalid session id
        else:
            pass  # no session id
@asyncio.coroutine
def check_online(stream):
    """
    Used to check user's online opponents and show their online/offline status on page on init.

    Packets must carry 'session_key' (the requesting user's session) and
    'username' (the opponent shown on the page).
    """
    while True:
        packet = yield from stream.get()
        session_id = packet.get('session_key')
        opponent_username = packet.get('username')
        if session_id and opponent_username:
            user_owner = get_user_from_session(session_id)
            if user_owner:
                # Find all connections including user_owner as opponent
                online_opponents = list(
                    filter(lambda x: x[1] == user_owner.username, ws_connections))
                logger.debug('User ' + user_owner.username + ' has ' +
                             str(len(online_opponents)) + ' opponents online')
                # Send user online statuses of his opponents through his
                # own socket for this dialog, if it is still open.
                socket = ws_connections.get(
                    (user_owner.username, opponent_username))
                if socket:
                    online_opponents_usernames = [i[0]
                                                  for i in online_opponents]
                    yield from target_message(socket,
                                              {'type': 'gone-online', 'usernames': online_opponents_usernames})
                else:
                    pass  # socket for the pair user_owner.username, opponent_username not found
                    # this can be in case the user has already gone offline
            else:
                pass  # invalid session id
        else:
            pass  # no session id or opponent username
@asyncio.coroutine
def gone_offline(stream):
    """
    Distributes the users online status to everyone he has dialog with.

    Mirror image of ``gone_online``: notifies all connected opponents
    that the user identified by 'session_key' has disconnected.
    """
    while True:
        packet = yield from stream.get()
        session_id = packet.get('session_key')
        if session_id:
            user_owner = get_user_from_session(session_id)
            if user_owner:
                logger.debug('User ' + user_owner.username + ' gone offline')
                # find all connections including user_owner as opponent,
                # send them a message that the user has gone offline
                online_opponents = list(
                    filter(lambda x: x[1] == user_owner.username, ws_connections))
                online_opponents_sockets = [
                    ws_connections[i] for i in online_opponents]
                yield from fanout_message(online_opponents_sockets,
                                          {'type': 'gone-offline', 'username': user_owner.username})
            else:
                pass  # invalid session id
        else:
            pass  # no session id
@asyncio.coroutine
def new_messages_handler(stream):
    """
    Saves a new chat message to db and distributes msg to connected users.

    Packets must carry 'session_key' (sender's session), 'message'
    (the text) and 'username' (the recipient).
    """
    # TODO: handle no user found exception
    while True:
        packet = yield from stream.get()
        session_id = packet.get('session_key')
        msg = packet.get('message')
        username_opponent = packet.get('username')
        if session_id and msg and username_opponent:
            user_owner = get_user_from_session(session_id)
            if user_owner:
                user_opponent = get_user_model().objects.get(username=username_opponent)
                dialog = get_dialogs_with_user(user_owner, user_opponent)
                if len(dialog) > 0:
                    # Save the message
                    # NOTE: ``msg`` is rebound from the raw text to the
                    # persisted Message model instance here.
                    msg = models.Message.objects.create(
                        dialog=dialog[0],
                        sender=user_owner,
                        text=packet['message'],
                        read=False
                    )
                    # Enrich the outgoing packet with persisted metadata.
                    packet['created'] = msg.get_formatted_create_datetime()
                    packet['sender_name'] = msg.sender.username
                    packet['message_id'] = msg.id

                    # Send the message
                    connections = []
                    # Find socket of the user which sent the message
                    if (user_owner.username, user_opponent.username) in ws_connections:
                        connections.append(
                            ws_connections[(user_owner.username, user_opponent.username)])
                    # Find socket of the opponent
                    if (user_opponent.username, user_owner.username) in ws_connections:
                        connections.append(
                            ws_connections[(user_opponent.username, user_owner.username)])
                    else:
                        # Find sockets of people who the opponent is talking with
                        # NOTE(review): this fallback broadcasts to every dialog
                        # the opponent currently has open — presumably so a
                        # notification reaches them anyway; confirm intent.
                        opponent_connections = list(
                            filter(lambda x: x[0] == user_opponent.username, ws_connections))
                        opponent_connections_sockets = [
                            ws_connections[i] for i in opponent_connections]
                        connections.extend(opponent_connections_sockets)

                    yield from fanout_message(connections, packet)
                else:
                    pass  # no dialog found
            else:
                pass  # no user_owner
        else:
            pass  # missing one of params
@asyncio.coroutine
def users_changed_handler(stream):
    """
    Sends connected client list of currently active users in the chatroom.
    """
    while True:
        yield from stream.get()

        # Get list list of current active users
        # NOTE(review): ``ws_connections`` maps (owner, opponent) username
        # tuples to websocket objects elsewhere in this module, so unpacking
        # ``.values()`` as (username, uuid_str) looks stale — verify against
        # the version of ws_connections this handler was written for.
        users = [
            {'username': username, 'uuid': uuid_str}
            for username, uuid_str in ws_connections.values()
        ]

        # Make packet with list of new users (sorted by username)
        packet = {
            'type': 'users-changed',
            'value': sorted(users, key=lambda i: i['username'])
        }
        logger.debug(packet)
        # NOTE(review): ``fanout_message`` calls ``conn.send`` on each item,
        # but ``.keys()`` yields the tuple keys, not sockets — ``.values()``
        # would match the registry used elsewhere; confirm before changing.
        yield from fanout_message(ws_connections.keys(), packet)
@asyncio.coroutine
def is_typing_handler(stream):
    """
    Show message to opponent if user is typing message.

    Packets must carry 'session_key', 'username' (the opponent) and a
    boolean 'typing' flag.
    """
    while True:
        packet = yield from stream.get()
        session_id = packet.get('session_key')
        user_opponent = packet.get('username')
        typing = packet.get('typing')
        if session_id and user_opponent and typing is not None:
            user_owner = get_user_from_session(session_id)
            if user_owner:
                # Deliver only through the opponent's socket for this dialog.
                opponent_socket = ws_connections.get(
                    (user_opponent, user_owner.username))
                if typing and opponent_socket:
                    yield from target_message(opponent_socket,
                                              {'type': 'opponent-typing', 'username': user_opponent})
            else:
                pass  # invalid session id
        else:
            pass  # no session id or user_opponent or typing
@asyncio.coroutine
def read_message_handler(stream):
    """
    Send message to user if the opponent has read the message.

    Packets must carry 'session_key', 'username' (the opponent) and
    'message_id' of the message that was read; the message is marked
    read in the database before the notification is sent.
    """
    while True:
        packet = yield from stream.get()
        session_id = packet.get('session_key')
        user_opponent = packet.get('username')
        message_id = packet.get('message_id')
        if session_id and user_opponent and message_id is not None:
            user_owner = get_user_from_session(session_id)
            if user_owner:
                message = models.Message.objects.filter(id=message_id).first()
                if message:
                    # Persist the read flag before notifying anyone.
                    message.read = True
                    message.save()
                    logger.debug('Message ' + str(message_id) + ' is now read')
                    opponent_socket = ws_connections.get(
                        (user_opponent, user_owner.username))
                    if opponent_socket:
                        yield from target_message(opponent_socket,
                                                  {'type': 'opponent-read-message',
                                                   'username': user_opponent, 'message_id': message_id})
                else:
                    pass  # message not found
            else:
                pass  # invalid session id
        else:
            pass  # no session id or user_opponent or typing
@asyncio.coroutine
def main_handler(websocket, path):
    """
    An Asyncio Task is created for every new websocket client connection
    that is established. This coroutine listens to messages from the connected
    client and routes the message to the proper queue.
    This coroutine can be thought of as a producer.

    The connection path encodes ``/<session_id>/<opponent_username>``.
    """
    # Get users name from the path
    path = path.split('/')
    username = path[2]
    session_id = path[1]
    user_owner = get_user_from_session(session_id)
    if user_owner:
        user_owner = user_owner.username
        # Persist users connection, associate user w/a unique ID
        ws_connections[(user_owner, username)] = websocket

        # While the websocket is open, listen for incoming messages/events
        # if unable to listening for messages/events, then disconnect the client
        try:
            while websocket.open:
                data = yield from websocket.recv()
                if not data:
                    continue
                logger.debug(data)
                try:
                    # Dispatch the raw packet to the matching handler queue.
                    yield from router.MessageRouter(data)()
                except Exception as e:
                    logger.error('could not route msg', e)
        except websockets.exceptions.InvalidState:  # User disconnected
            pass
        finally:
            # Always drop the registry entry so stale sockets are not reused.
            del ws_connections[(user_owner, username)]
    else:
        logger.info("Got invalid session_id attempt to connect " + session_id)
|
[
"django.contrib.auth.get_user_model",
"logging.getLogger",
"json.dumps"
] |
[((210, 252), 'logging.getLogger', 'logging.getLogger', (['"""django-private-dialog"""'], {}), "('django-private-dialog')\n", (227, 252), False, 'import logging\n'), ((518, 537), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (528, 537), False, 'import json\n'), ((824, 843), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (834, 843), False, 'import json\n'), ((5238, 5254), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (5252, 5254), False, 'from django.contrib.auth import get_user_model\n')]
|
import sys
import click
import carica_cfn_tools.version
from carica_cfn_tools.stack_config import Stack, CaricaCfnToolsError, Action
class ActionParamType(click.Choice):
    """Click parameter type whose choices are the ``Action`` enum values.

    Converts command-line input into an ``Action`` member; values that
    are already ``Action`` instances pass through unchanged.
    """

    def __init__(self):
        choices = [str(action.value) for action in Action]
        super().__init__(choices)

    def convert(self, value, param, ctx):
        # Already an enum member (e.g. the option default) -- no conversion.
        if isinstance(value, Action):
            return value
        # Let click validate against the declared choices, then wrap.
        validated = super().convert(value, param, ctx)
        return Action(validated)
# Help strings for the command-line options declared on the ``cli`` command.
ACTION_HELP = f'CloudFormation action to perform (default is {Action.CREATE_OR_UPDATE.value})'
DIRECT_HELP = 'Make changes to the stack directly instead of through a change set'
IGNORE_EMPTY_UPDATES_HELP = 'Ignore "No updates are to be performed." errors when updating stacks'
WAIT_HELP = 'Wait for creates and updates to finish before exiting'
ROLE_ARN_HELP = 'Use this value as the RoleARN argument creating or updating stacks and changesets'
INC_TEMPLATE_HELP = 'Make resources in this SAM or CloudFormation template available for ' \
                    'inclusion in the stack\'s main template\'s "IncludedResources" section ' \
                    '(you can use this option multiple times)'
SAM_TO_CFN_HELP = 'Convert the stack\'s main template and all included templates from SAM to ' \
                  'CloudFormation before performing inclusions'
EXTRA_HELP = 'Include files and directories matched by this glob pattern as stack config "Extras" ' \
             'that gets uploaded to S3 with other dependent resources (you can use this option ' \
             'multiple times)'
JINJA_HELP = 'Process the SAM or CloudFormation template with the Jinja2 template engine after ' \
             'included templates are processed'
JEXTRA_HELP = 'Include files and directories match by this glob pattern like normal "Extras" but ' \
              'process matched files with the Jinja2 template engine before uploading'
VERBOSE_HELP = 'Print extra information while processing templates'
@click.command()
@click.argument('stack_config')
@click.option('--action', '-a', type=ActionParamType(), default=Action.CREATE_OR_UPDATE, help=ACTION_HELP)
@click.option('--direct', '-d', is_flag=True, help=DIRECT_HELP)
@click.option('--ignore-empty-updates', '-g', is_flag=True, help=IGNORE_EMPTY_UPDATES_HELP)
@click.option('--wait', '-w', is_flag=True, help=WAIT_HELP)
@click.option('--role-arn', '-r', help=ROLE_ARN_HELP)
@click.option('--include-template', '-i', multiple=True, help=INC_TEMPLATE_HELP)
@click.option('--sam-to-cfn/--no-sam-to-cfn', default=True, help=SAM_TO_CFN_HELP)
@click.option('--extra', '-e', multiple=True, help=EXTRA_HELP)
@click.option('--jinja/--no-jinja', '-J', default=False, help=JINJA_HELP)
@click.option('--jextra', '-j', multiple=True, help=JEXTRA_HELP)
@click.option('--verbose/--no-verbose', '-v', help=VERBOSE_HELP)
@click.version_option(version=carica_cfn_tools.version.__version__)
def cli(stack_config, action, direct, ignore_empty_updates, wait, role_arn, include_template, sam_to_cfn, verbose,
        extra, jinja, jextra):
    """
    Create or update the CloudFormation stack specified in STACK_CONFIG.
    """
    try:
        stack = Stack(stack_config, include_template, sam_to_cfn, extra, jinja, jextra, verbose)
        # --direct applies changes to the stack immediately; the default
        # path goes through a CloudFormation change set instead.
        if direct:
            stack.apply_stack(action, wait, ignore_empty_updates, role_arn)
        else:
            stack.apply_change_set(action, wait, ignore_empty_updates, role_arn)
    except CaricaCfnToolsError as e:
        # Known tool errors are reported on stderr and exit with status 1.
        print('ERROR: ' + str(e), file=sys.stderr)
        sys.exit(1)
if __name__ == '__main__':
    # Click entry point when the module is executed directly.
    cli()
|
[
"click.version_option",
"click.argument",
"click.option",
"click.command",
"carica_cfn_tools.stack_config.Stack",
"sys.exit"
] |
[((1927, 1942), 'click.command', 'click.command', ([], {}), '()\n', (1940, 1942), False, 'import click\n'), ((1944, 1974), 'click.argument', 'click.argument', (['"""stack_config"""'], {}), "('stack_config')\n", (1958, 1974), False, 'import click\n'), ((2083, 2145), 'click.option', 'click.option', (['"""--direct"""', '"""-d"""'], {'is_flag': '(True)', 'help': 'DIRECT_HELP'}), "('--direct', '-d', is_flag=True, help=DIRECT_HELP)\n", (2095, 2145), False, 'import click\n'), ((2147, 2242), 'click.option', 'click.option', (['"""--ignore-empty-updates"""', '"""-g"""'], {'is_flag': '(True)', 'help': 'IGNORE_EMPTY_UPDATES_HELP'}), "('--ignore-empty-updates', '-g', is_flag=True, help=\n IGNORE_EMPTY_UPDATES_HELP)\n", (2159, 2242), False, 'import click\n'), ((2239, 2297), 'click.option', 'click.option', (['"""--wait"""', '"""-w"""'], {'is_flag': '(True)', 'help': 'WAIT_HELP'}), "('--wait', '-w', is_flag=True, help=WAIT_HELP)\n", (2251, 2297), False, 'import click\n'), ((2299, 2351), 'click.option', 'click.option', (['"""--role-arn"""', '"""-r"""'], {'help': 'ROLE_ARN_HELP'}), "('--role-arn', '-r', help=ROLE_ARN_HELP)\n", (2311, 2351), False, 'import click\n'), ((2353, 2432), 'click.option', 'click.option', (['"""--include-template"""', '"""-i"""'], {'multiple': '(True)', 'help': 'INC_TEMPLATE_HELP'}), "('--include-template', '-i', multiple=True, help=INC_TEMPLATE_HELP)\n", (2365, 2432), False, 'import click\n'), ((2434, 2519), 'click.option', 'click.option', (['"""--sam-to-cfn/--no-sam-to-cfn"""'], {'default': '(True)', 'help': 'SAM_TO_CFN_HELP'}), "('--sam-to-cfn/--no-sam-to-cfn', default=True, help=SAM_TO_CFN_HELP\n )\n", (2446, 2519), False, 'import click\n'), ((2516, 2577), 'click.option', 'click.option', (['"""--extra"""', '"""-e"""'], {'multiple': '(True)', 'help': 'EXTRA_HELP'}), "('--extra', '-e', multiple=True, help=EXTRA_HELP)\n", (2528, 2577), False, 'import click\n'), ((2579, 2651), 'click.option', 'click.option', (['"""--jinja/--no-jinja"""', '"""-J"""'], 
{'default': '(False)', 'help': 'JINJA_HELP'}), "('--jinja/--no-jinja', '-J', default=False, help=JINJA_HELP)\n", (2591, 2651), False, 'import click\n'), ((2653, 2716), 'click.option', 'click.option', (['"""--jextra"""', '"""-j"""'], {'multiple': '(True)', 'help': 'JEXTRA_HELP'}), "('--jextra', '-j', multiple=True, help=JEXTRA_HELP)\n", (2665, 2716), False, 'import click\n'), ((2718, 2781), 'click.option', 'click.option', (['"""--verbose/--no-verbose"""', '"""-v"""'], {'help': 'VERBOSE_HELP'}), "('--verbose/--no-verbose', '-v', help=VERBOSE_HELP)\n", (2730, 2781), False, 'import click\n'), ((2783, 2849), 'click.version_option', 'click.version_option', ([], {'version': 'carica_cfn_tools.version.__version__'}), '(version=carica_cfn_tools.version.__version__)\n', (2803, 2849), False, 'import click\n'), ((3110, 3195), 'carica_cfn_tools.stack_config.Stack', 'Stack', (['stack_config', 'include_template', 'sam_to_cfn', 'extra', 'jinja', 'jextra', 'verbose'], {}), '(stack_config, include_template, sam_to_cfn, extra, jinja, jextra, verbose\n )\n', (3115, 3195), False, 'from carica_cfn_tools.stack_config import Stack, CaricaCfnToolsError, Action\n'), ((3477, 3488), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3485, 3488), False, 'import sys\n')]
|
from pathlib import Path
import itertools
# Get the puzzle input data
input_file = Path(__file__).parent / "input.txt"
puzzle_data = input_file.read_text().strip().split("\n")
puzzle_data = [int(x) for x in puzzle_data]
# ------------------------- Part 1/2 -------------------------
""" Puzzle Rules:
"""
data = [35, 20, 15, 25, 47,
40, 62, 55, 65, 95,
102, 117, 150, 182, 127,
219, 299, 277, 309, 576]
#prev_num = 5
prev_num = 25
for i, v in enumerate(puzzle_data):
if i >= prev_num:
has_pre = False
for comb in itertools.combinations(puzzle_data[i-prev_num:i], 2):
if sum(comb) == v:
has_pre = True
if not has_pre:
print(v, has_pre)
break
invalid = 41682220
# ------------------------- Part 2/2 -------------------------
""" Puzzle Rules:
"""
for i, v in enumerate(puzzle_data):
for j in range(len(puzzle_data)):
if invalid == sum(puzzle_data[i:j]):
sum_list = sorted(puzzle_data[i:j])
print(sum_list)
print(sum_list[0] + sum_list[-1])
break
# 5388976
|
[
"itertools.combinations",
"pathlib.Path"
] |
[((84, 98), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (88, 98), False, 'from pathlib import Path\n'), ((562, 616), 'itertools.combinations', 'itertools.combinations', (['puzzle_data[i - prev_num:i]', '(2)'], {}), '(puzzle_data[i - prev_num:i], 2)\n', (584, 616), False, 'import itertools\n')]
|
# -*- coding: utf-8 -*-
import re
import json
import scrapy
from locations.items import GeojsonPointItem
class BunningsSpider(scrapy.Spider):
    """Scrape Bunnings store locations from the JS data embedded in the
    store-finder page."""

    name = "bunnings"
    allowed_domains = ["bunnings.com.au"]
    start_urls = (
        'https://www.bunnings.com.au/stores/',
    )

    def parse(self, response):
        # The store list lives in a JS assignment; capture the JSON
        # literal on its right-hand side.
        match = re.search("com_bunnings_locations_mapLocations = (.+);", response.text)
        stores = json.loads(match.group(1))

        for idx, entry in enumerate(stores):
            store = entry['Store']
            address = store["Address"]
            location = store["Location"]
            # First and second address lines, collapsed when line two is empty.
            full_address = f'{address["Address"]} {address["AddressLineTwo"]}'.strip()
            yield GeojsonPointItem(
                lat=location["Latitude"],
                lon=location["Longitude"],
                name=store["StoreName"],
                addr_full=full_address,
                city=address["Suburb"],
                state=address["State"],
                postcode=address["Postcode"],
                country="AU",
                phone=store["Phone"],
                website=response.urljoin(store["StoreUrl"]),
                ref=idx,
            )
|
[
"locations.items.GeojsonPointItem",
"re.search",
"json.loads"
] |
[((430, 450), 'json.loads', 'json.loads', (['raw_data'], {}), '(raw_data)\n', (440, 450), False, 'import json\n'), ((330, 401), 're.search', 're.search', (['"""com_bunnings_locations_mapLocations = (.+);"""', 'response.text'], {}), "('com_bunnings_locations_mapLocations = (.+);', response.text)\n", (339, 401), False, 'import re\n'), ((1180, 1210), 'locations.items.GeojsonPointItem', 'GeojsonPointItem', ([], {}), '(**properties)\n', (1196, 1210), False, 'from locations.items import GeojsonPointItem\n')]
|
from sklearn.model_selection import train_test_split
import pandas as pd
from training.models.random_forest_classifier import *
if __name__ == '__main__':
    # Load the labelled dengue dataset (features + class label in last column).
    df = pd.read_excel('../../data_base/excel/datasetV2.xlsx', sheet_name='Casos Dengue')

    # Split features (all columns but the last) from the label (last column).
    x, y = df.iloc[:, :-1].values, df.iloc[:, -1:].values
    # BUG FIX: the original unpacked the split as
    # ``x_train, y_test, y_train, y_test`` — the test features were dropped
    # and ``y_test`` was bound twice. train_test_split returns
    # (x_train, x_test, y_train, y_test) in that order.
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0, stratify=y)

    feat_labels = df.columns[:-1]
    # Train the random forest and report feature importances.
    forest = createModel(500, 1)
    forest, variables = fit(forest, x_train, y_train)
    showImportantColumns(feat_labels, variables)
|
[
"pandas.read_excel",
"sklearn.model_selection.train_test_split"
] |
[((165, 250), 'pandas.read_excel', 'pd.read_excel', (['"""../../data_base/excel/datasetV2.xlsx"""'], {'sheet_name': '"""Casos Dengue"""'}), "('../../data_base/excel/datasetV2.xlsx', sheet_name='Casos Dengue'\n )\n", (178, 250), True, 'import pandas as pd\n'), ((374, 439), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.2)', 'random_state': '(0)', 'stratify': 'y'}), '(x, y, test_size=0.2, random_state=0, stratify=y)\n', (390, 439), False, 'from sklearn.model_selection import train_test_split\n')]
|
"""
Monthly Class
Meteorological data provided by Meteostat (https://dev.meteostat.net)
under the terms of the Creative Commons Attribution-NonCommercial
4.0 International Public License.
The code is licensed under the MIT license.
"""
from datetime import datetime
from typing import Union
import numpy as np
import pandas as pd
from meteostat.core.cache import get_file_path, file_in_cache
from meteostat.core.loader import processing_handler, load_handler
from meteostat.utilities.validations import validate_series
from meteostat.utilities.aggregations import degree_mean, weighted_average
from meteostat.interface.timeseries import Timeseries
from meteostat.interface.point import Point
class Monthly(Timeseries):
"""
Retrieve monthly weather data for one or multiple weather stations or
a single geographical point
"""
# The cache subdirectory
cache_subdir: str = 'monthly'
# Default frequency
_freq: str = '1MS'
# Columns
_columns: list = [
'year',
'month',
'tavg',
'tmin',
'tmax',
'prcp',
'snow',
'wdir',
'wspd',
'wpgt',
'pres',
'tsun'
]
# Index of first meteorological column
_first_met_col = 2
# Data types
_types: dict = {
'tavg': 'float64',
'tmin': 'float64',
'tmax': 'float64',
'prcp': 'float64',
'snow': 'float64',
'wdir': 'float64',
'wspd': 'float64',
'wpgt': 'float64',
'pres': 'float64',
'tsun': 'float64'
}
# Columns for date parsing
_parse_dates: dict = {
'time': [0, 1]
}
# Default aggregation functions
aggregations: dict = {
'tavg': 'mean',
'tmin': 'mean',
'tmax': 'mean',
'prcp': 'sum',
'snow': 'max',
'wdir': degree_mean,
'wspd': 'mean',
'wpgt': 'max',
'pres': 'mean',
'tsun': 'sum'
}
def _load(
self,
station: str
) -> None:
"""
Load file from Meteostat
"""
# File name
file = 'monthly/' + ('full' if self._model else 'obs') + \
'/' + station + '.csv.gz'
# Get local file path
path = get_file_path(self.cache_dir, self.cache_subdir, file)
# Check if file in cache
if self.max_age > 0 and file_in_cache(path, self.max_age):
# Read cached data
df = pd.read_pickle(path)
else:
# Get data from Meteostat
df = load_handler(
self.endpoint,
file,
self._columns,
self._types,
self._parse_dates)
# Validate Series
df = validate_series(df, station)
# Save as Pickle
if self.max_age > 0:
df.to_pickle(path)
# Filter time period and append to DataFrame
if self._start and self._end:
# Get time index
time = df.index.get_level_values('time')
# Filter & return
return df.loc[(time >= self._start) & (time <= self._end)]
# Return
return df
def _get_data(self) -> None:
"""
Get all required data
"""
if len(self._stations) > 0:
# List of datasets
datasets = []
for station in self._stations:
datasets.append((
str(station),
))
# Data Processing
return processing_handler(datasets, self._load, self.processes, self.threads)
# Empty DataFrame
return pd.DataFrame(columns=[*self._types])
def _resolve_point(
self,
method: str,
stations: pd.DataFrame,
alt: int,
adapt_temp: bool
) -> None:
"""
Project weather station data onto a single point
"""
if self._stations.size == 0 or self._data.size == 0:
return None
def adjust_temp(data: pd.DataFrame):
"""
Adjust temperature-like data based on altitude
"""
data.loc[data['tavg'] != np.NaN, 'tavg'] = data['tavg'] + \
((2 / 3) * ((data['elevation'] - alt) / 100))
data.loc[data['tmin'] != np.NaN, 'tmin'] = data['tmin'] + \
((2 / 3) * ((data['elevation'] - alt) / 100))
data.loc[data['tmax'] != np.NaN, 'tmax'] = data['tmax'] + \
((2 / 3) * ((data['elevation'] - alt) / 100))
return data
if method == 'nearest':
if adapt_temp:
# Join elevation of involved weather stations
data = self._data.join(
stations['elevation'], on='station')
# Adapt temperature-like data based on altitude
data = adjust_temp(data)
# Drop elevation & round
data = data.drop('elevation', axis=1).round(1)
else:
data = self._data
self._data = self._data.groupby(
pd.Grouper(level='time', freq=self._freq)).agg('first')
else:
# Join score and elevation of involved weather stations
data = self._data.join(
stations[['score', 'elevation']], on='station')
# Adapt temperature-like data based on altitude
if adapt_temp:
data = adjust_temp(data)
# Exclude non-mean data & perform aggregation
excluded = data['wdir']
excluded = excluded.groupby(
pd.Grouper(level='time', freq=self._freq)).agg('first')
# Aggregate mean data
data = data.groupby(
pd.Grouper(level='time', freq=self._freq)).apply(weighted_average)
# Drop RangeIndex
data.index = data.index.droplevel(1)
# Merge excluded fields
data['wdir'] = excluded
# Drop score and elevation
self._data = data.drop(['score', 'elevation'], axis=1).round(1)
# Set placeholder station ID
self._data['station'] = 'XXXXX'
self._data = self._data.set_index(
['station', self._data.index.get_level_values('time')])
self._stations = pd.Index(['XXXXX'])
def __init__(
self,
loc: Union[pd.DataFrame, Point, list, str],
start: datetime = None,
end: datetime = None,
model: bool = True
) -> None:
# Set list of weather stations
if isinstance(loc, pd.DataFrame):
self._stations = loc.index
elif isinstance(loc, Point):
stations = loc.get_stations('monthly', start, end, model)
self._stations = stations.index
else:
if not isinstance(loc, list):
loc = [loc]
self._stations = pd.Index(loc)
# Set start date
if start is not None:
self._start = start.replace(day=1)
# Set end date
self._end = end
# Set model
self._model = model
# Get data for all weather stations
self._data = self._get_data()
# Interpolate data
if isinstance(loc, Point):
self._resolve_point(loc.method, stations, loc.alt, loc.adapt_temp)
# Clear cache
if self.max_age > 0 and self.autoclean:
self.clear_cache()
def expected_rows(self) -> int:
"""
Return the number of rows expected for the defined date range
"""
return ((self._end.year - self._start.year) * 12 +
self._end.month - self._start.month) + 1
|
[
"pandas.DataFrame",
"meteostat.core.loader.processing_handler",
"pandas.Index",
"meteostat.core.loader.load_handler",
"pandas.Grouper",
"pandas.read_pickle",
"meteostat.core.cache.file_in_cache",
"meteostat.core.cache.get_file_path",
"meteostat.utilities.validations.validate_series"
] |
[((2269, 2323), 'meteostat.core.cache.get_file_path', 'get_file_path', (['self.cache_dir', 'self.cache_subdir', 'file'], {}), '(self.cache_dir, self.cache_subdir, file)\n', (2282, 2323), False, 'from meteostat.core.cache import get_file_path, file_in_cache\n'), ((3693, 3729), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': '[*self._types]'}), '(columns=[*self._types])\n', (3705, 3729), True, 'import pandas as pd\n'), ((6371, 6390), 'pandas.Index', 'pd.Index', (["['XXXXX']"], {}), "(['XXXXX'])\n", (6379, 6390), True, 'import pandas as pd\n'), ((2390, 2423), 'meteostat.core.cache.file_in_cache', 'file_in_cache', (['path', 'self.max_age'], {}), '(path, self.max_age)\n', (2403, 2423), False, 'from meteostat.core.cache import get_file_path, file_in_cache\n'), ((2474, 2494), 'pandas.read_pickle', 'pd.read_pickle', (['path'], {}), '(path)\n', (2488, 2494), True, 'import pandas as pd\n'), ((2566, 2651), 'meteostat.core.loader.load_handler', 'load_handler', (['self.endpoint', 'file', 'self._columns', 'self._types', 'self._parse_dates'], {}), '(self.endpoint, file, self._columns, self._types, self._parse_dates\n )\n', (2578, 2651), False, 'from meteostat.core.loader import processing_handler, load_handler\n'), ((2776, 2804), 'meteostat.utilities.validations.validate_series', 'validate_series', (['df', 'station'], {}), '(df, station)\n', (2791, 2804), False, 'from meteostat.utilities.validations import validate_series\n'), ((3580, 3650), 'meteostat.core.loader.processing_handler', 'processing_handler', (['datasets', 'self._load', 'self.processes', 'self.threads'], {}), '(datasets, self._load, self.processes, self.threads)\n', (3598, 3650), False, 'from meteostat.core.loader import processing_handler, load_handler\n'), ((6966, 6979), 'pandas.Index', 'pd.Index', (['loc'], {}), '(loc)\n', (6974, 6979), True, 'import pandas as pd\n'), ((5160, 5201), 'pandas.Grouper', 'pd.Grouper', ([], {'level': '"""time"""', 'freq': 'self._freq'}), "(level='time', freq=self._freq)\n", 
(5170, 5201), True, 'import pandas as pd\n'), ((5681, 5722), 'pandas.Grouper', 'pd.Grouper', ([], {'level': '"""time"""', 'freq': 'self._freq'}), "(level='time', freq=self._freq)\n", (5691, 5722), True, 'import pandas as pd\n'), ((5821, 5862), 'pandas.Grouper', 'pd.Grouper', ([], {'level': '"""time"""', 'freq': 'self._freq'}), "(level='time', freq=self._freq)\n", (5831, 5862), True, 'import pandas as pd\n')]
|
#!/usr/bin/env python
# Distributed under the MIT License.
# See LICENSE.txt for details.
from spectre.Visualization.Render1D import (find_extrema_over_data_set,
render_single_time)
import unittest
import os
import numpy as np
import matplotlib as mpl
mpl.use('agg')
class TestRender1D(unittest.TestCase):
def test_find_extrema_over_data_set(self):
test_array = np.array([1.1, 6.45, 0.34, 2.3])
expected_vals = (0.34, 6.45)
self.assertEqual(find_extrema_over_data_set(test_array), expected_vals)
def test_render_single_time(self):
var_name = "Variable Test"
time_slice = 1
output_prefix = "TestRenderSingleTime"
time = [0.0, 0.1]
coords = [[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]]
data = [[5.2, 4.5, 9.0, 2.0, 8.0], [1.1, 4.0, 6.0, 5.3, 3.0]]
# test whether a pdf file is saved when run
render_single_time(var_name, time_slice, output_prefix, time, coords,
data)
self.assertTrue(os.path.isfile(output_prefix + '.pdf'))
os.remove(output_prefix + '.pdf')
if __name__ == '__main__':
unittest.main(verbosity=2)
|
[
"unittest.main",
"os.remove",
"spectre.Visualization.Render1D.find_extrema_over_data_set",
"os.path.isfile",
"matplotlib.use",
"numpy.array",
"spectre.Visualization.Render1D.render_single_time"
] |
[((299, 313), 'matplotlib.use', 'mpl.use', (['"""agg"""'], {}), "('agg')\n", (306, 313), True, 'import matplotlib as mpl\n'), ((1168, 1194), 'unittest.main', 'unittest.main', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (1181, 1194), False, 'import unittest\n'), ((423, 455), 'numpy.array', 'np.array', (['[1.1, 6.45, 0.34, 2.3]'], {}), '([1.1, 6.45, 0.34, 2.3])\n', (431, 455), True, 'import numpy as np\n'), ((926, 1001), 'spectre.Visualization.Render1D.render_single_time', 'render_single_time', (['var_name', 'time_slice', 'output_prefix', 'time', 'coords', 'data'], {}), '(var_name, time_slice, output_prefix, time, coords, data)\n', (944, 1001), False, 'from spectre.Visualization.Render1D import find_extrema_over_data_set, render_single_time\n'), ((1101, 1134), 'os.remove', 'os.remove', (["(output_prefix + '.pdf')"], {}), "(output_prefix + '.pdf')\n", (1110, 1134), False, 'import os\n'), ((518, 556), 'spectre.Visualization.Render1D.find_extrema_over_data_set', 'find_extrema_over_data_set', (['test_array'], {}), '(test_array)\n', (544, 556), False, 'from spectre.Visualization.Render1D import find_extrema_over_data_set, render_single_time\n'), ((1053, 1091), 'os.path.isfile', 'os.path.isfile', (["(output_prefix + '.pdf')"], {}), "(output_prefix + '.pdf')\n", (1067, 1091), False, 'import os\n')]
|
from setuptools import setup, find_packages
setup(
name="elegantrl",
version="0.3.3",
author="<NAME>, <NAME>, <NAME>, <NAME>",
author_email="<EMAIL>",
url="https://github.com/AI4Finance-LLC/ElegantRL",
license="Apache 2.0",
packages=find_packages(),
install_requires=[
"gym",
"matplotlib",
"numpy",
"pybullet",
"torch",
"opencv-python",
"box2d-py",
],
description="Lightweight, Efficient and Stable DRL Implementation Using PyTorch",
classifiers=[
# Trove classifiers
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
keywords="Deep Reinforcment Learning",
python_requires=">=3.6",
)
|
[
"setuptools.find_packages"
] |
[((262, 277), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (275, 277), False, 'from setuptools import setup, find_packages\n')]
|
import os
import sys
import numpy as np
import flopy
import matplotlib.pyplot as plt
# --modify default matplotlib settings
updates = {
"font.family": ["Univers 57 Condensed", "Arial"],
"mathtext.default": "regular",
"pdf.compression": 0,
"pdf.fonttype": 42,
"legend.fontsize": 7,
"axes.labelsize": 8,
"xtick.labelsize": 7,
"ytick.labelsize": 7,
}
plt.rcParams.update(updates)
def MergeData(ndim, zdata, tb):
sv = 0.05
md = np.empty((ndim), float)
md.fill(np.nan)
found = np.empty((ndim), bool)
found.fill(False)
for idx, layer in enumerate(zdata):
for jdx, z in enumerate(layer):
if found[jdx] == True:
continue
t0 = tb[idx][0] - sv
t1 = tb[idx][1] + sv
if z < t0 and z > t1:
md[jdx] = z
found[jdx] = True
return md
def LegBar(ax, x0, y0, t0, dx, dy, dt, cc):
for c in cc:
ax.plot([x0, x0 + dx], [y0, y0], color=c, linewidth=4)
ctxt = "{0:=3d} years".format(t0)
ax.text(x0 + 2.0 * dx, y0 + dy / 2.0, ctxt, size=5)
y0 += dy
t0 += dt
return
def run():
workspace = "swiex3"
cleanFiles = False
fext = "png"
narg = len(sys.argv)
iarg = 0
if narg > 1:
while iarg < narg - 1:
iarg += 1
basearg = sys.argv[iarg].lower()
if basearg == "--clean":
cleanFiles = True
elif basearg == "--pdf":
fext = "pdf"
if cleanFiles:
print("cleaning all files")
print("excluding *.py files")
files = os.listdir(workspace)
for f in files:
fpth = os.path.join(workspace, f)
if os.path.isdir(fpth):
continue
if ".py" != os.path.splitext(f)[1].lower():
print(" removing...{}".format(os.path.basename(f)))
try:
os.remove(fpth)
except:
pass
return 0
modelname = "swiex3"
exe_name = "mf2005"
nlay = 3
nrow = 1
ncol = 200
delr = 20.0
delc = 1.0
# well data
lrcQ1 = np.array([(0, 0, 199, 0.01), (2, 0, 199, 0.02)])
lrcQ2 = np.array([(0, 0, 199, 0.01 * 0.5), (2, 0, 199, 0.02 * 0.5)])
# ghb data
lrchc = np.zeros((30, 5))
lrchc[:, [0, 1, 3, 4]] = [0, 0, 0.0, 0.8 / 2.0]
lrchc[:, 2] = np.arange(0, 30)
# swi2 data
zini = np.hstack(
(-9 * np.ones(24), np.arange(-9, -50, -0.5), -50 * np.ones(94))
)[np.newaxis, :]
iso = np.zeros((1, 200), dtype=int)
iso[:, :30] = -2
# model objects
ml = flopy.modflow.Modflow(
modelname, version="mf2005", exe_name=exe_name, model_ws=workspace
)
discret = flopy.modflow.ModflowDis(
ml,
nrow=nrow,
ncol=ncol,
nlay=3,
delr=delr,
delc=delc,
laycbd=[0, 0, 0],
top=-9.0,
botm=[-29, -30, -50],
nper=2,
perlen=[365 * 1000, 1000 * 365],
nstp=[500, 500],
)
bas = flopy.modflow.ModflowBas(ml, ibound=1, strt=1.0)
bcf = flopy.modflow.ModflowBcf(
ml, laycon=[0, 0, 0], tran=[40.0, 1, 80.0], vcont=[0.005, 0.005]
)
wel = flopy.modflow.ModflowWel(ml, stress_period_data={0: lrcQ1, 1: lrcQ2})
ghb = flopy.modflow.ModflowGhb(ml, stress_period_data={0: lrchc})
swi = flopy.modflow.ModflowSwi2(
ml,
iswizt=55,
nsrf=1,
istrat=1,
toeslope=0.01,
tipslope=0.04,
nu=[0, 0.025],
zeta=[zini, zini, zini],
ssz=0.2,
isource=iso,
nsolver=1,
)
oc = flopy.modflow.ModflowOc(ml, save_every=100, save_types=["save head"])
pcg = flopy.modflow.ModflowPcg(ml)
# write the model files
ml.write_input()
# run the model
m = ml.run_model(silent=True)
headfile = os.path.join(workspace, "{}.hds".format(modelname))
hdobj = flopy.utils.HeadFile(headfile)
head = hdobj.get_data(totim=3.65000e05)
zetafile = os.path.join(workspace, "{}.zta".format(modelname))
zobj = flopy.utils.CellBudgetFile(zetafile)
zkstpkper = zobj.get_kstpkper()
zeta = []
for kk in zkstpkper:
zeta.append(zobj.get_data(kstpkper=kk, text="ZETASRF 1")[0])
zeta = np.array(zeta)
fwid, fhgt = 7.00, 4.50
flft, frgt, fbot, ftop = 0.125, 0.95, 0.125, 0.925
colormap = plt.cm.plasma # winter
cc = []
icolor = 11
cr = np.linspace(0.0, 0.9, icolor)
for idx in cr:
cc.append(colormap(idx))
lw = 0.5
x = np.arange(-30 * delr + 0.5 * delr, (ncol - 30) * delr, delr)
xedge = np.linspace(-30.0 * delr, (ncol - 30.0) * delr, len(x) + 1)
zedge = [[-9.0, -29.0], [-29.0, -30.0], [-30.0, -50.0]]
fig = plt.figure(figsize=(fwid, fhgt), facecolor="w")
fig.subplots_adjust(
wspace=0.25, hspace=0.25, left=flft, right=frgt, bottom=fbot, top=ftop
)
ax = fig.add_subplot(311)
ax.text(
-0.075,
1.05,
"A",
transform=ax.transAxes,
va="center",
ha="center",
size="8",
)
# confining unit
ax.fill(
[-600, 3400, 3400, -600],
[-29, -29, -30, -30],
fc=[0.8, 0.8, 0.8],
ec=[0.8, 0.8, 0.8],
)
#
z = np.copy(zini[0, :])
zr = z.copy()
p = (zr < -9.0) & (zr > -50.0)
ax.plot(x[p], zr[p], color=cc[0], linewidth=lw, drawstyle="steps-mid")
#
for i in range(5):
zt = MergeData(
ncol, [zeta[i, 0, 0, :], zeta[i, 1, 0, :], zeta[i, 2, 0, :]], zedge
)
dr = zt.copy()
ax.plot(x, dr, color=cc[i + 1], linewidth=lw, drawstyle="steps-mid")
# Manufacture a legend bar
LegBar(ax, -200.0, -33.75, 0, 25, -2.5, 200, cc[0:6])
# axes
ax.set_ylim(-50, -9)
ax.set_ylabel("Elevation, in meters")
ax.set_xlim(-250.0, 2500.0)
ax = fig.add_subplot(312)
ax.text(
-0.075,
1.05,
"B",
transform=ax.transAxes,
va="center",
ha="center",
size="8",
)
# confining unit
ax.fill(
[-600, 3400, 3400, -600],
[-29, -29, -30, -30],
fc=[0.8, 0.8, 0.8],
ec=[0.8, 0.8, 0.8],
)
#
for i in range(4, 10):
zt = MergeData(
ncol, [zeta[i, 0, 0, :], zeta[i, 1, 0, :], zeta[i, 2, 0, :]], zedge
)
dr = zt.copy()
ax.plot(x, dr, color=cc[i + 1], linewidth=lw, drawstyle="steps-mid")
# Manufacture a legend bar
LegBar(ax, -200.0, -33.75, 1000, 25, -2.5, 200, cc[5:11])
# axes
ax.set_ylim(-50, -9)
ax.set_ylabel("Elevation, in meters")
ax.set_xlim(-250.0, 2500.0)
ax = fig.add_subplot(313)
ax.text(
-0.075,
1.05,
"C",
transform=ax.transAxes,
va="center",
ha="center",
size="8",
)
# confining unit
ax.fill(
[-600, 3400, 3400, -600],
[-29, -29, -30, -30],
fc=[0.8, 0.8, 0.8],
ec=[0.8, 0.8, 0.8],
)
#
zt = MergeData(
ncol, [zeta[4, 0, 0, :], zeta[4, 1, 0, :], zeta[4, 2, 0, :]], zedge
)
ax.plot(
x,
zt,
marker="o",
markersize=3,
linewidth=0.0,
markeredgecolor="blue",
markerfacecolor="None",
)
# <NAME>
zeta1 = -9 - 40.0 * (head[0, 0, :])
gbh = np.empty(len(zeta1), float)
gbho = np.empty(len(zeta1), float)
for idx, z1 in enumerate(zeta1):
if z1 >= -9.0 or z1 <= -50.0:
gbh[idx] = np.nan
gbho[idx] = 0.0
else:
gbh[idx] = z1
gbho[idx] = z1
ax.plot(x, gbh, "r")
np.savetxt(os.path.join(workspace, "Ghyben-Herzberg.out"), gbho)
# fake figures
ax.plot([-100.0, -100], [-100.0, -100], "r", label="Ghyben-Herzberg")
ax.plot(
[-100.0, -100],
[-100.0, -100],
"bo",
markersize=3,
markeredgecolor="blue",
markerfacecolor="None",
label="SWI2",
)
# legend
leg = ax.legend(loc="lower left", numpoints=1)
leg._drawFrame = False
# axes
ax.set_ylim(-50, -9)
ax.set_xlabel("Horizontal distance, in meters")
ax.set_ylabel("Elevation, in meters")
ax.set_xlim(-250.0, 2500.0)
outfig = os.path.join(workspace, "Figure08_swi2ex3.{0}".format(fext))
fig.savefig(outfig, dpi=300)
print("created...", outfig)
return 0
if __name__ == "__main__":
success = run()
|
[
"os.remove",
"flopy.modflow.ModflowOc",
"numpy.empty",
"numpy.ones",
"matplotlib.pyplot.figure",
"flopy.modflow.ModflowBcf",
"numpy.arange",
"flopy.modflow.ModflowSwi2",
"os.path.join",
"flopy.utils.CellBudgetFile",
"flopy.modflow.ModflowPcg",
"numpy.copy",
"flopy.modflow.ModflowGhb",
"matplotlib.pyplot.rcParams.update",
"numpy.linspace",
"flopy.modflow.Modflow",
"flopy.modflow.ModflowWel",
"os.path.basename",
"os.listdir",
"flopy.utils.HeadFile",
"os.path.isdir",
"numpy.zeros",
"flopy.modflow.ModflowDis",
"flopy.modflow.ModflowBas",
"numpy.array",
"os.path.splitext"
] |
[((384, 412), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (['updates'], {}), '(updates)\n', (403, 412), True, 'import matplotlib.pyplot as plt\n'), ((470, 491), 'numpy.empty', 'np.empty', (['ndim', 'float'], {}), '(ndim, float)\n', (478, 491), True, 'import numpy as np\n'), ((526, 546), 'numpy.empty', 'np.empty', (['ndim', 'bool'], {}), '(ndim, bool)\n', (534, 546), True, 'import numpy as np\n'), ((2191, 2239), 'numpy.array', 'np.array', (['[(0, 0, 199, 0.01), (2, 0, 199, 0.02)]'], {}), '([(0, 0, 199, 0.01), (2, 0, 199, 0.02)])\n', (2199, 2239), True, 'import numpy as np\n'), ((2252, 2312), 'numpy.array', 'np.array', (['[(0, 0, 199, 0.01 * 0.5), (2, 0, 199, 0.02 * 0.5)]'], {}), '([(0, 0, 199, 0.01 * 0.5), (2, 0, 199, 0.02 * 0.5)])\n', (2260, 2312), True, 'import numpy as np\n'), ((2340, 2357), 'numpy.zeros', 'np.zeros', (['(30, 5)'], {}), '((30, 5))\n', (2348, 2357), True, 'import numpy as np\n'), ((2428, 2444), 'numpy.arange', 'np.arange', (['(0)', '(30)'], {}), '(0, 30)\n', (2437, 2444), True, 'import numpy as np\n'), ((2586, 2615), 'numpy.zeros', 'np.zeros', (['(1, 200)'], {'dtype': 'int'}), '((1, 200), dtype=int)\n', (2594, 2615), True, 'import numpy as np\n'), ((2666, 2759), 'flopy.modflow.Modflow', 'flopy.modflow.Modflow', (['modelname'], {'version': '"""mf2005"""', 'exe_name': 'exe_name', 'model_ws': 'workspace'}), "(modelname, version='mf2005', exe_name=exe_name,\n model_ws=workspace)\n", (2687, 2759), False, 'import flopy\n'), ((2784, 2982), 'flopy.modflow.ModflowDis', 'flopy.modflow.ModflowDis', (['ml'], {'nrow': 'nrow', 'ncol': 'ncol', 'nlay': '(3)', 'delr': 'delr', 'delc': 'delc', 'laycbd': '[0, 0, 0]', 'top': '(-9.0)', 'botm': '[-29, -30, -50]', 'nper': '(2)', 'perlen': '[365 * 1000, 1000 * 365]', 'nstp': '[500, 500]'}), '(ml, nrow=nrow, ncol=ncol, nlay=3, delr=delr, delc=\n delc, laycbd=[0, 0, 0], top=-9.0, botm=[-29, -30, -50], nper=2, perlen=\n [365 * 1000, 1000 * 365], nstp=[500, 500])\n', (2808, 2982), False, 'import flopy\n'), 
((3086, 3134), 'flopy.modflow.ModflowBas', 'flopy.modflow.ModflowBas', (['ml'], {'ibound': '(1)', 'strt': '(1.0)'}), '(ml, ibound=1, strt=1.0)\n', (3110, 3134), False, 'import flopy\n'), ((3145, 3240), 'flopy.modflow.ModflowBcf', 'flopy.modflow.ModflowBcf', (['ml'], {'laycon': '[0, 0, 0]', 'tran': '[40.0, 1, 80.0]', 'vcont': '[0.005, 0.005]'}), '(ml, laycon=[0, 0, 0], tran=[40.0, 1, 80.0], vcont=\n [0.005, 0.005])\n', (3169, 3240), False, 'import flopy\n'), ((3260, 3333), 'flopy.modflow.ModflowWel', 'flopy.modflow.ModflowWel', (['ml'], {'stress_period_data': '{(0): lrcQ1, (1): lrcQ2}'}), '(ml, stress_period_data={(0): lrcQ1, (1): lrcQ2})\n', (3284, 3333), False, 'import flopy\n'), ((3340, 3401), 'flopy.modflow.ModflowGhb', 'flopy.modflow.ModflowGhb', (['ml'], {'stress_period_data': '{(0): lrchc}'}), '(ml, stress_period_data={(0): lrchc})\n', (3364, 3401), False, 'import flopy\n'), ((3410, 3580), 'flopy.modflow.ModflowSwi2', 'flopy.modflow.ModflowSwi2', (['ml'], {'iswizt': '(55)', 'nsrf': '(1)', 'istrat': '(1)', 'toeslope': '(0.01)', 'tipslope': '(0.04)', 'nu': '[0, 0.025]', 'zeta': '[zini, zini, zini]', 'ssz': '(0.2)', 'isource': 'iso', 'nsolver': '(1)'}), '(ml, iswizt=55, nsrf=1, istrat=1, toeslope=0.01,\n tipslope=0.04, nu=[0, 0.025], zeta=[zini, zini, zini], ssz=0.2, isource\n =iso, nsolver=1)\n', (3435, 3580), False, 'import flopy\n'), ((3676, 3745), 'flopy.modflow.ModflowOc', 'flopy.modflow.ModflowOc', (['ml'], {'save_every': '(100)', 'save_types': "['save head']"}), "(ml, save_every=100, save_types=['save head'])\n", (3699, 3745), False, 'import flopy\n'), ((3756, 3784), 'flopy.modflow.ModflowPcg', 'flopy.modflow.ModflowPcg', (['ml'], {}), '(ml)\n', (3780, 3784), False, 'import flopy\n'), ((3968, 3998), 'flopy.utils.HeadFile', 'flopy.utils.HeadFile', (['headfile'], {}), '(headfile)\n', (3988, 3998), False, 'import flopy\n'), ((4122, 4158), 'flopy.utils.CellBudgetFile', 'flopy.utils.CellBudgetFile', (['zetafile'], {}), '(zetafile)\n', (4148, 4158), False, 
'import flopy\n'), ((4315, 4329), 'numpy.array', 'np.array', (['zeta'], {}), '(zeta)\n', (4323, 4329), True, 'import numpy as np\n'), ((4491, 4520), 'numpy.linspace', 'np.linspace', (['(0.0)', '(0.9)', 'icolor'], {}), '(0.0, 0.9, icolor)\n', (4502, 4520), True, 'import numpy as np\n'), ((4595, 4655), 'numpy.arange', 'np.arange', (['(-30 * delr + 0.5 * delr)', '((ncol - 30) * delr)', 'delr'], {}), '(-30 * delr + 0.5 * delr, (ncol - 30) * delr, delr)\n', (4604, 4655), True, 'import numpy as np\n'), ((4799, 4846), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(fwid, fhgt)', 'facecolor': '"""w"""'}), "(figsize=(fwid, fhgt), facecolor='w')\n", (4809, 4846), True, 'import matplotlib.pyplot as plt\n'), ((5316, 5335), 'numpy.copy', 'np.copy', (['zini[0, :]'], {}), '(zini[0, :])\n', (5323, 5335), True, 'import numpy as np\n'), ((1639, 1660), 'os.listdir', 'os.listdir', (['workspace'], {}), '(workspace)\n', (1649, 1660), False, 'import os\n'), ((7695, 7741), 'os.path.join', 'os.path.join', (['workspace', '"""Ghyben-Herzberg.out"""'], {}), "(workspace, 'Ghyben-Herzberg.out')\n", (7707, 7741), False, 'import os\n'), ((1704, 1730), 'os.path.join', 'os.path.join', (['workspace', 'f'], {}), '(workspace, f)\n', (1716, 1730), False, 'import os\n'), ((1746, 1765), 'os.path.isdir', 'os.path.isdir', (['fpth'], {}), '(fpth)\n', (1759, 1765), False, 'import os\n'), ((2510, 2534), 'numpy.arange', 'np.arange', (['(-9)', '(-50)', '(-0.5)'], {}), '(-9, -50, -0.5)\n', (2519, 2534), True, 'import numpy as np\n'), ((1958, 1973), 'os.remove', 'os.remove', (['fpth'], {}), '(fpth)\n', (1967, 1973), False, 'import os\n'), ((2497, 2508), 'numpy.ones', 'np.ones', (['(24)'], {}), '(24)\n', (2504, 2508), True, 'import numpy as np\n'), ((2542, 2553), 'numpy.ones', 'np.ones', (['(94)'], {}), '(94)\n', (2549, 2553), True, 'import numpy as np\n'), ((1895, 1914), 'os.path.basename', 'os.path.basename', (['f'], {}), '(f)\n', (1911, 1914), False, 'import os\n'), ((1816, 1835), 
'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (1832, 1835), False, 'import os\n')]
|
#- Python 3 source code
#- qq-wait-times-dormant-bin5.py ~~
#
# This program creates a Quantile-Quantile plot (or more accurately here, a
# Percentile-Percentile plot) of the distributions for the wait times of
# bin 5 jobs submitted by non-CSC108 projects during the two weeks prior to
# the "dormant" period from July 21 through August 4, when CSC108 was not
# running ATLAS jobs, versus the wait times experienced during the dormant
# period itself.
#
# This program will not run correctly on OLCF machines unless the appropriate
# module has already been loaded:
#
# $ module load python_anaconda2
#
# ~~ (c) SRW, 24 Aug 2018
# ~~ last updated 04 Dec 2018
from datetime import datetime
import matplotlib
import matplotlib.pyplot as pyplot
import numpy
import os
import sqlite3
###
def analyze(connection):
cursor = connection.cursor()
query = """
SELECT DISTINCT
JobID,
(StartTime - SubmissionTime) AS WaitTime
FROM
active
WHERE
(Account != "CSC108" OR User != "doleynik")
AND SubmissionTime <= StartTime
-- An estimate for July 7 through July 21
AND (1530939600 < SampleTime AND SampleTime < 1532149200)
AND ((ReqNodes IS NULL
AND (ReqProcs / 16) <= 125)
OR (ReqNodes IS NOT NULL
AND ReqNodes <= 125))
;
"""
with_csc108 = []
for row in cursor.execute(query):
with_csc108.append(row["WaitTime"])
# Now we will change the query to find WaitTimes for jobs that ran while
# CSC108 was "dormant".
query = """
SELECT DISTINCT JobID,
(StartTime - SubmissionTime) AS WaitTime
FROM
active
WHERE
(Account != "CSC108" OR User != "doleynik")
AND SubmissionTime < StartTime
-- An estimate for July 21 through August 4
AND (1532149200 < SampleTime AND SampleTime < 1533358800)
AND ((ReqNodes IS NULL
AND (ReqProcs / 16) <= 125)
OR (ReqNodes IS NOT NULL
AND ReqNodes <= 125))
;
"""
wo_csc108 = []
for row in cursor.execute(query):
wo_csc108.append(row["WaitTime"])
# Next, compute the percentiles or quantiles. It really doesn't matter which,
# because we are only going to use those to relate the two distributions. I
# will just call them "marks".
marks_to_use = range(10, 90)
marks_with = numpy.percentile(with_csc108, marks_to_use)
marks_wo = numpy.percentile(wo_csc108, marks_to_use)
# Create the QQ plot.
fig = pyplot.figure()
ax = fig.add_subplot(111)
pyplot.plot(marks_with, marks_wo, 'bo')
ax.set(xlabel = "Pre-Dormant (July 7 - 21)",
ylabel = "Dormant (July 21 - August 4)",
title = "QQ Plot: Wait Times for Bin 5 Jobs for Fixed Time Periods")
ax.grid()
current_script = os.path.basename(__file__)
fig.savefig(os.path.splitext(current_script)[0] + ".png", dpi = 300)
###
def main():
# Store current working directory.
cwd = os.getcwd()
# Find the data directory, where this script is running remotely at OLCF and
# locally on a personal laptop, for example.
if os.path.isdir("/lustre/atlas/proj-shared/csc108/data/moab/"):
data_dir = "/lustre/atlas/proj-shared/csc108/data/moab/"
elif os.path.isdir(os.path.join(cwd, "moab")):
data_dir = os.path.join(cwd, "moab")
else:
raise Exception("Data directory not found.")
# Create string to represent path to database file.
dbfilename = os.path.join(data_dir, "moab-data.sqlite")
# Open connection to the database (file).
connection = sqlite3.connect(dbfilename)
# Enable users to access columns by name instead of by index.
connection.row_factory = sqlite3.Row
# Ensure read-only access to the database
connection.execute("PRAGMA query_only = true;")
# Run custom analyis code.
analyze(connection)
# Commit any changes and close the connection to the database.
connection.commit()
connection.close()
###
if __name__ == "__main__":
main()
#- vim:set syntax=python:
|
[
"matplotlib.pyplot.plot",
"os.path.basename",
"os.getcwd",
"os.path.isdir",
"numpy.percentile",
"matplotlib.pyplot.figure",
"sqlite3.connect",
"os.path.splitext",
"os.path.join"
] |
[((2761, 2804), 'numpy.percentile', 'numpy.percentile', (['with_csc108', 'marks_to_use'], {}), '(with_csc108, marks_to_use)\n', (2777, 2804), False, 'import numpy\n'), ((2820, 2861), 'numpy.percentile', 'numpy.percentile', (['wo_csc108', 'marks_to_use'], {}), '(wo_csc108, marks_to_use)\n', (2836, 2861), False, 'import numpy\n'), ((2898, 2913), 'matplotlib.pyplot.figure', 'pyplot.figure', ([], {}), '()\n', (2911, 2913), True, 'import matplotlib.pyplot as pyplot\n'), ((2949, 2988), 'matplotlib.pyplot.plot', 'pyplot.plot', (['marks_with', 'marks_wo', '"""bo"""'], {}), "(marks_with, marks_wo, 'bo')\n", (2960, 2988), True, 'import matplotlib.pyplot as pyplot\n'), ((3201, 3227), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (3217, 3227), False, 'import os\n'), ((3368, 3379), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3377, 3379), False, 'import os\n'), ((3515, 3575), 'os.path.isdir', 'os.path.isdir', (['"""/lustre/atlas/proj-shared/csc108/data/moab/"""'], {}), "('/lustre/atlas/proj-shared/csc108/data/moab/')\n", (3528, 3575), False, 'import os\n'), ((3874, 3916), 'os.path.join', 'os.path.join', (['data_dir', '"""moab-data.sqlite"""'], {}), "(data_dir, 'moab-data.sqlite')\n", (3886, 3916), False, 'import os\n'), ((3980, 4007), 'sqlite3.connect', 'sqlite3.connect', (['dbfilename'], {}), '(dbfilename)\n', (3995, 4007), False, 'import sqlite3\n'), ((3665, 3690), 'os.path.join', 'os.path.join', (['cwd', '"""moab"""'], {}), "(cwd, 'moab')\n", (3677, 3690), False, 'import os\n'), ((3712, 3737), 'os.path.join', 'os.path.join', (['cwd', '"""moab"""'], {}), "(cwd, 'moab')\n", (3724, 3737), False, 'import os\n'), ((3244, 3276), 'os.path.splitext', 'os.path.splitext', (['current_script'], {}), '(current_script)\n', (3260, 3276), False, 'import os\n')]
|
import os
import ctypes
import numpy
from algopy.base_type import Ring
_ctps = numpy.ctypeslib.load_library('libctps', os.path.dirname(__file__))
double_ptr = ctypes.POINTER(ctypes.c_double)
argtypes1 = [ctypes.c_int, double_ptr, double_ptr, double_ptr]
_ctps.ctps_add.argtypes = argtypes1
_ctps.ctps_sub.argtypes = argtypes1
_ctps.ctps_mul.argtypes = argtypes1
_ctps.ctps_div.argtypes = argtypes1
class CTPS(Ring):
def __init__(self, data):
"""
CTPS = Cross Derivative Taylor Polynomial
Implements the factor ring R[t1,...,tK]/<t1^2,...,tK^2>
Calls C functions internally. I.e. functionality *should* be the same as for the class CTPS.
"""
self.data = numpy.array(data)
@classmethod
def __scalar_to_data__(cls, xdata, x):
xdata[0] = x
@classmethod
def __zeros_like__(cls, data):
return numpy.zeros_like(data)
@classmethod
def add(cls, retval_data, lhs_data, rhs_data):
K = retval_data.size
_ctps.ctps_add(K,
lhs_data.ctypes.data_as(double_ptr),
rhs_data.ctypes.data_as(double_ptr),
retval_data.ctypes.data_as(double_ptr))
@classmethod
def sub(cls, retval_data, lhs_data, rhs_data):
K = retval_data.size
_ctps.ctps_sub(K,
lhs_data.ctypes.data_as(double_ptr),
rhs_data.ctypes.data_as(double_ptr),
retval_data.ctypes.data_as(double_ptr))
@classmethod
def mul(cls, retval_data, lhs_data, rhs_data):
K = retval_data.size
_ctps.ctps_mul(K,
lhs_data.ctypes.data_as(double_ptr),
rhs_data.ctypes.data_as(double_ptr),
retval_data.ctypes.data_as(double_ptr))
@classmethod
def div(cls, retval_data, lhs_data, rhs_data):
K = retval_data.size
_ctps.ctps_div(K,
lhs_data.ctypes.data_as(double_ptr),
rhs_data.ctypes.data_as(double_ptr),
retval_data.ctypes.data_as(double_ptr))
def __repr__(self):
return self.__str__()
def __str__(self):
return str(self.data)
|
[
"os.path.dirname",
"numpy.zeros_like",
"numpy.array",
"ctypes.POINTER"
] |
[((163, 194), 'ctypes.POINTER', 'ctypes.POINTER', (['ctypes.c_double'], {}), '(ctypes.c_double)\n', (177, 194), False, 'import ctypes\n'), ((121, 146), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (136, 146), False, 'import os\n'), ((721, 738), 'numpy.array', 'numpy.array', (['data'], {}), '(data)\n', (732, 738), False, 'import numpy\n'), ((905, 927), 'numpy.zeros_like', 'numpy.zeros_like', (['data'], {}), '(data)\n', (921, 927), False, 'import numpy\n')]
|
from __future__ import print_function
import argparse
import os
import h5py
import numpy as np
import sys
from molecules.model import MoleculeVAE
from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, \
decode_smiles_from_indexes, load_dataset
from pylab import figure, axes, scatter, title, show
from rdkit import Chem
from rdkit.Chem import Draw
LATENT_DIM = 292
TARGET = 'autoencoder'
def get_arguments():
parser = argparse.ArgumentParser(description='Molecular autoencoder network')
parser.add_argument('data', type=str, help='File of latent representation tensors for decoding.')
parser.add_argument('model', type=str, help='Trained Keras model to use.')
parser.add_argument('--save_h5', type=str, help='Name of a file to write HDF5 output to.')
parser.add_argument('--target', type=str, default=TARGET,
help='What model to sample from: autoencoder, encoder, decoder.')
parser.add_argument('--latent_dim', type=int, metavar='N', default=LATENT_DIM,
help='Dimensionality of the latent representation.')
return parser.parse_args()
def read_latent_data(filename):
h5f = h5py.File(filename, 'r')
data = h5f['latent_vectors'][:]
charset = h5f['charset'][:]
h5f.close()
return (data, charset)
def autoencoder(args, model):
latent_dim = args.latent_dim
data, charset = load_dataset(args.data, split = False)
if os.path.isfile(args.model):
model.load(charset, args.model, latent_rep_size = latent_dim)
else:
raise ValueError("Model file %s doesn't exist" % args.model)
sampled = model.autoencoder.predict(data[0].reshape(1, 120, len(charset))).argmax(axis=2)[0]
mol = decode_smiles_from_indexes(map(from_one_hot_array, data[0]), charset)
sampled = decode_smiles_from_indexes(sampled, charset)
print(mol)
print(sampled)
def decoder(args, model):
latent_dim = args.latent_dim
data, charset = read_latent_data(args.data)
if os.path.isfile(args.model):
model.load(charset, args.model, latent_rep_size = latent_dim)
else:
raise ValueError("Model file %s doesn't exist" % args.model)
sampled = model.decoder.predict(data[0].reshape(1, latent_dim)).argmax(axis=2)[0]
sampled = decode_smiles_from_indexes(sampled, charset)
print(sampled)
def encoder(args, model):
latent_dim = args.latent_dim
data, charset = load_dataset(args.data, split = False)
if os.path.isfile(args.model):
model.load(charset, args.model, latent_rep_size = latent_dim)
else:
raise ValueError("Model file %s doesn't exist" % args.model)
x_latent = model.encoder.predict(data)
if args.save_h5:
h5f = h5py.File(args.save_h5, 'w')
h5f.create_dataset('charset', data = charset)
h5f.create_dataset('latent_vectors', data = x_latent)
h5f.close()
else:
np.savetxt(sys.stdout, x_latent, delimiter = '\t')
def main():
args = get_arguments()
model = MoleculeVAE()
if args.target == 'autoencoder':
autoencoder(args, model)
elif args.target == 'encoder':
encoder(args, model)
elif args.target == 'decoder':
decoder(args, model)
if __name__ == '__main__':
main()
|
[
"h5py.File",
"argparse.ArgumentParser",
"molecules.model.MoleculeVAE",
"numpy.savetxt",
"molecules.utils.decode_smiles_from_indexes",
"os.path.isfile",
"molecules.utils.load_dataset"
] |
[((455, 523), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Molecular autoencoder network"""'}), "(description='Molecular autoencoder network')\n", (478, 523), False, 'import argparse\n'), ((1186, 1210), 'h5py.File', 'h5py.File', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (1195, 1210), False, 'import h5py\n'), ((1407, 1443), 'molecules.utils.load_dataset', 'load_dataset', (['args.data'], {'split': '(False)'}), '(args.data, split=False)\n', (1419, 1443), False, 'from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, decode_smiles_from_indexes, load_dataset\n'), ((1454, 1480), 'os.path.isfile', 'os.path.isfile', (['args.model'], {}), '(args.model)\n', (1468, 1480), False, 'import os\n'), ((1823, 1867), 'molecules.utils.decode_smiles_from_indexes', 'decode_smiles_from_indexes', (['sampled', 'charset'], {}), '(sampled, charset)\n', (1849, 1867), False, 'from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, decode_smiles_from_indexes, load_dataset\n'), ((2018, 2044), 'os.path.isfile', 'os.path.isfile', (['args.model'], {}), '(args.model)\n', (2032, 2044), False, 'import os\n'), ((2296, 2340), 'molecules.utils.decode_smiles_from_indexes', 'decode_smiles_from_indexes', (['sampled', 'charset'], {}), '(sampled, charset)\n', (2322, 2340), False, 'from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, decode_smiles_from_indexes, load_dataset\n'), ((2440, 2476), 'molecules.utils.load_dataset', 'load_dataset', (['args.data'], {'split': '(False)'}), '(args.data, split=False)\n', (2452, 2476), False, 'from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, decode_smiles_from_indexes, load_dataset\n'), ((2487, 2513), 'os.path.isfile', 'os.path.isfile', (['args.model'], {}), '(args.model)\n', (2501, 2513), False, 'import os\n'), ((3029, 3042), 'molecules.model.MoleculeVAE', 'MoleculeVAE', ([], {}), '()\n', (3040, 3042), False, 'from molecules.model 
import MoleculeVAE\n'), ((2743, 2771), 'h5py.File', 'h5py.File', (['args.save_h5', '"""w"""'], {}), "(args.save_h5, 'w')\n", (2752, 2771), False, 'import h5py\n'), ((2926, 2974), 'numpy.savetxt', 'np.savetxt', (['sys.stdout', 'x_latent'], {'delimiter': '"""\t"""'}), "(sys.stdout, x_latent, delimiter='\\t')\n", (2936, 2974), True, 'import numpy as np\n')]
|
from random import randint
from flask import Flask, request, jsonify, redirect, make_response
app = Flask(__name__)
auth = randint(100, 50000)
@app.route('/get-auth', methods=['POST'])
def get_auth_cookie():
req = request.get_json()
if req['pass'] == '<PASSWORD>':
res = make_response(jsonify({'auth': str(auth)}))
res.set_cookie('auth', str(auth))
else:
res = make_response(jsonify({'erro': 'nao autorizado'}), 401)
res.set_cookie('auth', '0')
return res
@app.route('/get-complex-object', methods=['GET'])
def get_complex_object():
print(bool(request.args.get('returnObject')))
if bool(request.args.get('returnObject')):
return_object = {
"complexObj":
[
{
"id": "0001",
"type": "donut",
"name": "Cake",
"ppu": 0.55,
"batters":
{
"batter":
[
{"id": "1001", "type": "Regular"},
{"id": "1002", "type": "Chocolate"},
{"id": "1003", "type": "Blueberry"},
{"id": "1004", "type": "Devil's Food"}
]
},
"topping":
[
{"id": "5001", "type": "None"},
{"id": "5002", "type": "Glazed"},
{"id": "5005", "type": "Sugar"},
{"id": "5007", "type": "Powdered Sugar"},
{"id": "5006", "type": "Chocolate with Sprinkles"},
{"id": "5003", "type": "Chocolate"},
{"id": "5004", "type": "Maple"}
]
},
{
"id": "0002",
"type": "donut",
"name": "Raised",
"ppu": 0.55,
"batters":
{
"batter":
[
{"id": "1001", "type": "Regular"}
]
},
"topping":
[
{"id": "5001", "type": "None"},
{"id": "5002", "type": "Glazed"},
{"id": "5005", "type": "Sugar"},
{"id": "5003", "type": "Chocolate"},
{"id": "5004", "type": "Maple"}
]
},
{
"id": "0003",
"type": "donut",
"name": "Old Fashioned",
"ppu": 0.55,
"batters":
{
"batter":
[
{"id": "1001", "type": "Regular"},
{"id": "1002", "type": "Chocolate"}
]
},
"topping":
[
{"id": "5001", "type": "None"},
{"id": "5002", "type": "Glazed"},
{"id": "5003", "type": "Chocolate"},
{"id": "5004", "type": "Maple"}
]
}
]
}
return jsonify(return_object)
return jsonify({"erro": "erro"})
@app.route('/nao-autorizado-param', methods=['GET'])
def get_redirect():
if request.args.get('auth') and int(request.args.get('auth')) == auth:
return jsonify({'redirected': False})
return redirect("http://localhost:5000/redirected", code=302)
@app.route('/nao-autorizado-cookie', methods=['GET'])
def get_redirect_cookie():
if 'auth' in request.cookies and request.cookies['auth'] == str(auth):
return jsonify({'redirected': False})
return redirect("http://localhost:5000/redirected", code=302)
@app.route('/redirected', methods=['GET'])
def redirected():
return jsonify([{"redirected": True}])
if __name__ == '__main__':
app.run(debug=True, host="0.0.0.0") # run app in debug mode on port 5000.
|
[
"random.randint",
"flask.redirect",
"flask.request.args.get",
"flask.Flask",
"flask.jsonify",
"flask.request.get_json"
] |
[((102, 117), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (107, 117), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((126, 145), 'random.randint', 'randint', (['(100)', '(50000)'], {}), '(100, 50000)\n', (133, 145), False, 'from random import randint\n'), ((223, 241), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (239, 241), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((3983, 4008), 'flask.jsonify', 'jsonify', (["{'erro': 'erro'}"], {}), "({'erro': 'erro'})\n", (3990, 4008), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4216, 4270), 'flask.redirect', 'redirect', (['"""http://localhost:5000/redirected"""'], {'code': '(302)'}), "('http://localhost:5000/redirected', code=302)\n", (4224, 4270), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4486, 4540), 'flask.redirect', 'redirect', (['"""http://localhost:5000/redirected"""'], {'code': '(302)'}), "('http://localhost:5000/redirected', code=302)\n", (4494, 4540), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4615, 4646), 'flask.jsonify', 'jsonify', (["[{'redirected': True}]"], {}), "([{'redirected': True}])\n", (4622, 4646), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((650, 682), 'flask.request.args.get', 'request.args.get', (['"""returnObject"""'], {}), "('returnObject')\n", (666, 682), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((3949, 3971), 'flask.jsonify', 'jsonify', (['return_object'], {}), '(return_object)\n', (3956, 3971), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4091, 4115), 'flask.request.args.get', 'request.args.get', (['"""auth"""'], {}), "('auth')\n", (4107, 4115), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4174, 4204), 'flask.jsonify', 'jsonify', 
(["{'redirected': False}"], {}), "({'redirected': False})\n", (4181, 4204), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4444, 4474), 'flask.jsonify', 'jsonify', (["{'redirected': False}"], {}), "({'redirected': False})\n", (4451, 4474), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((416, 451), 'flask.jsonify', 'jsonify', (["{'erro': 'nao autorizado'}"], {}), "({'erro': 'nao autorizado'})\n", (423, 451), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((603, 635), 'flask.request.args.get', 'request.args.get', (['"""returnObject"""'], {}), "('returnObject')\n", (619, 635), False, 'from flask import Flask, request, jsonify, redirect, make_response\n'), ((4124, 4148), 'flask.request.args.get', 'request.args.get', (['"""auth"""'], {}), "('auth')\n", (4140, 4148), False, 'from flask import Flask, request, jsonify, redirect, make_response\n')]
|
from django.conf.urls import url
from bfs.views import *
urlpatterns = [
url(r'^', index),
]
|
[
"django.conf.urls.url"
] |
[((79, 94), 'django.conf.urls.url', 'url', (['"""^"""', 'index'], {}), "('^', index)\n", (82, 94), False, 'from django.conf.urls import url\n')]
|
# Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import os
import sys
from unittest import mock
import testtools
from osc_lib import shell
from osc_lib.tests import utils
DEFAULT_AUTH_URL = "http://127.0.0.1:5000/v2.0/"
DEFAULT_PROJECT_ID = "xxxx-yyyy-zzzz"
DEFAULT_PROJECT_NAME = "project"
DEFAULT_DOMAIN_ID = "aaaa-bbbb-cccc"
DEFAULT_DOMAIN_NAME = "default"
DEFAULT_USER_DOMAIN_ID = "aaaa-bbbb-cccc"
DEFAULT_USER_DOMAIN_NAME = "domain"
DEFAULT_PROJECT_DOMAIN_ID = "aaaa-bbbb-cccc"
DEFAULT_PROJECT_DOMAIN_NAME = "domain"
DEFAULT_USERNAME = "username"
DEFAULT_PASSWORD = "password"
DEFAULT_CLOUD = "altocumulus"
DEFAULT_REGION_NAME = "ZZ9_Plural_Z_Alpha"
DEFAULT_TOKEN = "token"
DEFAULT_SERVICE_URL = "http://127.0.0.1:8771/v3.0/"
DEFAULT_AUTH_PLUGIN = "v2password"
DEFAULT_INTERFACE = "internal"
DEFAULT_COMPUTE_API_VERSION = ""
DEFAULT_IDENTITY_API_VERSION = ""
DEFAULT_IMAGE_API_VERSION = ""
DEFAULT_VOLUME_API_VERSION = ""
DEFAULT_NETWORK_API_VERSION = ""
LIB_COMPUTE_API_VERSION = ""
LIB_IDENTITY_API_VERSION = ""
LIB_IMAGE_API_VERSION = ""
LIB_VOLUME_API_VERSION = ""
LIB_NETWORK_API_VERSION = ""
CLOUD_1 = {
'clouds': {
'scc': {
'auth': {
'auth_url': DEFAULT_AUTH_URL,
'project_name': DEFAULT_PROJECT_NAME,
'username': 'zaphod',
},
'region_name': 'occ-cloud,krikkit',
'donut': 'glazed',
'interface': 'public',
}
}
}
CLOUD_2 = {
'clouds': {
'megacloud': {
'cloud': 'megadodo',
'auth': {
'project_name': 'heart-o-gold',
'username': 'zaphod',
},
'region_name': 'occ-cloud,krikkit,occ-env',
'log_file': '/tmp/test_log_file',
'log_level': 'debug',
'cert': 'mycert',
'key': 'mickey',
}
}
}
PUBLIC_1 = {
'public-clouds': {
'megadodo': {
'auth': {
'auth_url': DEFAULT_AUTH_URL,
'project_name': DEFAULT_PROJECT_NAME,
},
'region_name': 'occ-public',
'donut': 'cake',
}
}
}
# The option table values is a tuple of (<value>, <test-opt>, <test-env>)
# where <value> is the test value to use, <test-opt> is True if this option
# should be tested as a CLI option and <test-env> is True of this option
# should be tested as an environment variable.
# Global options that should be parsed before shell.initialize_app() is called
global_options = {
'--os-cloud': (DEFAULT_CLOUD, True, True),
'--os-region-name': (DEFAULT_REGION_NAME, True, True),
'--os-default-domain': (DEFAULT_DOMAIN_NAME, True, True),
'--os-cacert': ('/dev/null', True, True),
'--timing': (True, True, False),
'--os-interface': (DEFAULT_INTERFACE, True, True)
}
if shell.osprofiler_profiler:
global_options['--os-profile'] = ('SECRET_KEY', True, True)
class TestShellArgV(utils.TestShell):
"""Test the deferred help flag"""
def setUp(self):
super(TestShellArgV, self).setUp()
def test_shell_argv(self):
"""Test argv decoding
Python 2 does nothing with argv while Python 3 decodes it into
Unicode before we ever see it. We manually decode when running
under Python 2 so verify that we get the right argv types.
Use the argv supplied by the test runner so we get actual Python
runtime behaviour; we only need to check the type of argv[0]
which will alwyas be present.
"""
with mock.patch(
"osc_lib.shell.OpenStackShell.run",
self.app,
):
# Ensure type gets through unmolested through shell.main()
argv = sys.argv
shell.main(sys.argv)
self.assertEqual(type(argv[0]), type(self.app.call_args[0][0][0]))
# When shell.main() gets sys.argv itself it should be decoded
shell.main()
self.assertEqual(type(u'x'), type(self.app.call_args[0][0][0]))
class TestShellHelp(utils.TestShell):
"""Test the deferred help flag"""
def setUp(self):
super(TestShellHelp, self).setUp()
self.useFixture(utils.EnvFixture())
@testtools.skip("skip until bug 1444983 is resolved")
def test_help_options(self):
flag = "-h list server"
kwargs = {
"deferred_help": True,
}
with mock.patch(self.app_patch + ".initialize_app", self.app):
_shell, _cmd = utils.make_shell(), flag
utils.fake_execute(_shell, _cmd)
self.assertEqual(
kwargs["deferred_help"],
_shell.options.deferred_help,
)
class TestShellOptions(utils.TestShell):
"""Test the option handling by argparse and openstack.config.loader
This covers getting the CLI options through the initial processing
and validates the arguments to initialize_app() and occ_get_one()
"""
def setUp(self):
super(TestShellOptions, self).setUp()
self.useFixture(utils.EnvFixture())
def test_empty_auth(self):
os.environ = {}
self._assert_initialize_app_arg("", {})
self._assert_cloud_region_arg("", {})
def test_no_options(self):
os.environ = {}
self._assert_initialize_app_arg("", {})
self._assert_cloud_region_arg("", {})
def test_global_options(self):
self._test_options_init_app(global_options)
self._test_options_get_one_cloud(global_options)
def test_global_env(self):
self._test_env_init_app(global_options)
self._test_env_get_one_cloud(global_options)
class TestShellCli(utils.TestShell):
"""Test handling of specific global options
_shell.options is the parsed command line from argparse
_shell.client_manager.* are the values actually used
"""
def setUp(self):
super(TestShellCli, self).setUp()
env = {}
self.useFixture(utils.EnvFixture(env.copy()))
def test_shell_args_no_options(self):
_shell = utils.make_shell()
with mock.patch(
"osc_lib.shell.OpenStackShell.initialize_app",
self.app,
):
utils.fake_execute(_shell, "list user")
self.app.assert_called_with(["list", "user"])
def test_shell_args_tls_options(self):
"""Test the TLS verify and CA cert file options"""
_shell = utils.make_shell()
# Default
utils.fake_execute(_shell, "module list")
self.assertIsNone(_shell.options.verify)
self.assertIsNone(_shell.options.insecure)
self.assertIsNone(_shell.options.cacert)
self.assertTrue(_shell.client_manager.verify)
self.assertIsNone(_shell.client_manager.cacert)
# --verify
utils.fake_execute(_shell, "--verify module list")
self.assertTrue(_shell.options.verify)
self.assertIsNone(_shell.options.insecure)
self.assertIsNone(_shell.options.cacert)
self.assertTrue(_shell.client_manager.verify)
self.assertIsNone(_shell.client_manager.cacert)
# --insecure
utils.fake_execute(_shell, "--insecure module list")
self.assertIsNone(_shell.options.verify)
self.assertTrue(_shell.options.insecure)
self.assertIsNone(_shell.options.cacert)
self.assertFalse(_shell.client_manager.verify)
self.assertIsNone(_shell.client_manager.cacert)
# --os-cacert
utils.fake_execute(_shell, "--os-cacert foo module list")
self.assertIsNone(_shell.options.verify)
self.assertIsNone(_shell.options.insecure)
self.assertEqual('foo', _shell.options.cacert)
self.assertEqual('foo', _shell.client_manager.verify)
self.assertEqual('foo', _shell.client_manager.cacert)
# --os-cacert and --verify
utils.fake_execute(_shell, "--os-cacert foo --verify module list")
self.assertTrue(_shell.options.verify)
self.assertIsNone(_shell.options.insecure)
self.assertEqual('foo', _shell.options.cacert)
self.assertEqual('foo', _shell.client_manager.verify)
self.assertEqual('foo', _shell.client_manager.cacert)
# --os-cacert and --insecure
# NOTE(dtroyer): Per bug https://bugs.launchpad.net/bugs/1447784
# in this combination --insecure now overrides any
# --os-cacert setting, where before --insecure
# was ignored if --os-cacert was set.
utils.fake_execute(_shell, "--os-cacert foo --insecure module list")
self.assertIsNone(_shell.options.verify)
self.assertTrue(_shell.options.insecure)
self.assertEqual('foo', _shell.options.cacert)
self.assertFalse(_shell.client_manager.verify)
self.assertIsNone(_shell.client_manager.cacert)
def test_shell_args_cert_options(self):
"""Test client cert options"""
_shell = utils.make_shell()
# Default
utils.fake_execute(_shell, "module list")
self.assertEqual('', _shell.options.cert)
self.assertEqual('', _shell.options.key)
self.assertIsNone(_shell.client_manager.cert)
# --os-cert
utils.fake_execute(_shell, "--os-cert mycert module list")
self.assertEqual('mycert', _shell.options.cert)
self.assertEqual('', _shell.options.key)
self.assertEqual('mycert', _shell.client_manager.cert)
# --os-key
utils.fake_execute(_shell, "--os-key mickey module list")
self.assertEqual('', _shell.options.cert)
self.assertEqual('mickey', _shell.options.key)
self.assertIsNone(_shell.client_manager.cert)
# --os-cert and --os-key
utils.fake_execute(
_shell,
"--os-cert mycert --os-key mickey module list"
)
self.assertEqual('mycert', _shell.options.cert)
self.assertEqual('mickey', _shell.options.key)
self.assertEqual(('mycert', 'mickey'), _shell.client_manager.cert)
@mock.patch("openstack.config.loader.OpenStackConfig._load_config_file")
def test_shell_args_cloud_no_vendor(self, config_mock):
"""Test cloud config options without the vendor file"""
config_mock.return_value = ('file.yaml', copy.deepcopy(CLOUD_1))
_shell = utils.make_shell()
utils.fake_execute(
_shell,
"--os-cloud scc module list",
)
self.assertEqual(
'scc',
_shell.cloud.name,
)
# These come from clouds.yaml
self.assertEqual(
DEFAULT_AUTH_URL,
_shell.cloud.config['auth']['auth_url'],
)
self.assertEqual(
DEFAULT_PROJECT_NAME,
_shell.cloud.config['auth']['project_name'],
)
self.assertEqual(
'zaphod',
_shell.cloud.config['auth']['username'],
)
self.assertEqual(
'occ-cloud',
_shell.cloud.config['region_name'],
)
self.assertEqual(
'occ-cloud',
_shell.client_manager.region_name,
)
self.assertEqual(
'glazed',
_shell.cloud.config['donut'],
)
self.assertEqual(
'public',
_shell.cloud.config['interface'],
)
self.assertIsNone(_shell.cloud.config['cert'])
self.assertIsNone(_shell.cloud.config['key'])
self.assertIsNone(_shell.client_manager.cert)
@mock.patch("openstack.config.loader.OpenStackConfig._load_vendor_file")
@mock.patch("openstack.config.loader.OpenStackConfig._load_config_file")
def test_shell_args_cloud_public(self, config_mock, public_mock):
"""Test cloud config options with the vendor file"""
config_mock.return_value = ('file.yaml', copy.deepcopy(CLOUD_2))
public_mock.return_value = ('file.yaml', copy.deepcopy(PUBLIC_1))
_shell = utils.make_shell()
utils.fake_execute(
_shell,
"--os-cloud megacloud module list",
)
self.assertEqual(
'megacloud',
_shell.cloud.name,
)
# These come from clouds-public.yaml
self.assertEqual(
DEFAULT_AUTH_URL,
_shell.cloud.config['auth']['auth_url'],
)
self.assertEqual(
'cake',
_shell.cloud.config['donut'],
)
# These come from clouds.yaml
self.assertEqual(
'heart-o-gold',
_shell.cloud.config['auth']['project_name'],
)
self.assertEqual(
'zaphod',
_shell.cloud.config['auth']['username'],
)
self.assertEqual(
'occ-cloud',
_shell.cloud.config['region_name'],
)
self.assertEqual(
'occ-cloud',
_shell.client_manager.region_name,
)
self.assertEqual('mycert', _shell.cloud.config['cert'])
self.assertEqual('mickey', _shell.cloud.config['key'])
self.assertEqual(('mycert', 'mickey'), _shell.client_manager.cert)
@mock.patch("openstack.config.loader.OpenStackConfig._load_vendor_file")
@mock.patch("openstack.config.loader.OpenStackConfig._load_config_file")
def test_shell_args_precedence(self, config_mock, vendor_mock):
config_mock.return_value = ('file.yaml', copy.deepcopy(CLOUD_2))
vendor_mock.return_value = ('file.yaml', copy.deepcopy(PUBLIC_1))
_shell = utils.make_shell()
# Test command option overriding config file value
utils.fake_execute(
_shell,
"--os-cloud megacloud --os-region-name krikkit module list",
)
self.assertEqual(
'megacloud',
_shell.cloud.name,
)
# These come from clouds-public.yaml
self.assertEqual(
DEFAULT_AUTH_URL,
_shell.cloud.config['auth']['auth_url'],
)
self.assertEqual(
'cake',
_shell.cloud.config['donut'],
)
# These come from clouds.yaml
self.assertEqual(
'heart-o-gold',
_shell.cloud.config['auth']['project_name'],
)
self.assertEqual(
'zaphod',
_shell.cloud.config['auth']['username'],
)
self.assertEqual(
'krikkit',
_shell.cloud.config['region_name'],
)
self.assertEqual(
'krikkit',
_shell.client_manager.region_name,
)
class TestShellCliPrecedence(utils.TestShell):
"""Test option precedencr order"""
def setUp(self):
super(TestShellCliPrecedence, self).setUp()
env = {
'OS_CLOUD': 'megacloud',
'OS_REGION_NAME': 'occ-env',
}
self.useFixture(utils.EnvFixture(env.copy()))
@mock.patch("openstack.config.loader.OpenStackConfig._load_vendor_file")
@mock.patch("openstack.config.loader.OpenStackConfig._load_config_file")
def test_shell_args_precedence_1(self, config_mock, vendor_mock):
"""Test environment overriding occ"""
config_mock.return_value = ('file.yaml', copy.deepcopy(CLOUD_2))
vendor_mock.return_value = ('file.yaml', copy.deepcopy(PUBLIC_1))
_shell = utils.make_shell()
# Test env var
utils.fake_execute(
_shell,
"module list",
)
self.assertEqual(
'megacloud',
_shell.cloud.name,
)
# These come from clouds-public.yaml
self.assertEqual(
DEFAULT_AUTH_URL,
_shell.cloud.config['auth']['auth_url'],
)
self.assertEqual(
'cake',
_shell.cloud.config['donut'],
)
# These come from clouds.yaml
self.assertEqual(
'heart-o-gold',
_shell.cloud.config['auth']['project_name'],
)
self.assertEqual(
'zaphod',
_shell.cloud.config['auth']['username'],
)
# These come from the environment
self.assertEqual(
'occ-env',
_shell.cloud.config['region_name'],
)
self.assertEqual(
'occ-env',
_shell.client_manager.region_name,
)
@mock.patch("openstack.config.loader.OpenStackConfig._load_vendor_file")
@mock.patch("openstack.config.loader.OpenStackConfig._load_config_file")
def test_shell_args_precedence_2(self, config_mock, vendor_mock):
"""Test command line overriding environment and occ"""
config_mock.return_value = ('file.yaml', copy.deepcopy(CLOUD_2))
vendor_mock.return_value = ('file.yaml', copy.deepcopy(PUBLIC_1))
_shell = utils.make_shell()
# Test command option overriding config file value
utils.fake_execute(
_shell,
"--os-region-name krikkit list user",
)
self.assertEqual(
'megacloud',
_shell.cloud.name,
)
# These come from clouds-public.yaml
self.assertEqual(
DEFAULT_AUTH_URL,
_shell.cloud.config['auth']['auth_url'],
)
self.assertEqual(
'cake',
_shell.cloud.config['donut'],
)
# These come from clouds.yaml
self.assertEqual(
'heart-o-gold',
_shell.cloud.config['auth']['project_name'],
)
self.assertEqual(
'zaphod',
_shell.cloud.config['auth']['username'],
)
# These come from the command line
self.assertEqual(
'krikkit',
_shell.cloud.config['region_name'],
)
self.assertEqual(
'krikkit',
_shell.client_manager.region_name,
)
@mock.patch("openstack.config.loader.OpenStackConfig._load_vendor_file")
@mock.patch("openstack.config.loader.OpenStackConfig._load_config_file")
def test_shell_args_precedence_3(self, config_mock, vendor_mock):
"""Test command line overriding environment and occ"""
config_mock.return_value = ('file.yaml', copy.deepcopy(CLOUD_1))
vendor_mock.return_value = ('file.yaml', copy.deepcopy(PUBLIC_1))
_shell = utils.make_shell()
# Test command option overriding config file value
utils.fake_execute(
_shell,
"--os-cloud scc --os-region-name krikkit list user",
)
self.assertEqual(
'scc',
_shell.cloud.name,
)
# These come from clouds-public.yaml
self.assertEqual(
DEFAULT_AUTH_URL,
_shell.cloud.config['auth']['auth_url'],
)
self.assertEqual(
'glazed',
_shell.cloud.config['donut'],
)
# These come from clouds.yaml
self.assertEqual(
DEFAULT_PROJECT_NAME,
_shell.cloud.config['auth']['project_name'],
)
self.assertEqual(
'zaphod',
_shell.cloud.config['auth']['username'],
)
# These come from the command line
self.assertEqual(
'krikkit',
_shell.cloud.config['region_name'],
)
self.assertEqual(
'krikkit',
_shell.client_manager.region_name,
)
|
[
"copy.deepcopy",
"osc_lib.tests.utils.fake_execute",
"unittest.mock.patch",
"osc_lib.tests.utils.EnvFixture",
"osc_lib.shell.main",
"testtools.skip",
"osc_lib.tests.utils.make_shell"
] |
[((4820, 4872), 'testtools.skip', 'testtools.skip', (['"""skip until bug 1444983 is resolved"""'], {}), "('skip until bug 1444983 is resolved')\n", (4834, 4872), False, 'import testtools\n'), ((10663, 10734), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_config_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_config_file')\n", (10673, 10734), False, 'from unittest import mock\n'), ((12142, 12213), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_vendor_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_vendor_file')\n", (12152, 12213), False, 'from unittest import mock\n'), ((12219, 12290), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_config_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_config_file')\n", (12229, 12290), False, 'from unittest import mock\n'), ((13764, 13835), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_vendor_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_vendor_file')\n", (13774, 13835), False, 'from unittest import mock\n'), ((13841, 13912), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_config_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_config_file')\n", (13851, 13912), False, 'from unittest import mock\n'), ((15520, 15591), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_vendor_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_vendor_file')\n", (15530, 15591), False, 'from unittest import mock\n'), ((15597, 15668), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_config_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_config_file')\n", (15607, 15668), False, 'from unittest import mock\n'), ((16965, 17036), 'unittest.mock.patch', 
'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_vendor_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_vendor_file')\n", (16975, 17036), False, 'from unittest import mock\n'), ((17042, 17113), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_config_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_config_file')\n", (17052, 17113), False, 'from unittest import mock\n'), ((18487, 18558), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_vendor_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_vendor_file')\n", (18497, 18558), False, 'from unittest import mock\n'), ((18564, 18635), 'unittest.mock.patch', 'mock.patch', (['"""openstack.config.loader.OpenStackConfig._load_config_file"""'], {}), "('openstack.config.loader.OpenStackConfig._load_config_file')\n", (18574, 18635), False, 'from unittest import mock\n'), ((6666, 6684), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (6682, 6684), False, 'from osc_lib.tests import utils\n'), ((7040, 7058), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (7056, 7058), False, 'from osc_lib.tests import utils\n'), ((7086, 7127), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""module list"""'], {}), "(_shell, 'module list')\n", (7104, 7127), False, 'from osc_lib.tests import utils\n'), ((7415, 7465), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--verify module list"""'], {}), "(_shell, '--verify module list')\n", (7433, 7465), False, 'from osc_lib.tests import utils\n'), ((7753, 7805), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--insecure module list"""'], {}), "(_shell, '--insecure module list')\n", (7771, 7805), False, 'from osc_lib.tests import utils\n'), ((8095, 8152), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cacert 
foo module list"""'], {}), "(_shell, '--os-cacert foo module list')\n", (8113, 8152), False, 'from osc_lib.tests import utils\n'), ((8476, 8542), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cacert foo --verify module list"""'], {}), "(_shell, '--os-cacert foo --verify module list')\n", (8494, 8542), False, 'from osc_lib.tests import utils\n'), ((9144, 9212), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cacert foo --insecure module list"""'], {}), "(_shell, '--os-cacert foo --insecure module list')\n", (9162, 9212), False, 'from osc_lib.tests import utils\n'), ((9578, 9596), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (9594, 9596), False, 'from osc_lib.tests import utils\n'), ((9624, 9665), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""module list"""'], {}), "(_shell, 'module list')\n", (9642, 9665), False, 'from osc_lib.tests import utils\n'), ((9848, 9906), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cert mycert module list"""'], {}), "(_shell, '--os-cert mycert module list')\n", (9866, 9906), False, 'from osc_lib.tests import utils\n'), ((10103, 10160), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-key mickey module list"""'], {}), "(_shell, '--os-key mickey module list')\n", (10121, 10160), False, 'from osc_lib.tests import utils\n'), ((10362, 10436), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cert mycert --os-key mickey module list"""'], {}), "(_shell, '--os-cert mycert --os-key mickey module list')\n", (10380, 10436), False, 'from osc_lib.tests import utils\n'), ((10949, 10967), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (10965, 10967), False, 'from osc_lib.tests import utils\n'), ((10977, 11033), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cloud scc module list"""'], {}), 
"(_shell, '--os-cloud scc module list')\n", (10995, 11033), False, 'from osc_lib.tests import utils\n'), ((12586, 12604), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (12602, 12604), False, 'from osc_lib.tests import utils\n'), ((12614, 12676), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cloud megacloud module list"""'], {}), "(_shell, '--os-cloud megacloud module list')\n", (12632, 12676), False, 'from osc_lib.tests import utils\n'), ((14145, 14163), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (14161, 14163), False, 'from osc_lib.tests import utils\n'), ((14232, 14323), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cloud megacloud --os-region-name krikkit module list"""'], {}), "(_shell,\n '--os-cloud megacloud --os-region-name krikkit module list')\n", (14250, 14323), False, 'from osc_lib.tests import utils\n'), ((15949, 15967), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (15965, 15967), False, 'from osc_lib.tests import utils\n'), ((16000, 16041), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""module list"""'], {}), "(_shell, 'module list')\n", (16018, 16041), False, 'from osc_lib.tests import utils\n'), ((17411, 17429), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (17427, 17429), False, 'from osc_lib.tests import utils\n'), ((17498, 17562), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-region-name krikkit list user"""'], {}), "(_shell, '--os-region-name krikkit list user')\n", (17516, 17562), False, 'from osc_lib.tests import utils\n'), ((18933, 18951), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (18949, 18951), False, 'from osc_lib.tests import utils\n'), ((19020, 19099), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""--os-cloud scc --os-region-name krikkit list user"""'], 
{}), "(_shell, '--os-cloud scc --os-region-name krikkit list user')\n", (19038, 19099), False, 'from osc_lib.tests import utils\n'), ((4139, 4195), 'unittest.mock.patch', 'mock.patch', (['"""osc_lib.shell.OpenStackShell.run"""', 'self.app'], {}), "('osc_lib.shell.OpenStackShell.run', self.app)\n", (4149, 4195), False, 'from unittest import mock\n'), ((4351, 4371), 'osc_lib.shell.main', 'shell.main', (['sys.argv'], {}), '(sys.argv)\n', (4361, 4371), False, 'from osc_lib import shell\n'), ((4538, 4550), 'osc_lib.shell.main', 'shell.main', ([], {}), '()\n', (4548, 4550), False, 'from osc_lib import shell\n'), ((4794, 4812), 'osc_lib.tests.utils.EnvFixture', 'utils.EnvFixture', ([], {}), '()\n', (4810, 4812), False, 'from osc_lib.tests import utils\n'), ((5015, 5071), 'unittest.mock.patch', 'mock.patch', (["(self.app_patch + '.initialize_app')", 'self.app'], {}), "(self.app_patch + '.initialize_app', self.app)\n", (5025, 5071), False, 'from unittest import mock\n'), ((5137, 5169), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '_cmd'], {}), '(_shell, _cmd)\n', (5155, 5169), False, 'from osc_lib.tests import utils\n'), ((5659, 5677), 'osc_lib.tests.utils.EnvFixture', 'utils.EnvFixture', ([], {}), '()\n', (5675, 5677), False, 'from osc_lib.tests import utils\n'), ((6698, 6765), 'unittest.mock.patch', 'mock.patch', (['"""osc_lib.shell.OpenStackShell.initialize_app"""', 'self.app'], {}), "('osc_lib.shell.OpenStackShell.initialize_app', self.app)\n", (6708, 6765), False, 'from unittest import mock\n'), ((6822, 6861), 'osc_lib.tests.utils.fake_execute', 'utils.fake_execute', (['_shell', '"""list user"""'], {}), "(_shell, 'list user')\n", (6840, 6861), False, 'from osc_lib.tests import utils\n'), ((10908, 10930), 'copy.deepcopy', 'copy.deepcopy', (['CLOUD_1'], {}), '(CLOUD_1)\n', (10921, 10930), False, 'import copy\n'), ((12471, 12493), 'copy.deepcopy', 'copy.deepcopy', (['CLOUD_2'], {}), '(CLOUD_2)\n', (12484, 12493), False, 'import copy\n'), ((12544, 
12567), 'copy.deepcopy', 'copy.deepcopy', (['PUBLIC_1'], {}), '(PUBLIC_1)\n', (12557, 12567), False, 'import copy\n'), ((14030, 14052), 'copy.deepcopy', 'copy.deepcopy', (['CLOUD_2'], {}), '(CLOUD_2)\n', (14043, 14052), False, 'import copy\n'), ((14103, 14126), 'copy.deepcopy', 'copy.deepcopy', (['PUBLIC_1'], {}), '(PUBLIC_1)\n', (14116, 14126), False, 'import copy\n'), ((15834, 15856), 'copy.deepcopy', 'copy.deepcopy', (['CLOUD_2'], {}), '(CLOUD_2)\n', (15847, 15856), False, 'import copy\n'), ((15907, 15930), 'copy.deepcopy', 'copy.deepcopy', (['PUBLIC_1'], {}), '(PUBLIC_1)\n', (15920, 15930), False, 'import copy\n'), ((17296, 17318), 'copy.deepcopy', 'copy.deepcopy', (['CLOUD_2'], {}), '(CLOUD_2)\n', (17309, 17318), False, 'import copy\n'), ((17369, 17392), 'copy.deepcopy', 'copy.deepcopy', (['PUBLIC_1'], {}), '(PUBLIC_1)\n', (17382, 17392), False, 'import copy\n'), ((18818, 18840), 'copy.deepcopy', 'copy.deepcopy', (['CLOUD_1'], {}), '(CLOUD_1)\n', (18831, 18840), False, 'import copy\n'), ((18891, 18914), 'copy.deepcopy', 'copy.deepcopy', (['PUBLIC_1'], {}), '(PUBLIC_1)\n', (18904, 18914), False, 'import copy\n'), ((5100, 5118), 'osc_lib.tests.utils.make_shell', 'utils.make_shell', ([], {}), '()\n', (5116, 5118), False, 'from osc_lib.tests import utils\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# created by Lipson on 2018/4/19.
# email to <EMAIL>
#
from colorama import Fore
from colorama import init
# Initialise colorama; autoreset=True restores the default colour after each print
init(autoreset=True)
def print_err(msg):
    """Print *msg* to stdout prefixed with a red 'ERROR: ' tag."""
    line = "".join((Fore.RED, "ERROR: ", msg))
    print(line)
def print_warn(msg):
    """Print *msg* to stdout prefixed with a yellow 'WARNING: ' tag."""
    line = "".join((Fore.YELLOW, "WARNING: ", msg))
    print(line)
def print_info(msg):
    """Print *msg* to stdout prefixed with a green 'INFO: ' tag."""
    line = "".join((Fore.GREEN, "INFO: ", msg))
    print(line)
|
[
"colorama.init"
] |
[((156, 176), 'colorama.init', 'init', ([], {'autoreset': '(True)'}), '(autoreset=True)\n', (160, 176), False, 'from colorama import init\n')]
|
# Generated by Django 2.2.1 on 2019-05-17 02:07
from django.db import migrations
class Migration(migrations.Migration):
    """Schema migration: drop the obsolete WorkerHeartBeat model."""

    dependencies = [('heavy_celery', '0004_auto_20190303_0128')]

    operations = [migrations.DeleteModel(name='WorkerHeartBeat')]
|
[
"django.db.migrations.DeleteModel"
] |
[((232, 278), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""WorkerHeartBeat"""'}), "(name='WorkerHeartBeat')\n", (254, 278), False, 'from django.db import migrations\n')]
|
from django.conf.urls import url
from event import views
# NOTE: Django resolves these patterns top to bottom, so the greedy
# catch-alls (r"^event/(?P<code>.*)$" and r"^checkin/(?P<code>.*)$") must
# stay AFTER the more specific routes that share their prefix.
urlpatterns = [
    # iCalendar export for a single event
    url(r"^event/ics/(?P<code>.*).ics$", views.event_ics, name="events_event_ics"),
    # live view of a single event
    url(r"^event/(?P<code>.*)/live$", views.live, name="events_live"),
    url(
        r"^event/(?P<event_id>.*)/register$",
        views.checkin_event_register,
        name="events_checkin_event_register",
    ),
    url(
        r"^event/(?P<event_id>.*)/download$",
        views.checkin_event_download,
        name="events_checkin_event_download",
    ),
    # catch-all event detail page (keep after the /ics, /live, /register,
    # /download routes above)
    url(r"^event/(?P<code>.*)$", views.event, name="events_event"),
    url(
        r"^registration/(?P<registration_id>.*)/url$",
        views.registration_url,
        name="registration_url",
    ),
    # event listing (site root for this app)
    url(r"^$", views.events, name="events_events"),
    url(r"^checkin$", views.checkin_events, name="events_checkin_events"),
    url(
        r"^checkin/attend/(?P<registration_id>.*)$",
        views.checkin_event_attend,
        name="events_checkin_event_attend",
    ),
    url(
        r"^checkin/(?P<code>.*)/qr$",
        views.checkin_event_qr,
        name="events_checkin_event_qr",
    ),
    # catch-all check-in page (keep after /attend and /qr above)
    url(r"^checkin/(?P<code>.*)$", views.checkin_event, name="events_checkin_event"),
]
|
[
"django.conf.urls.url"
] |
[((79, 156), 'django.conf.urls.url', 'url', (['"""^event/ics/(?P<code>.*).ics$"""', 'views.event_ics'], {'name': '"""events_event_ics"""'}), "('^event/ics/(?P<code>.*).ics$', views.event_ics, name='events_event_ics')\n", (82, 156), False, 'from django.conf.urls import url\n'), ((163, 227), 'django.conf.urls.url', 'url', (['"""^event/(?P<code>.*)/live$"""', 'views.live'], {'name': '"""events_live"""'}), "('^event/(?P<code>.*)/live$', views.live, name='events_live')\n", (166, 227), False, 'from django.conf.urls import url\n'), ((234, 347), 'django.conf.urls.url', 'url', (['"""^event/(?P<event_id>.*)/register$"""', 'views.checkin_event_register'], {'name': '"""events_checkin_event_register"""'}), "('^event/(?P<event_id>.*)/register$', views.checkin_event_register, name\n ='events_checkin_event_register')\n", (237, 347), False, 'from django.conf.urls import url\n'), ((380, 493), 'django.conf.urls.url', 'url', (['"""^event/(?P<event_id>.*)/download$"""', 'views.checkin_event_download'], {'name': '"""events_checkin_event_download"""'}), "('^event/(?P<event_id>.*)/download$', views.checkin_event_download, name\n ='events_checkin_event_download')\n", (383, 493), False, 'from django.conf.urls import url\n'), ((526, 587), 'django.conf.urls.url', 'url', (['"""^event/(?P<code>.*)$"""', 'views.event'], {'name': '"""events_event"""'}), "('^event/(?P<code>.*)$', views.event, name='events_event')\n", (529, 587), False, 'from django.conf.urls import url\n'), ((594, 696), 'django.conf.urls.url', 'url', (['"""^registration/(?P<registration_id>.*)/url$"""', 'views.registration_url'], {'name': '"""registration_url"""'}), "('^registration/(?P<registration_id>.*)/url$', views.registration_url,\n name='registration_url')\n", (597, 696), False, 'from django.conf.urls import url\n'), ((730, 775), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.events'], {'name': '"""events_events"""'}), "('^$', views.events, name='events_events')\n", (733, 775), False, 'from django.conf.urls import 
url\n'), ((782, 850), 'django.conf.urls.url', 'url', (['"""^checkin$"""', 'views.checkin_events'], {'name': '"""events_checkin_events"""'}), "('^checkin$', views.checkin_events, name='events_checkin_events')\n", (785, 850), False, 'from django.conf.urls import url\n'), ((857, 972), 'django.conf.urls.url', 'url', (['"""^checkin/attend/(?P<registration_id>.*)$"""', 'views.checkin_event_attend'], {'name': '"""events_checkin_event_attend"""'}), "('^checkin/attend/(?P<registration_id>.*)$', views.checkin_event_attend,\n name='events_checkin_event_attend')\n", (860, 972), False, 'from django.conf.urls import url\n'), ((1006, 1099), 'django.conf.urls.url', 'url', (['"""^checkin/(?P<code>.*)/qr$"""', 'views.checkin_event_qr'], {'name': '"""events_checkin_event_qr"""'}), "('^checkin/(?P<code>.*)/qr$', views.checkin_event_qr, name=\n 'events_checkin_event_qr')\n", (1009, 1099), False, 'from django.conf.urls import url\n'), ((1132, 1211), 'django.conf.urls.url', 'url', (['"""^checkin/(?P<code>.*)$"""', 'views.checkin_event'], {'name': '"""events_checkin_event"""'}), "('^checkin/(?P<code>.*)$', views.checkin_event, name='events_checkin_event')\n", (1135, 1211), False, 'from django.conf.urls import url\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 19 09:51:19 2021
@author: lferiani
"""
import numpy as np
import pandas as pd
import seaborn as sns
from tqdm import tqdm
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from experiment_constants import MD_COLS, STIM_START_S
from timeseries_helper import plot_stimuli
def plot_frac(df, modecolnames, ax=None, **kwargs):
    """Plot the requested fraction-of-worms columns of `df` with a shaded
    confidence band, one line per (strain, motion mode) pair.

    `df` is grouped on its 'worm_strain' index level; for every column in
    `modecolnames` the matching '<col>_ci_lower' / '<col>_ci_upper'
    columns are used for the band.
    Example:
        plot_frac(frac_motion_mode_with_ci, ['frac_worms_fw'], ax=ax)
    """
    if ax is None:
        ax = plt.gca()
    # colour and legend label for each supported fraction column
    coldict = {'frac_worms_fw': ('tab:green', 'forwards'),
               'frac_worms_bw': ('tab:orange', 'backwards'),
               'frac_worms_st': ('tab:purple', 'stationary'),
               'frac_worms_nan': ('tab:gray', 'undefined')}
    for strain, strain_df in df.groupby('worm_strain'):
        strain_df = strain_df.droplevel('worm_strain')
        for colname in modecolnames:
            line_colour, mode_label = coldict[colname]
            strain_df.plot(y=colname, ax=ax,
                           color=line_colour,
                           label=strain + ' ' + mode_label,
                           **kwargs)
            # shaded confidence band, same colour as the line just drawn
            ci_low = strain_df[colname + '_ci_lower']
            ci_high = strain_df[colname + '_ci_upper']
            ax.fill_between(x=ci_low.index,
                            y1=ci_low.values,
                            y2=ci_high.values,
                            alpha=0.3,
                            facecolor=ax.lines[-1].get_color())
    ax.set_ylim((0, 1))
    ax.set_ylabel('fraction of worms')
    return
def plot_stacked_frac_mode(df, strain=None, **kwargs):
    """plot_stacked_frac_mode
    make AoE II style cumulative fraction plot.

    Stack the fractions of worms moving forwards, stationary, backwards
    and undefined so they cumulate to 1, then overlay the confidence
    intervals of the forwards and backwards fractions as grey bands.

    If `df` has a 'worm_strain' index level, recurse once per strain and
    return a list of figures; otherwise return the single figure created.

    Bugfix: the plot_stimuli() calls referenced an undefined name
    `STIM_START`; this module imports `STIM_START_S`. Also removed the
    unused `facecolours` and `icol` locals.
    """
    if ('worm_strain' in df.index.names):
        fig = []
        for strain, df_g in df.groupby('worm_strain'):
            df_g = df_g.droplevel('worm_strain')
            ff = plot_stacked_frac_mode(df_g, strain=strain, **kwargs)
            fig.append(ff)
    else:
        fig, ax = plt.subplots(**kwargs)
        fracalpha = 0.5
        erralpha = 0.5
        coldict = {'frac_worms_fw': ('tab:green', 'forwards'),
                   'frac_worms_bw': ('tab:orange', 'backwards'),
                   'frac_worms_st': ('tab:purple', 'stationary'),
                   'frac_worms_nan': ('tab:gray', 'undefined')}
        yprev = 0
        ycum = 0
        for col in ['frac_worms_'+x for x in ['fw', 'st', 'bw', 'nan']]:
            # line between fractions:
            yprev = ycum
            ycum = ycum + df[col]
            ax.fill_between(x=df.index,
                            y1=yprev,
                            y2=ycum,
                            label=col,
                            linewidth=0.5,
                            linestyle='-',
                            facecolor=coldict[col][0],
                            alpha=fracalpha,
                            edgecolor='darkgray')
        # now lower and upper bounds of frac fw
        ylow = df['frac_worms_fw_ci_lower']
        yupp = df['frac_worms_fw_ci_upper']
        ax.fill_between(x=df.index,
                        y1=ylow,
                        y2=yupp,
                        facecolor='gray',
                        edgecolor='None',
                        alpha=erralpha,
                        linewidth=0.2,
                        linestyle='--')
        # get errorbar for frac bw. then sum them to frac_fw+frac_st
        offset = (df['frac_worms_fw']
                  + df['frac_worms_st']
                  - df['frac_worms_bw'])
        ylow = df['frac_worms_bw_ci_lower'] + offset
        yupp = df['frac_worms_bw_ci_upper'] + offset
        ax.fill_between(x=df.index,
                        y1=ylow,
                        y2=yupp,
                        facecolor='gray',
                        edgecolor='None',
                        alpha=erralpha,
                        linewidth=0.2,
                        linestyle='--')
        ax.legend(loc='lower right')
        ax.set_ylim(0, 1)
        ax.set_xlim(df.index.min(), df.index.max())
        ax.set_ylabel('cumulative fraction')
        ax.set_title(strain)
        # NOTE(review): the keyword names here (units=, stimulus_start=)
        # differ from the plot_stimuli call in plot_timeseries_multipage
        # (plot_units=, stimulus_start_s=) — confirm against
        # timeseries_helper.plot_stimuli's actual signature.
        if 'time_s' in df.reset_index().columns:
            plot_stimuli(ax=ax, units='s', stimulus_start=STIM_START_S)
            ax.set_xlabel('time, (s)')
        else:
            plot_stimuli(ax=ax, units='frames', stimulus_start=STIM_START_S)
            ax.set_xlabel('time, (frames)')
    return fig
def shadederrorbar(
        x, y, data, which_err='sem',
        fig_kwargs=None, plot_kwargs=None, patches_kwargs=None,
        leg_kwargs=None):
    """Plot the groupwise mean of column(s) `y` against `x`, with a shaded
    band of +/- `which_err` around each line.

    Parameters
    ----------
    x : str
        Column of `data` to group by; becomes the x axis.
    y : str, list of str, or 'all'
        Column(s) of `data` to plot; 'all' plots every column.
    data : pandas.DataFrame
        Long-format data to aggregate and plot.
    which_err : str
        Name of the pandas aggregation used as the error measure
        (e.g. 'sem', 'std').
    fig_kwargs, plot_kwargs, patches_kwargs, leg_kwargs : dict or None
        Extra keyword arguments forwarded to plt.subplots,
        DataFrame.plot, Axes.fill_between and plt.legend respectively.

    Returns
    -------
    (fig, ax)

    Fix: the four dict parameters previously defaulted to shared mutable
    `{}` literals; they now default to None (same effective behaviour).
    """
    fig_kwargs = {} if fig_kwargs is None else fig_kwargs
    plot_kwargs = {} if plot_kwargs is None else plot_kwargs
    patches_kwargs = {} if patches_kwargs is None else patches_kwargs
    leg_kwargs = {} if leg_kwargs is None else leg_kwargs
    if y == 'all':
        y = data.columns.to_list()
    if not isinstance(y, list):
        y = [y]
    # mean and error per x bin, then lower/upper band edges per column
    df_plot = data.groupby(x)[y].agg(['mean', which_err]).reset_index()
    for y_ in y:
        df_plot[(y_, 'lower')] = (df_plot[(y_, 'mean')]
                                  - df_plot[(y_, which_err)])
        df_plot[(y_, 'upper')] = (df_plot[(y_, 'mean')]
                                  + df_plot[(y_, which_err)])
    fig, ax = plt.subplots(**fig_kwargs)
    for y_ in y:
        df_plot.plot(x, (y_, 'mean'), ax=ax, **plot_kwargs, label=y_)
    # shade each band in the colour of its mean line
    patch_colors = [line.get_color() for line in ax.get_lines()]
    for y_, patch_color in zip(y, patch_colors):
        ax.fill_between(x=df_plot[x],
                        y1=df_plot[(y_, 'lower')],
                        y2=df_plot[(y_, 'upper')],
                        color=patch_color,
                        alpha=0.4,
                        **patches_kwargs)
    plt.legend(**leg_kwargs)
    return fig, ax
# def plot_timeseries_multipage(timeseries_df, feats_toplot, figures_dir):
# for ledint, timeseries_df_g in timeseries_df.groupby('led_intensity'):
# with PdfPages(
# figures_dir / 'downsampled_feats_ledint{}.pdf'.format(ledint),
# keep_empty=False) as pdf:
# for feat in tqdm(feats_toplot):
# fig, ax = plt.subplots(figsize=(12.8, 4.8))
# sns.lineplot(x='time_binned_s', y=feat,
# hue='motion_mode',
# style='worm_strain',
# data=timeseries_df_g.query('motion_mode != 0'),
# estimator=np.mean, ci='sd',
# legend='full')
# plot_stimuli(ax=ax, units='s',
# stimulus_start=STIM_START)
# pdf.savefig(fig)
# plt.close(fig)
def plot_timeseries_multipage(
        timeseries_data, feats_toplot, fig_savepath,
        time_col='timestamp_binned_s',
        plot_units='s', fps=None,
        stimulus_start_s=None,
        stimulus_duration_s=10,
        **sns_kwargs,
        ):
    """
    plot_timeseries_multipage Wrapper for sns.lineplot.
    Plot feats in feats_toplot into a multipage pdf at fig_savepath.
    This function does not do any filtering to the data, so make sure
    timeseries_data comes pre-filtered!!!

    Parameters
    ----------
    timeseries_data : Pandas Dataframe
        from /timeseries_data, possibly downsampled.
    feats_toplot : list
        features to plot. Each feature will be in its own plot
    fig_savepath : string or Path
        path to output pdf file
    time_col : str, optional
        which colums of the timeseries_data dataframe to use as x axis,
        by default 'timestamp_binned_s'
    plot_units : str, optional
        's' or 'frames', used by plot_stimuli
    fps : float, optional
        necessary if plot_units is 'frames'
    stimulus_start_s : list, optional
        times at which the blue leds come online, in seconds;
        defaults to [60, 160, 260]
    stimulus_duration_s : float, optional
        duration of each burst of blue light, in seconds
    sns_kwargs : optional
        name, value parameters to further pass to seaborn.
        By default this function uses estimator=np.mean, ci='sd', legend='full'

    Fixes: the default-kwargs loop tested ``k not in sns_kwargs.items()``,
    which is always True for a string key, so caller-supplied values were
    silently overwritten; the mutable list default for stimulus_start_s
    was replaced by a None sentinel.
    """
    if stimulus_start_s is None:
        stimulus_start_s = [60, 160, 260]
    # default values for sns.lineplot; only fill in keys the caller did
    # not provide (membership is tested against the keys, not .items())
    default_sns_kwargs = {'estimator': np.mean, 'ci': 'sd', 'legend': 'full'}
    for k, v in default_sns_kwargs.items():
        if k not in sns_kwargs:
            sns_kwargs[k] = v
    # open a pdf
    with PdfPages(fig_savepath, keep_empty=False) as pdf:
        # one page per feat to plot
        for feat in tqdm(feats_toplot):
            # create standard size figure
            fig, ax = plt.subplots(figsize=(12.8, 4.8))
            # timeseries plot
            sns.lineplot(
                x=time_col, y=feat, data=timeseries_data, **sns_kwargs
                )
            # add stimuli
            plot_stimuli(
                ax=ax,
                plot_units=plot_units,
                fps=fps,
                stimulus_start_s=stimulus_start_s,
                stimulus_duration_s=stimulus_duration_s)
            pdf.savefig(fig)
            plt.close(fig)
    return
def plot_onehot_variables_timeseries(
        timeseries_df,
        x_col='timestamp_binned_s',
        onehot_value_cols=None,
        xlabel=None,
        ylabel=None,
        title=None,
        ax=None,
        ylim=None,
        xlim=None,
        **sns_kwargs
        ):
    """Line-plot the mean of one-hot (0/1) columns of `timeseries_df`
    against `x_col`, one hue per column, via sns.lineplot.

    The wide dataframe is melted to long format so seaborn can assign one
    hue per onehot column; the legend is re-drawn on a single line at the
    top of the axes. Callers must pass `estimator` explicitly in
    `sns_kwargs` and must not pass `hue` (it is reserved for the melted
    'variable' column).

    Returns (fig, ax).

    Fix: `onehot_value_cols` previously defaulted to a mutable `[]`
    literal; it now defaults to None with the same effective behaviour.
    """
    # input checks
    if onehot_value_cols is None:
        onehot_value_cols = []
    if not isinstance(onehot_value_cols, list):
        onehot_value_cols = [onehot_value_cols]
    for col in onehot_value_cols + ['well_name', 'worm_index']:
        assert col in timeseries_df.columns
    for col in onehot_value_cols:
        assert timeseries_df[col].astype(float).isin([0, 1]).all(), (
            'onehot_values_cols should only be zeros and ones'
        )
    # melt() creates 'variable' and 'value' columns, so they must not
    # already exist in the input
    assert 'variable' not in timeseries_df.columns
    assert 'value' not in timeseries_df.columns
    assert 'estimator' in sns_kwargs, (
        'for safety, explicitly state the estimator')
    assert 'hue' not in sns_kwargs, ((
        'hue is reserved to be the dummy variable in the long df. '
        'hues will discriminate your onehot_value_cols'
        ))
    # set default ci to None for speed. sns defaults to bootstrap 95th
    if 'ci' not in sns_kwargs:
        sns_kwargs['ci'] = None
    # timseries is a wide dataframe with columns value_cols storing 1,0
    # we need to melt the df to use sns.lineplot properly
    df_long = timeseries_df.melt(
        id_vars=x_col,
        value_vars=onehot_value_cols,
        )
    df_ram_usage = df_long.memory_usage(deep=True).sum() // 1024**2
    print(f'df_long uses {df_ram_usage} MB ram')
    # create figure if we need it
    if ax is None:
        fig, ax = plt.subplots(figsize=(12.8, 4.8))
    else:
        fig = ax.figure
    # actual plot
    plot = sns.lineplot(
        data=df_long,
        x=x_col,
        y='value',
        hue='variable',
        ax=ax,
        **sns_kwargs,
        )
    # make a oneline legend:
    # first remove old legend
    handles, labels = plot.axes.get_legend_handles_labels()
    plot.get_legend().remove()
    # fix limits
    if xlim is None:
        ax.set_xlim((df_long[x_col].min(), df_long[x_col].max()))
    else:
        ax.set_xlim(xlim)
    if ylim:
        ax.set_ylim(ylim)
    # re-add oneline legend
    plot.axes.legend(
        handles, labels, ncol=len(onehot_value_cols), loc='upper center',
        bbox_to_anchor=(0.5, 1), frameon=True)
    # put labels on axis now
    if xlabel:
        ax.set_xlabel(xlabel)
    if ylabel:
        ax.set_ylabel(ylabel)
    if title:
        ax.set_title(title)
    return fig, ax
|
[
"seaborn.lineplot",
"matplotlib.backends.backend_pdf.PdfPages",
"tqdm.tqdm",
"timeseries_helper.plot_stimuli",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"seaborn.color_palette",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.subplots"
] |
[((5404, 5430), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '(**fig_kwargs)\n', (5416, 5430), True, 'from matplotlib import pyplot as plt\n'), ((5897, 5921), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '(**leg_kwargs)\n', (5907, 5921), True, 'from matplotlib import pyplot as plt\n'), ((10980, 11068), 'seaborn.lineplot', 'sns.lineplot', ([], {'data': 'df_long', 'x': 'x_col', 'y': '"""value"""', 'hue': '"""variable"""', 'ax': 'ax'}), "(data=df_long, x=x_col, y='value', hue='variable', ax=ax, **\n sns_kwargs)\n", (10992, 11068), True, 'import seaborn as sns\n'), ((624, 633), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (631, 633), True, 'from matplotlib import pyplot as plt\n'), ((2139, 2161), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '(**kwargs)\n', (2151, 2161), True, 'from matplotlib import pyplot as plt\n'), ((8576, 8616), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (['fig_savepath'], {'keep_empty': '(False)'}), '(fig_savepath, keep_empty=False)\n', (8584, 8616), False, 'from matplotlib.backends.backend_pdf import PdfPages\n'), ((8681, 8699), 'tqdm.tqdm', 'tqdm', (['feats_toplot'], {}), '(feats_toplot)\n', (8685, 8699), False, 'from tqdm import tqdm\n'), ((10882, 10915), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12.8, 4.8)'}), '(figsize=(12.8, 4.8))\n', (10894, 10915), True, 'from matplotlib import pyplot as plt\n'), ((4514, 4571), 'timeseries_helper.plot_stimuli', 'plot_stimuli', ([], {'ax': 'ax', 'units': '"""s"""', 'stimulus_start': 'STIM_START'}), "(ax=ax, units='s', stimulus_start=STIM_START)\n", (4526, 4571), False, 'from timeseries_helper import plot_stimuli\n'), ((4637, 4699), 'timeseries_helper.plot_stimuli', 'plot_stimuli', ([], {'ax': 'ax', 'units': '"""frames"""', 'stimulus_start': 'STIM_START'}), "(ax=ax, units='frames', stimulus_start=STIM_START)\n", (4649, 4699), False, 'from timeseries_helper import plot_stimuli\n'), ((8765, 8798), 'matplotlib.pyplot.subplots', 
'plt.subplots', ([], {'figsize': '(12.8, 4.8)'}), '(figsize=(12.8, 4.8))\n', (8777, 8798), True, 'from matplotlib import pyplot as plt\n'), ((8841, 8909), 'seaborn.lineplot', 'sns.lineplot', ([], {'x': 'time_col', 'y': 'feat', 'data': 'timeseries_data'}), '(x=time_col, y=feat, data=timeseries_data, **sns_kwargs)\n', (8853, 8909), True, 'import seaborn as sns\n'), ((8982, 9114), 'timeseries_helper.plot_stimuli', 'plot_stimuli', ([], {'ax': 'ax', 'plot_units': 'plot_units', 'fps': 'fps', 'stimulus_start_s': 'stimulus_start_s', 'stimulus_duration_s': 'stimulus_duration_s'}), '(ax=ax, plot_units=plot_units, fps=fps, stimulus_start_s=\n stimulus_start_s, stimulus_duration_s=stimulus_duration_s)\n', (8994, 9114), False, 'from timeseries_helper import plot_stimuli\n'), ((9232, 9246), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (9241, 9246), True, 'from matplotlib import pyplot as plt\n'), ((2257, 2276), 'seaborn.color_palette', 'sns.color_palette', ([], {}), '()\n', (2274, 2276), True, 'import seaborn as sns\n')]
|
# Copyright (c) 2011, SD Elements. See LICENSE.txt for details.
from django.contrib.auth.models import User
from django.db import models
from django.utils import timezone
from django.conf import settings
# Finding proper User model that we can set Foreign key to.
# In newer versions of Django default user model can be specified in settings
# as `AUTH_USER_MODEL`
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', User)
class PasswordExpiry(models.Model):
    """
    Associate a password expiry date with a user. For now, this date is
    effectively just a flag to tell us whether the user has ever changed
    their password, used to force users to change their initial passwords
    when they log in for the first time. Instances are created by
    security.RequirePasswordChangeMiddleware.
    """
    class Meta(object):
        verbose_name_plural = "PasswordExpiries"

    # Not one-to-one because some users may never receive an expiry date.
    # NOTE(review): ForeignKey(unique=True) behaves like OneToOneField for
    # uniqueness; switching would alter related-manager access and require
    # a migration, so it is left as-is.
    user = models.ForeignKey(
        USER_MODEL,
        unique=True,
        on_delete=models.CASCADE
    )

    # Null means "never expires"; auto_now_add stamps the creation time.
    password_expiry_date = models.DateTimeField(
        auto_now_add=True,
        null=True,
        help_text="The date and time when the user's password expires. If "
                  "this is empty, the password never expires.",
    )

    def is_expired(self):
        # A missing expiry date means the password never expires.
        if self.password_expiry_date is None:
            return False
        else:
            # Compare against the current time via django.utils.timezone.
            return self.password_expiry_date <= timezone.now()

    def never_expire(self):
        # Clear the expiry date (and persist) so is_expired() is False.
        self.password_expiry_date = None
        self.save()

    def __unicode__(self):
        # NOTE(review): Python 2-style __unicode__; on Python 3 Django
        # uses __str__ — confirm the supported Python/Django versions.
        return u'Password Expiry: {0}'.format(self.user)
# http://www.w3.org/TR/CSP/#sample-violation-report
class CspReport(models.Model):
    """
    Content Security Policy violation report object. Each report represents
    a single alert raised by client browser in response to CSP received from
    the server.

    Each alert means the browser was unable to access a web resource (image,
    CSS, frame, script) because server's policy prohibited it from accessing
    it. These alerts should be reviewed on regular basis, as they will occur in
    two cases: first, false positives where too restrictive CSP is blocking
    legitimate website features and needs tuning. Second, when real attacks
    were fired against the user and this raises a question how the malicious
    code appeared on your website.

    CSP reports are available in Django admin view. To be logged into databse,
    CSP reports view needs to be configured properly. See csp_report_
    view for more information. Content Security Policy can be switched
    on for a web application using ContentSecurityPolicyMiddleware_ middleware.
    """
    # Fields below mirror the CSP violation-report JSON body; see
    # http://www.w3.org/TR/CSP/#sample-violation-report
    document_uri = models.URLField(
        max_length=1000,
        help_text="The address of the protected resource, "
                  "with any fragment component removed",
    )
    referrer = models.URLField(
        max_length=1000,
        help_text="The referrer attribute of the protected resource",
    )
    blocked_uri = models.URLField(
        max_length=1000,
        help_text="URI of the resource that was prevented from loading due to "
                  "the policy violation, with any fragment component removed",
    )
    violated_directive = models.CharField(
        max_length=1000,
        help_text="The policy directive that was violated",
    )
    original_policy = models.TextField(
        null=True,
        max_length=1000,
        help_text="The original policy as received by the user-agent.",
    )
    # Server-side metadata, filled in when the report is received.
    date_received = models.DateTimeField(
        auto_now_add=True,
        help_text="When this report was received",
    )
    sender_ip = models.GenericIPAddressField(
        help_text="IP of the browser sending this report",
    )
    user_agent = models.CharField(
        max_length=1000,
        help_text="User-Agent of reporting browser",
    )

    def __unicode__(self):
        # NOTE(review): Python 2-style __unicode__; on Python 3 Django
        # uses __str__ — confirm the supported Python/Django versions.
        return u'CSP Report: {0} from {1}'.format(
            self.blocked_uri,
            self.document_uri,
        )
|
[
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.utils.timezone.now",
"django.db.models.GenericIPAddressField",
"django.db.models.DateTimeField"
] |
[((968, 1036), 'django.db.models.ForeignKey', 'models.ForeignKey', (['USER_MODEL'], {'unique': '(True)', 'on_delete': 'models.CASCADE'}), '(USER_MODEL, unique=True, on_delete=models.CASCADE)\n', (985, 1036), False, 'from django.db import models\n'), ((1095, 1266), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)', 'help_text': '"""The date and time when the user\'s password expires. If this is empty, the password never expires."""'}), '(auto_now_add=True, null=True, help_text=\n "The date and time when the user\'s password expires. If this is empty, the password never expires."\n )\n', (1115, 1266), False, 'from django.db import models\n'), ((2749, 2879), 'django.db.models.URLField', 'models.URLField', ([], {'max_length': '(1000)', 'help_text': '"""The address of the protected resource, with any fragment component removed"""'}), "(max_length=1000, help_text=\n 'The address of the protected resource, with any fragment component removed'\n )\n", (2764, 2879), False, 'from django.db import models\n'), ((2929, 3028), 'django.db.models.URLField', 'models.URLField', ([], {'max_length': '(1000)', 'help_text': '"""The referrer attribute of the protected resource"""'}), "(max_length=1000, help_text=\n 'The referrer attribute of the protected resource')\n", (2944, 3028), False, 'from django.db import models\n'), ((3065, 3237), 'django.db.models.URLField', 'models.URLField', ([], {'max_length': '(1000)', 'help_text': '"""URI of the resource that was prevented from loading due to the policy violation, with any fragment component removed"""'}), "(max_length=1000, help_text=\n 'URI of the resource that was prevented from loading due to the policy violation, with any fragment component removed'\n )\n", (3080, 3237), False, 'from django.db import models\n'), ((3297, 3387), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'help_text': '"""The policy directive that was violated"""'}), 
"(max_length=1000, help_text=\n 'The policy directive that was violated')\n", (3313, 3387), False, 'from django.db import models\n'), ((3428, 3541), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': '(1000)', 'help_text': '"""The original policy as received by the user-agent."""'}), "(null=True, max_length=1000, help_text=\n 'The original policy as received by the user-agent.')\n", (3444, 3541), False, 'from django.db import models\n'), ((3589, 3676), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'help_text': '"""When this report was received"""'}), "(auto_now_add=True, help_text=\n 'When this report was received')\n", (3609, 3676), False, 'from django.db import models\n'), ((3711, 3790), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'help_text': '"""IP of the browser sending this report"""'}), "(help_text='IP of the browser sending this report')\n", (3739, 3790), False, 'from django.db import models\n'), ((3823, 3901), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'help_text': '"""User-Agent of reporting browser"""'}), "(max_length=1000, help_text='User-Agent of reporting browser')\n", (3839, 3901), False, 'from django.db import models\n'), ((1469, 1483), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1481, 1483), False, 'from django.utils import timezone\n')]
|
import os
import sys
deltext = ""
# Map the host platform onto its shell delete/copy command names.
# str.startswith accepts a tuple of prefixes, so linux/darwin share a branch.
if sys.platform.startswith(("linux", "darwin")):
    deltext = "rm"
    copytext = "cp"
if sys.platform.startswith("win"):
    deltext = "del"
    copytext = "copy"
def replace(namefile, oldtext, newtext):
    """Replace every occurrence of `oldtext` with `newtext` in the file
    `namefile`, rewriting the file in place.

    Fix: the file handles are now managed with `with`, so they are closed
    even if reading or writing raises.
    """
    with open(namefile, 'r') as f:
        filedata = f.read()
    newdata = filedata.replace(oldtext, newtext)
    with open(namefile, 'w') as f:
        f.write(newdata)
def rsaset(tb, tff, nb, base, ml):
    """Generate and compile the big-number, FF and RSA sources for one
    RSA configuration.

    tb   -- big-number type label, combined with `base` into the type tag
    tff  -- FF/RSA type label, substituted for WWW
    nb   -- number of bytes, substituted for @NB@
    base -- number base, substituted for @BASE@
    ml   -- value substituted for @ML@ in config_ff.h

    Relies on the module-level `copytext` copy command and the `replace`
    helper. Refactor: the three identical copy/substitute/compile
    sequences (big, ff, rsa) were duplicated inline; they are now driven
    by the local _instantiate helper (same commands, same per-file order).
    """
    bd = tb + "_" + base

    # per-configuration config_big header
    fnameh = "config_big_" + bd + ".h"
    os.system(copytext + " config_big.h " + fnameh)
    replace(fnameh, "XXX", bd)
    replace(fnameh, "@NB@", nb)
    replace(fnameh, "@BASE@", base)

    # per-configuration config_ff header
    fnameh = "config_ff_" + tff + ".h"
    os.system(copytext + " config_ff.h " + fnameh)
    replace(fnameh, "XXX", bd)
    replace(fnameh, "WWW", tff)
    replace(fnameh, "@ML@", ml)

    def _instantiate(stem, tag, subs):
        # copy template <stem>.c/.h to <stem>_<tag>.c/.h, apply the
        # placeholder substitutions to both files, then compile the .c
        fnamec = stem + "_" + tag + ".c"
        fnameh = stem + "_" + tag + ".h"
        os.system(copytext + " " + stem + ".c " + fnamec)
        os.system(copytext + " " + stem + ".h " + fnameh)
        for old, new in subs:
            replace(fnamec, old, new)
            replace(fnameh, old, new)
        os.system("gcc -O3 -std=c99 -c " + fnamec)

    _instantiate("big", bd, [("XXX", bd)])
    _instantiate("ff", tff, [("WWW", tff), ("XXX", bd)])
    _instantiate("rsa", tff, [("WWW", tff), ("XXX", bd)])
def curveset(tb, tf, tc, nb, base, nbt, m8, mt, ct, pf, stw, sx, ab, cs):
    """Generate and compile all C sources for one elliptic-curve
    configuration.

    tb/tf/tc name the big, field and curve types; nb is the big length
    in bytes, base the number base (as 2^n), nbt the modulus bit length,
    m8 the modulus mod 8, mt the modulus type, ct the curve type, pf the
    pairing-friendly family (or "NOT"), stw/sx the twist type and sign
    of x, ab the ate-parameter bits, cs the security level in bits.
    """
    bd = tb + "_" + base

    def emit(template, target, substitutions):
        # Copy *template* to *target*, then rewrite its placeholder tokens.
        os.system(copytext + " " + template + " " + target)
        for token, value in substitutions:
            replace(target, token, value)

    def compile_unit(source):
        # Compile one generated C translation unit.
        os.system("gcc -O3 -std=c99 -c " + source)

    def module(stem, tag, substitutions):
        # Instantiate stem.c/stem.h as stem_<tag>.c/.h and compile the .c.
        emit(stem + ".c", stem + "_" + tag + ".c", substitutions)
        emit(stem + ".h", stem + "_" + tag + ".h", substitutions)
        compile_unit(stem + "_" + tag + ".c")

    emit("config_big.h", "config_big_" + bd + ".h",
         [("XXX", bd), ("@NB@", nb), ("@BASE@", base)])

    # Number of excess bits in the top word of a BIG, capped at 14
    # (same computation as the original, minus a duplicated int(base)).
    ib = int(base)
    inb = int(nb)
    inbt = int(nbt)
    sh = min(ib * (1 + ((8 * inb - 1) // ib)) - inbt, 14)
    emit("config_field.h", "config_field_" + tf + ".h",
         [("XXX", bd), ("YYY", tf), ("@NBT@", nbt), ("@M8@", m8),
          ("@MT@", mt), ("@SH@", str(sh))])

    emit("config_curve.h", "config_curve_" + tc + ".h",
         [("XXX", bd), ("YYY", tf), ("ZZZ", tc), ("@CT@", ct), ("@PF@", pf),
          ("@ST@", stw), ("@SX@", sx), ("@CS@", cs), ("@AB@", ab)])

    # Token substitution sets; all tokens are disjoint, so the order of
    # application never affects the resulting file contents.
    field_subs = [("YYY", tf), ("XXX", bd)]
    curve_subs = [("ZZZ", tc), ("YYY", tf), ("XXX", bd)]
    tower_subs = [("YYY", tf), ("XXX", bd), ("ZZZ", tc)]

    # Core arithmetic, field, curve and ECDH layers plus the ROM tables.
    module("big", bd, [("XXX", bd)])
    module("fp", tf, field_subs)
    compile_unit("rom_field_" + tf + ".c")
    module("ecp", tc, curve_subs)
    module("ecdh", tc, curve_subs)
    compile_unit("rom_curve_" + tc + ".c")

    if pf != "NOT":
        # Pairing-friendly curves additionally need the extension-field
        # towers and the pairing/mpin/bls code for their security level.
        module("fp2", tf, field_subs)
        module("fp4", tf, tower_subs)
        if cs == "128":
            module("fp12", tf, tower_subs)
            for stem in ("ecp2", "pair", "mpin", "bls"):
                module(stem, tc, curve_subs)
        if cs == "192":
            module("fp8", tf, tower_subs)
            module("fp24", tf, tower_subs)
            module("ecp4", tc, curve_subs)
            for stem in ("pair192", "mpin192", "bls192"):
                module(stem, tc, curve_subs)
        if cs == "256":
            module("fp8", tf, tower_subs)
            module("ecp8", tc, curve_subs)
            module("fp16", tf, tower_subs)
            module("fp48", tf, tower_subs)
            for stem in ("pair256", "mpin256", "bls256"):
                module(stem, tc, curve_subs)
replace("arch.h","@WL@","32")
print("Elliptic Curves")
print("1. ED25519")
print("2. C25519")
print("3. NIST256")
print("4. BRAINPOOL")
print("5. ANSSI")
print("6. HIFIVE")
print("7. GOLDILOCKS")
print("8. NIST384")
print("9. C41417")
print("10. NIST521\n")
print("11. NUMS256W")
print("12. NUMS256E")
print("13. NUMS384W")
print("14. NUMS384E")
print("15. NUMS512W")
print("16. NUMS512E")
print("17. SECP256K1\n")
print("Pairing-Friendly Elliptic Curves")
print("18. BN254")
print("19. BN254CX")
print("20. BLS383")
print("21. BLS381")
print("22. FP256BN")
print("23. FP512BN")
print("24. BLS461\n")
print("25. BLS24")
print("26. BLS48\n")
print("RSA")
print("27. RSA2048")
print("28. RSA3072")
print("29. RSA4096")
# curveset(big,field,curve,big_length_bytes,bits_in_base,modulus_bits,modulus_mod_8,modulus_type,curve_type,pairing_friendly,sextic twist,sign of x,ate bits,curve security)
# for each curve give names for big, field and curve. In many cases the latter two will be the same.
# Typically "big" is the size in bits, always a multiple of 8, "field" describes the modulus, and "curve" is the common name for the elliptic curve
# big_length_bytes is "big" divided by 8
# Next give the number base used for 32 bit architectures, as n where the base is 2^n (note that these must be fixed for the same "big" name, if is ever re-used for another curve)
# modulus_bits is the bit length of the modulus, typically the same or slightly smaller than "big"
# modulus_mod_8 is the remainder when the modulus is divided by 8
# modulus_type is NOT_SPECIAL, or PSEUDO_MERSENNE, or MONTGOMERY_Friendly, or GENERALISED_MERSENNE (supported for GOLDILOCKS only)
# curve_type is WEIERSTRASS, EDWARDS or MONTGOMERY
# pairing_friendly is BN, BLS or NOT (if not pairing friendly)
# if pairing friendly: M or D type twist, and sign of the family parameter x
# ate bits is number of bits in Ate parameter (from romgen program)
# curve security is AES equivalent, rounded up.
# Menu number -> curveset() arguments for the non-pairing curves.
CURVE_PARAMS = {
    1:  ("256", "25519", "ED25519", "32", "29", "255", "5", "PSEUDO_MERSENNE", "EDWARDS", "NOT", "", "", "", "128"),
    2:  ("256", "25519", "C25519", "32", "29", "255", "5", "PSEUDO_MERSENNE", "MONTGOMERY", "NOT", "", "", "", "128"),
    3:  ("256", "NIST256", "NIST256", "32", "28", "256", "7", "NOT_SPECIAL", "WEIERSTRASS", "NOT", "", "", "", "128"),
    4:  ("256", "BRAINPOOL", "BRAINPOOL", "32", "28", "256", "7", "NOT_SPECIAL", "WEIERSTRASS", "NOT", "", "", "", "128"),
    5:  ("256", "ANSSI", "ANSSI", "32", "28", "256", "7", "NOT_SPECIAL", "WEIERSTRASS", "NOT", "", "", "", "128"),
    6:  ("336", "HIFIVE", "HIFIVE", "42", "29", "336", "5", "PSEUDO_MERSENNE", "EDWARDS", "NOT", "", "", "", "192"),
    7:  ("448", "GOLDILOCKS", "GOLDILOCKS", "56", "29", "448", "7", "GENERALISED_MERSENNE", "EDWARDS", "NOT", "", "", "", "256"),
    8:  ("384", "NIST384", "NIST384", "48", "29", "384", "7", "NOT_SPECIAL", "WEIERSTRASS", "NOT", "", "", "", "192"),
    9:  ("416", "C41417", "C41417", "52", "29", "414", "7", "PSEUDO_MERSENNE", "EDWARDS", "NOT", "", "", "", "256"),
    10: ("528", "NIST521", "NIST521", "66", "28", "521", "7", "PSEUDO_MERSENNE", "WEIERSTRASS", "NOT", "", "", "", "256"),
    11: ("256", "256PMW", "NUMS256W", "32", "28", "256", "3", "PSEUDO_MERSENNE", "WEIERSTRASS", "NOT", "", "", "", "128"),
    12: ("256", "256PME", "NUMS256E", "32", "29", "256", "3", "PSEUDO_MERSENNE", "EDWARDS", "NOT", "", "", "", "128"),
    13: ("384", "384PM", "NUMS384W", "48", "29", "384", "3", "PSEUDO_MERSENNE", "WEIERSTRASS", "NOT", "", "", "", "192"),
    14: ("384", "384PM", "NUMS384E", "48", "29", "384", "3", "PSEUDO_MERSENNE", "EDWARDS", "NOT", "", "", "", "192"),
    15: ("512", "512PM", "NUMS512W", "64", "29", "512", "7", "PSEUDO_MERSENNE", "WEIERSTRASS", "NOT", "", "", "", "256"),
    16: ("512", "512PM", "NUMS512E", "64", "29", "512", "7", "PSEUDO_MERSENNE", "EDWARDS", "NOT", "", "", "", "256"),
    17: ("256", "SECP256K1", "SECP256K1", "32", "28", "256", "7", "NOT_SPECIAL", "WEIERSTRASS", "NOT", "", "", "", "128"),
}

# Menu number -> curveset() arguments for the pairing-friendly curves.
# BLS461 parameters: https://eprint.iacr.org/2017/334.pdf
PF_CURVE_PARAMS = {
    18: ("256", "BN254", "BN254", "32", "28", "254", "3", "NOT_SPECIAL", "WEIERSTRASS", "BN", "D_TYPE", "NEGATIVEX", "66", "128"),
    19: ("256", "BN254CX", "BN254CX", "32", "28", "254", "3", "NOT_SPECIAL", "WEIERSTRASS", "BN", "D_TYPE", "NEGATIVEX", "66", "128"),
    20: ("384", "BLS383", "BLS383", "48", "29", "383", "3", "NOT_SPECIAL", "WEIERSTRASS", "BLS", "M_TYPE", "POSITIVEX", "65", "128"),
    21: ("384", "BLS381", "BLS381", "48", "29", "381", "3", "NOT_SPECIAL", "WEIERSTRASS", "BLS", "M_TYPE", "NEGATIVEX", "65", "128"),
    22: ("256", "FP256BN", "FP256BN", "32", "28", "256", "3", "NOT_SPECIAL", "WEIERSTRASS", "BN", "M_TYPE", "NEGATIVEX", "66", "128"),
    23: ("512", "FP512BN", "FP512BN", "64", "29", "512", "3", "NOT_SPECIAL", "WEIERSTRASS", "BN", "M_TYPE", "POSITIVEX", "130", "128"),
    24: ("464", "BLS461", "BLS461", "58", "28", "461", "3", "NOT_SPECIAL", "WEIERSTRASS", "BLS", "M_TYPE", "NEGATIVEX", "78", "128"),
    25: ("480", "BLS24", "BLS24", "60", "29", "479", "3", "NOT_SPECIAL", "WEIERSTRASS", "BLS", "M_TYPE", "POSITIVEX", "49", "192"),
    26: ("560", "BLS48", "BLS48", "70", "29", "556", "3", "NOT_SPECIAL", "WEIERSTRASS", "BLS", "M_TYPE", "POSITIVEX", "32", "256"),
}

# rsaset(big,ring,big_length_bytes,bits_in_base,multiplier)
# "big" is the length in bits of the underlying big number type,
# "ring" is the RSA modulus size = "big" times 2^m, and multiplier is 2^m.
# There are choices here, different ways of getting the same result, but
# some faster than others; e.g. 2048 could also be built as
# ("512","2048","64","29","4") or ("256","2048","32","29","8") -- 256 is
# slower but may allow reuse of 256-bit BIGs used for elliptic curves.
RSA_PARAMS = {
    27: ("1024", "2048", "128", "28", "2"),
    28: ("384", "3072", "48", "28", "8"),
    29: ("512", "4096", "64", "29", "8"),  # alt: ("256","4096","32","29","16")
}

# Interactive selection.  Duplicate choices are ignored; 0 finishes.
selection = []
ptr = 0
MAX_CHOICES = 30  # renamed from 'max', which shadowed the builtin
curve_selected = False
pfcurve_selected = False
rsa_selected = False
while ptr < MAX_CHOICES:
    x = int(input("Choose a Scheme to support - 0 to finish: "))
    if x == 0:
        break
    if x in selection:  # idiomatic membership test replaces the manual scan
        continue
    selection.append(x)
    ptr = ptr + 1
    if x in CURVE_PARAMS:
        curveset(*CURVE_PARAMS[x])
        curve_selected = True
    elif x in PF_CURVE_PARAMS:
        curveset(*PF_CURVE_PARAMS[x])
        pfcurve_selected = True
    elif x in RSA_PARAMS:
        rsaset(*RSA_PARAMS[x])
        rsa_selected = True
os.system(deltext+" big.*")
os.system(deltext+" fp.*")
os.system(deltext+" ecp.*")
os.system(deltext+" ecdh.*")
os.system(deltext+" ff.*")
os.system(deltext+" rsa.*")
os.system(deltext+" config_big.h")
os.system(deltext+" config_field.h")
os.system(deltext+" config_curve.h")
os.system(deltext+" config_ff.h")
os.system(deltext+" fp2.*")
os.system(deltext+" fp4.*")
os.system(deltext+" fp8.*")
os.system(deltext+" fp16.*")
os.system(deltext+" fp12.*")
os.system(deltext+" fp24.*")
os.system(deltext+" fp48.*")
os.system(deltext+" ecp2.*")
os.system(deltext+" ecp4.*")
os.system(deltext+" ecp8.*")
os.system(deltext+" pair.*")
os.system(deltext+" mpin.*")
os.system(deltext+" bls.*")
os.system(deltext+" pair192.*")
os.system(deltext+" mpin192.*")
os.system(deltext+" bls192.*")
os.system(deltext+" pair256.*")
os.system(deltext+" mpin256.*")
os.system(deltext+" bls256.*")
# create library
def _cc(source):
    # Compile one common-support C file.
    os.system("gcc -O3 -std=c99 -c " + source)

_cc("randapi.c")
# Support code is compiled only for the scheme families actually chosen.
if curve_selected:
    _cc("ecdh_support.c")
if rsa_selected:
    _cc("rsa_support.c")
if pfcurve_selected:
    _cc("pbc_support.c")
for _src in ("hash.c", "rand.c", "oct.c", "aes.c", "gcm.c", "newhope.c"):
    _cc(_src)

# Archive all object files into amcl.a, then remove them.  On Windows a
# response file listing the objects is built first and passed to ar.
if sys.platform.startswith("win"):
    os.system("for %i in (*.o) do @echo %~nxi >> f.list")
    os.system("ar rc amcl.a @f.list")
    os.system(deltext + " f.list")
else:
    os.system("ar rc amcl.a *.o")
    os.system(deltext + " *.o")
#print("Your section was ");
#for i in range(0,ptr):
# print (selection[i])
|
[
"sys.platform.startswith",
"os.system"
] |
[((36, 68), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (59, 68), False, 'import sys\n'), ((104, 137), 'sys.platform.startswith', 'sys.platform.startswith', (['"""darwin"""'], {}), "('darwin')\n", (127, 137), False, 'import sys\n'), ((173, 203), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (196, 203), False, 'import sys\n'), ((17865, 17894), 'os.system', 'os.system', (["(deltext + ' big.*')"], {}), "(deltext + ' big.*')\n", (17874, 17894), False, 'import os\n'), ((17893, 17921), 'os.system', 'os.system', (["(deltext + ' fp.*')"], {}), "(deltext + ' fp.*')\n", (17902, 17921), False, 'import os\n'), ((17920, 17949), 'os.system', 'os.system', (["(deltext + ' ecp.*')"], {}), "(deltext + ' ecp.*')\n", (17929, 17949), False, 'import os\n'), ((17948, 17978), 'os.system', 'os.system', (["(deltext + ' ecdh.*')"], {}), "(deltext + ' ecdh.*')\n", (17957, 17978), False, 'import os\n'), ((17977, 18005), 'os.system', 'os.system', (["(deltext + ' ff.*')"], {}), "(deltext + ' ff.*')\n", (17986, 18005), False, 'import os\n'), ((18004, 18033), 'os.system', 'os.system', (["(deltext + ' rsa.*')"], {}), "(deltext + ' rsa.*')\n", (18013, 18033), False, 'import os\n'), ((18032, 18068), 'os.system', 'os.system', (["(deltext + ' config_big.h')"], {}), "(deltext + ' config_big.h')\n", (18041, 18068), False, 'import os\n'), ((18067, 18105), 'os.system', 'os.system', (["(deltext + ' config_field.h')"], {}), "(deltext + ' config_field.h')\n", (18076, 18105), False, 'import os\n'), ((18104, 18142), 'os.system', 'os.system', (["(deltext + ' config_curve.h')"], {}), "(deltext + ' config_curve.h')\n", (18113, 18142), False, 'import os\n'), ((18141, 18176), 'os.system', 'os.system', (["(deltext + ' config_ff.h')"], {}), "(deltext + ' config_ff.h')\n", (18150, 18176), False, 'import os\n'), ((18175, 18204), 'os.system', 'os.system', (["(deltext + ' fp2.*')"], {}), "(deltext + ' fp2.*')\n", (18184, 18204), 
False, 'import os\n'), ((18203, 18232), 'os.system', 'os.system', (["(deltext + ' fp4.*')"], {}), "(deltext + ' fp4.*')\n", (18212, 18232), False, 'import os\n'), ((18231, 18260), 'os.system', 'os.system', (["(deltext + ' fp8.*')"], {}), "(deltext + ' fp8.*')\n", (18240, 18260), False, 'import os\n'), ((18259, 18289), 'os.system', 'os.system', (["(deltext + ' fp16.*')"], {}), "(deltext + ' fp16.*')\n", (18268, 18289), False, 'import os\n'), ((18290, 18320), 'os.system', 'os.system', (["(deltext + ' fp12.*')"], {}), "(deltext + ' fp12.*')\n", (18299, 18320), False, 'import os\n'), ((18319, 18349), 'os.system', 'os.system', (["(deltext + ' fp24.*')"], {}), "(deltext + ' fp24.*')\n", (18328, 18349), False, 'import os\n'), ((18348, 18378), 'os.system', 'os.system', (["(deltext + ' fp48.*')"], {}), "(deltext + ' fp48.*')\n", (18357, 18378), False, 'import os\n'), ((18378, 18408), 'os.system', 'os.system', (["(deltext + ' ecp2.*')"], {}), "(deltext + ' ecp2.*')\n", (18387, 18408), False, 'import os\n'), ((18407, 18437), 'os.system', 'os.system', (["(deltext + ' ecp4.*')"], {}), "(deltext + ' ecp4.*')\n", (18416, 18437), False, 'import os\n'), ((18436, 18466), 'os.system', 'os.system', (["(deltext + ' ecp8.*')"], {}), "(deltext + ' ecp8.*')\n", (18445, 18466), False, 'import os\n'), ((18466, 18496), 'os.system', 'os.system', (["(deltext + ' pair.*')"], {}), "(deltext + ' pair.*')\n", (18475, 18496), False, 'import os\n'), ((18495, 18525), 'os.system', 'os.system', (["(deltext + ' mpin.*')"], {}), "(deltext + ' mpin.*')\n", (18504, 18525), False, 'import os\n'), ((18524, 18553), 'os.system', 'os.system', (["(deltext + ' bls.*')"], {}), "(deltext + ' bls.*')\n", (18533, 18553), False, 'import os\n'), ((18553, 18586), 'os.system', 'os.system', (["(deltext + ' pair192.*')"], {}), "(deltext + ' pair192.*')\n", (18562, 18586), False, 'import os\n'), ((18585, 18618), 'os.system', 'os.system', (["(deltext + ' mpin192.*')"], {}), "(deltext + ' mpin192.*')\n", (18594, 18618), 
False, 'import os\n'), ((18617, 18649), 'os.system', 'os.system', (["(deltext + ' bls192.*')"], {}), "(deltext + ' bls192.*')\n", (18626, 18649), False, 'import os\n'), ((18649, 18682), 'os.system', 'os.system', (["(deltext + ' pair256.*')"], {}), "(deltext + ' pair256.*')\n", (18658, 18682), False, 'import os\n'), ((18681, 18714), 'os.system', 'os.system', (["(deltext + ' mpin256.*')"], {}), "(deltext + ' mpin256.*')\n", (18690, 18714), False, 'import os\n'), ((18713, 18745), 'os.system', 'os.system', (["(deltext + ' bls256.*')"], {}), "(deltext + ' bls256.*')\n", (18722, 18745), False, 'import os\n'), ((18762, 18804), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c randapi.c"""'], {}), "('gcc -O3 -std=c99 -c randapi.c')\n", (18771, 18804), False, 'import os\n'), ((19011, 19050), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c hash.c"""'], {}), "('gcc -O3 -std=c99 -c hash.c')\n", (19020, 19050), False, 'import os\n'), ((19051, 19090), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c rand.c"""'], {}), "('gcc -O3 -std=c99 -c rand.c')\n", (19060, 19090), False, 'import os\n'), ((19091, 19129), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c oct.c"""'], {}), "('gcc -O3 -std=c99 -c oct.c')\n", (19100, 19129), False, 'import os\n'), ((19130, 19168), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c aes.c"""'], {}), "('gcc -O3 -std=c99 -c aes.c')\n", (19139, 19168), False, 'import os\n'), ((19169, 19207), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c gcm.c"""'], {}), "('gcc -O3 -std=c99 -c gcm.c')\n", (19178, 19207), False, 'import os\n'), ((19208, 19250), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c newhope.c"""'], {}), "('gcc -O3 -std=c99 -c newhope.c')\n", (19217, 19250), False, 'import os\n'), ((19255, 19285), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (19278, 19285), False, 'import sys\n'), ((19449, 19476), 'os.system', 'os.system', (["(deltext + ' *.o')"], {}), "(deltext + ' *.o')\n", 
(19458, 19476), False, 'import os\n'), ((515, 562), 'os.system', 'os.system', (["(copytext + ' config_big.h ' + fnameh)"], {}), "(copytext + ' config_big.h ' + fnameh)\n", (524, 562), False, 'import os\n'), ((675, 721), 'os.system', 'os.system', (["(copytext + ' config_ff.h ' + fnameh)"], {}), "(copytext + ' config_ff.h ' + fnameh)\n", (684, 721), False, 'import os\n'), ((848, 888), 'os.system', 'os.system', (["(copytext + ' big.c ' + fnamec)"], {}), "(copytext + ' big.c ' + fnamec)\n", (857, 888), False, 'import os\n'), ((886, 926), 'os.system', 'os.system', (["(copytext + ' big.h ' + fnameh)"], {}), "(copytext + ' big.h ' + fnameh)\n", (895, 926), False, 'import os\n'), ((977, 1019), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (986, 1019), False, 'import os\n'), ((1067, 1106), 'os.system', 'os.system', (["(copytext + ' ff.c ' + fnamec)"], {}), "(copytext + ' ff.c ' + fnamec)\n", (1076, 1106), False, 'import os\n'), ((1104, 1143), 'os.system', 'os.system', (["(copytext + ' ff.h ' + fnameh)"], {}), "(copytext + ' ff.h ' + fnameh)\n", (1113, 1143), False, 'import os\n'), ((1248, 1290), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (1257, 1290), False, 'import os\n'), ((1340, 1380), 'os.system', 'os.system', (["(copytext + ' rsa.c ' + fnamec)"], {}), "(copytext + ' rsa.c ' + fnamec)\n", (1349, 1380), False, 'import os\n'), ((1378, 1418), 'os.system', 'os.system', (["(copytext + ' rsa.h ' + fnameh)"], {}), "(copytext + ' rsa.h ' + fnameh)\n", (1387, 1418), False, 'import os\n'), ((1523, 1565), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (1532, 1565), False, 'import os\n'), ((1675, 1722), 'os.system', 'os.system', (["(copytext + ' config_big.h ' + fnameh)"], {}), "(copytext + ' config_big.h ' + fnameh)\n", (1684, 1722), False, 'import os\n'), ((1839, 1888), 'os.system', 
'os.system', (["(copytext + ' config_field.h ' + fnameh)"], {}), "(copytext + ' config_field.h ' + fnameh)\n", (1848, 1888), False, 'import os\n'), ((2199, 2248), 'os.system', 'os.system', (["(copytext + ' config_curve.h ' + fnameh)"], {}), "(copytext + ' config_curve.h ' + fnameh)\n", (2208, 2248), False, 'import os\n'), ((2536, 2576), 'os.system', 'os.system', (["(copytext + ' big.c ' + fnamec)"], {}), "(copytext + ' big.c ' + fnamec)\n", (2545, 2576), False, 'import os\n'), ((2574, 2614), 'os.system', 'os.system', (["(copytext + ' big.h ' + fnameh)"], {}), "(copytext + ' big.h ' + fnameh)\n", (2583, 2614), False, 'import os\n'), ((2665, 2707), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (2674, 2707), False, 'import os\n'), ((2753, 2792), 'os.system', 'os.system', (["(copytext + ' fp.c ' + fnamec)"], {}), "(copytext + ' fp.c ' + fnamec)\n", (2762, 2792), False, 'import os\n'), ((2790, 2829), 'os.system', 'os.system', (["(copytext + ' fp.h ' + fnameh)"], {}), "(copytext + ' fp.h ' + fnameh)\n", (2799, 2829), False, 'import os\n'), ((2932, 2974), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (2941, 2974), False, 'import os\n'), ((2975, 3030), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c rom_field_' + tf + '.c')"], {}), "('gcc -O3 -std=c99 -c rom_field_' + tf + '.c')\n", (2984, 3030), False, 'import os\n'), ((3077, 3117), 'os.system', 'os.system', (["(copytext + ' ecp.c ' + fnamec)"], {}), "(copytext + ' ecp.c ' + fnamec)\n", (3086, 3117), False, 'import os\n'), ((3115, 3155), 'os.system', 'os.system', (["(copytext + ' ecp.h ' + fnameh)"], {}), "(copytext + ' ecp.h ' + fnameh)\n", (3124, 3155), False, 'import os\n'), ((3310, 3352), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (3319, 3352), False, 'import os\n'), ((3402, 3443), 'os.system', 'os.system', (["(copytext + ' 
ecdh.c ' + fnamec)"], {}), "(copytext + ' ecdh.c ' + fnamec)\n", (3411, 3443), False, 'import os\n'), ((3441, 3482), 'os.system', 'os.system', (["(copytext + ' ecdh.h ' + fnameh)"], {}), "(copytext + ' ecdh.h ' + fnameh)\n", (3450, 3482), False, 'import os\n'), ((3637, 3679), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (3646, 3679), False, 'import os\n'), ((3680, 3735), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c rom_curve_' + tc + '.c')"], {}), "('gcc -O3 -std=c99 -c rom_curve_' + tc + '.c')\n", (3689, 3735), False, 'import os\n'), ((18826, 18873), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c ecdh_support.c"""'], {}), "('gcc -O3 -std=c99 -c ecdh_support.c')\n", (18835, 18873), False, 'import os\n'), ((18893, 18939), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c rsa_support.c"""'], {}), "('gcc -O3 -std=c99 -c rsa_support.c')\n", (18902, 18939), False, 'import os\n'), ((18963, 19009), 'os.system', 'os.system', (['"""gcc -O3 -std=c99 -c pbc_support.c"""'], {}), "('gcc -O3 -std=c99 -c pbc_support.c')\n", (18972, 19009), False, 'import os\n'), ((19289, 19342), 'os.system', 'os.system', (['"""for %i in (*.o) do @echo %~nxi >> f.list"""'], {}), "('for %i in (*.o) do @echo %~nxi >> f.list')\n", (19298, 19342), False, 'import os\n'), ((19344, 19377), 'os.system', 'os.system', (['"""ar rc amcl.a @f.list"""'], {}), "('ar rc amcl.a @f.list')\n", (19353, 19377), False, 'import os\n'), ((19379, 19409), 'os.system', 'os.system', (["(deltext + ' f.list')"], {}), "(deltext + ' f.list')\n", (19388, 19409), False, 'import os\n'), ((19417, 19446), 'os.system', 'os.system', (['"""ar rc amcl.a *.o"""'], {}), "('ar rc amcl.a *.o')\n", (19426, 19446), False, 'import os\n'), ((3803, 3843), 'os.system', 'os.system', (["(copytext + ' fp2.c ' + fnamec)"], {}), "(copytext + ' fp2.c ' + fnamec)\n", (3812, 3843), False, 'import os\n'), ((3842, 3882), 'os.system', 'os.system', (["(copytext + ' fp2.h ' + 
fnameh)"], {}), "(copytext + ' fp2.h ' + fnameh)\n", (3851, 3882), False, 'import os\n'), ((3989, 4031), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (3998, 4031), False, 'import os\n'), ((4082, 4122), 'os.system', 'os.system', (["(copytext + ' fp4.c ' + fnamec)"], {}), "(copytext + ' fp4.c ' + fnamec)\n", (4091, 4122), False, 'import os\n'), ((4121, 4161), 'os.system', 'os.system', (["(copytext + ' fp4.h ' + fnameh)"], {}), "(copytext + ' fp4.h ' + fnameh)\n", (4130, 4161), False, 'import os\n'), ((4322, 4364), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (4331, 4364), False, 'import os\n'), ((4440, 4481), 'os.system', 'os.system', (["(copytext + ' fp12.c ' + fnamec)"], {}), "(copytext + ' fp12.c ' + fnamec)\n", (4449, 4481), False, 'import os\n'), ((4481, 4522), 'os.system', 'os.system', (["(copytext + ' fp12.h ' + fnameh)"], {}), "(copytext + ' fp12.h ' + fnameh)\n", (4490, 4522), False, 'import os\n'), ((4690, 4732), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (4699, 4732), False, 'import os\n'), ((4788, 4829), 'os.system', 'os.system', (["(copytext + ' ecp2.c ' + fnamec)"], {}), "(copytext + ' ecp2.c ' + fnamec)\n", (4797, 4829), False, 'import os\n'), ((4829, 4870), 'os.system', 'os.system', (["(copytext + ' ecp2.h ' + fnameh)"], {}), "(copytext + ' ecp2.h ' + fnameh)\n", (4838, 4870), False, 'import os\n'), ((5038, 5080), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (5047, 5080), False, 'import os\n'), ((5136, 5177), 'os.system', 'os.system', (["(copytext + ' pair.c ' + fnamec)"], {}), "(copytext + ' pair.c ' + fnamec)\n", (5145, 5177), False, 'import os\n'), ((5177, 5218), 'os.system', 'os.system', (["(copytext + ' pair.h ' + fnameh)"], {}), "(copytext + ' pair.h ' + fnameh)\n", (5186, 5218), False, 
'import os\n'), ((5386, 5428), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (5395, 5428), False, 'import os\n'), ((5484, 5525), 'os.system', 'os.system', (["(copytext + ' mpin.c ' + fnamec)"], {}), "(copytext + ' mpin.c ' + fnamec)\n", (5493, 5525), False, 'import os\n'), ((5525, 5566), 'os.system', 'os.system', (["(copytext + ' mpin.h ' + fnameh)"], {}), "(copytext + ' mpin.h ' + fnameh)\n", (5534, 5566), False, 'import os\n'), ((5734, 5776), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (5743, 5776), False, 'import os\n'), ((5830, 5870), 'os.system', 'os.system', (["(copytext + ' bls.c ' + fnamec)"], {}), "(copytext + ' bls.c ' + fnamec)\n", (5839, 5870), False, 'import os\n'), ((5870, 5910), 'os.system', 'os.system', (["(copytext + ' bls.h ' + fnameh)"], {}), "(copytext + ' bls.h ' + fnameh)\n", (5879, 5910), False, 'import os\n'), ((6078, 6120), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (6087, 6120), False, 'import os\n'), ((6193, 6233), 'os.system', 'os.system', (["(copytext + ' fp8.c ' + fnamec)"], {}), "(copytext + ' fp8.c ' + fnamec)\n", (6202, 6233), False, 'import os\n'), ((6233, 6273), 'os.system', 'os.system', (["(copytext + ' fp8.h ' + fnameh)"], {}), "(copytext + ' fp8.h ' + fnameh)\n", (6242, 6273), False, 'import os\n'), ((6441, 6483), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (6450, 6483), False, 'import os\n'), ((6540, 6581), 'os.system', 'os.system', (["(copytext + ' fp24.c ' + fnamec)"], {}), "(copytext + ' fp24.c ' + fnamec)\n", (6549, 6581), False, 'import os\n'), ((6581, 6622), 'os.system', 'os.system', (["(copytext + ' fp24.h ' + fnameh)"], {}), "(copytext + ' fp24.h ' + fnameh)\n", (6590, 6622), False, 'import os\n'), ((6790, 6832), 'os.system', 'os.system', (["('gcc -O3 -std=c99 
-c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (6799, 6832), False, 'import os\n'), ((6888, 6929), 'os.system', 'os.system', (["(copytext + ' ecp4.c ' + fnamec)"], {}), "(copytext + ' ecp4.c ' + fnamec)\n", (6897, 6929), False, 'import os\n'), ((6929, 6970), 'os.system', 'os.system', (["(copytext + ' ecp4.h ' + fnameh)"], {}), "(copytext + ' ecp4.h ' + fnameh)\n", (6938, 6970), False, 'import os\n'), ((7138, 7180), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (7147, 7180), False, 'import os\n'), ((7242, 7286), 'os.system', 'os.system', (["(copytext + ' pair192.c ' + fnamec)"], {}), "(copytext + ' pair192.c ' + fnamec)\n", (7251, 7286), False, 'import os\n'), ((7286, 7330), 'os.system', 'os.system', (["(copytext + ' pair192.h ' + fnameh)"], {}), "(copytext + ' pair192.h ' + fnameh)\n", (7295, 7330), False, 'import os\n'), ((7498, 7540), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (7507, 7540), False, 'import os\n'), ((7602, 7646), 'os.system', 'os.system', (["(copytext + ' mpin192.c ' + fnamec)"], {}), "(copytext + ' mpin192.c ' + fnamec)\n", (7611, 7646), False, 'import os\n'), ((7646, 7690), 'os.system', 'os.system', (["(copytext + ' mpin192.h ' + fnameh)"], {}), "(copytext + ' mpin192.h ' + fnameh)\n", (7655, 7690), False, 'import os\n'), ((7858, 7900), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (7867, 7900), False, 'import os\n'), ((7962, 8005), 'os.system', 'os.system', (["(copytext + ' bls192.c ' + fnamec)"], {}), "(copytext + ' bls192.c ' + fnamec)\n", (7971, 8005), False, 'import os\n'), ((8005, 8048), 'os.system', 'os.system', (["(copytext + ' bls192.h ' + fnameh)"], {}), "(copytext + ' bls192.h ' + fnameh)\n", (8014, 8048), False, 'import os\n'), ((8216, 8258), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 
-std=c99 -c ' + fnamec)\n", (8225, 8258), False, 'import os\n'), ((8333, 8373), 'os.system', 'os.system', (["(copytext + ' fp8.c ' + fnamec)"], {}), "(copytext + ' fp8.c ' + fnamec)\n", (8342, 8373), False, 'import os\n'), ((8373, 8413), 'os.system', 'os.system', (["(copytext + ' fp8.h ' + fnameh)"], {}), "(copytext + ' fp8.h ' + fnameh)\n", (8382, 8413), False, 'import os\n'), ((8581, 8623), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (8590, 8623), False, 'import os\n'), ((8680, 8721), 'os.system', 'os.system', (["(copytext + ' ecp8.c ' + fnamec)"], {}), "(copytext + ' ecp8.c ' + fnamec)\n", (8689, 8721), False, 'import os\n'), ((8721, 8762), 'os.system', 'os.system', (["(copytext + ' ecp8.h ' + fnameh)"], {}), "(copytext + ' ecp8.h ' + fnameh)\n", (8730, 8762), False, 'import os\n'), ((8930, 8972), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (8939, 8972), False, 'import os\n'), ((9029, 9070), 'os.system', 'os.system', (["(copytext + ' fp16.c ' + fnamec)"], {}), "(copytext + ' fp16.c ' + fnamec)\n", (9038, 9070), False, 'import os\n'), ((9070, 9111), 'os.system', 'os.system', (["(copytext + ' fp16.h ' + fnameh)"], {}), "(copytext + ' fp16.h ' + fnameh)\n", (9079, 9111), False, 'import os\n'), ((9279, 9321), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (9288, 9321), False, 'import os\n'), ((9378, 9419), 'os.system', 'os.system', (["(copytext + ' fp48.c ' + fnamec)"], {}), "(copytext + ' fp48.c ' + fnamec)\n", (9387, 9419), False, 'import os\n'), ((9419, 9460), 'os.system', 'os.system', (["(copytext + ' fp48.h ' + fnameh)"], {}), "(copytext + ' fp48.h ' + fnameh)\n", (9428, 9460), False, 'import os\n'), ((9628, 9670), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (9637, 9670), False, 'import os\n'), ((9733, 
9777), 'os.system', 'os.system', (["(copytext + ' pair256.c ' + fnamec)"], {}), "(copytext + ' pair256.c ' + fnamec)\n", (9742, 9777), False, 'import os\n'), ((9777, 9821), 'os.system', 'os.system', (["(copytext + ' pair256.h ' + fnameh)"], {}), "(copytext + ' pair256.h ' + fnameh)\n", (9786, 9821), False, 'import os\n'), ((9989, 10031), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (9998, 10031), False, 'import os\n'), ((10093, 10137), 'os.system', 'os.system', (["(copytext + ' mpin256.c ' + fnamec)"], {}), "(copytext + ' mpin256.c ' + fnamec)\n", (10102, 10137), False, 'import os\n'), ((10137, 10181), 'os.system', 'os.system', (["(copytext + ' mpin256.h ' + fnameh)"], {}), "(copytext + ' mpin256.h ' + fnameh)\n", (10146, 10181), False, 'import os\n'), ((10349, 10391), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (10358, 10391), False, 'import os\n'), ((10455, 10498), 'os.system', 'os.system', (["(copytext + ' bls256.c ' + fnamec)"], {}), "(copytext + ' bls256.c ' + fnamec)\n", (10464, 10498), False, 'import os\n'), ((10498, 10541), 'os.system', 'os.system', (["(copytext + ' bls256.h ' + fnameh)"], {}), "(copytext + ' bls256.h ' + fnameh)\n", (10507, 10541), False, 'import os\n'), ((10709, 10751), 'os.system', 'os.system', (["('gcc -O3 -std=c99 -c ' + fnamec)"], {}), "('gcc -O3 -std=c99 -c ' + fnamec)\n", (10718, 10751), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
import json
from six import assertCountEqual
from synapsesuggestor.tests.common import SynapseSuggestorTestCase
URL_PREFIX = '/ext/synapsesuggestor/treenode-association'
SYN_SLICE_NEAR_SKEL_COLS = [
'skeleton_id', 'treenode_id', 'synapse_object_id', 'synapse_slice_ids', 'synapse_z_s', 'synapse_bounds_s'
]
def stack_to_project(translation, resolution, coords_s):
    """Convert a dictionary of stack coordinates into a dictionary of project coordinates.

    Each axis value is scaled by that axis' resolution and then offset by
    the stack translation.
    """
    projected = {}
    for axis, value in coords_s.items():
        projected[axis] = value * resolution[axis] + translation[axis]
    return projected
def project_to_stack(translation, resolution, coords_p):
    """Convert a dictionary of project coordinates into a dictionary of stack coordinates.

    Inverse of ``stack_to_project``: the translation is subtracted first and
    the difference is then divided by the resolution.  The original code
    divided only the translation (``val - translation/resolution``), an
    operator-precedence bug that made it *not* the inverse transform.
    Results are truncated to ``int``, matching the original contract.
    """
    return {dim: int((val - translation[dim]) / resolution[dim]) for dim, val in coords_p.items()}
def stack_distance_to_project(resolution, distance_s):
    """Scale a stack-space distance into project space.

    Requires an isotropic XY resolution; raises AssertionError otherwise.
    """
    xy_resolution = resolution['x']
    assert xy_resolution == resolution['y'], 'Resolution is XY anisotropic'
    return distance_s * xy_resolution
class TreenodeAssociationApiTests(SynapseSuggestorTestCase):
    """Integration tests for the treenode-association API endpoints."""

    def get_response(self, *args, **kwargs):
        """GET a URL through the test client, assert HTTP 200 and return the JSON body."""
        resp = self.client.get(*args, **kwargs)
        self.assertEqual(resp.status_code, 200)
        return json.loads(resp.content.decode('utf-8'))

    def test_get_treenode_associations(self):
        """The fixture skeleton reports its known synapse association."""
        known_synapse_obj = 1
        self.fake_authentication()

        payload = self.get_response(
            URL_PREFIX + '/{}/get'.format(self.test_project_id),
            {'skid': self.test_skeleton_id}
        )

        self.assertListEqual([[self.test_treenode_id, known_synapse_obj, 5]], payload)

    def test_get_treenode_associations_empty(self):
        """A skeleton with no associations yields an empty list."""
        unassociated_skid = 235
        self.fake_authentication()

        payload = self.get_response(
            URL_PREFIX + '/{}/get'.format(self.test_project_id),
            {'skid': unassociated_skid}
        )

        self.assertListEqual([], payload)

    def test_add_treenode_synapse_associations(self):
        """POSTed associations are persisted and returned by the get endpoint."""
        slice_ids = [2, 3]
        synapse_obj_id = 1
        target_skid = 235
        target_treenodes = [237, 239]
        contact_px = 10

        association_payload = []
        for slice_id, treenode_id in zip(slice_ids, target_treenodes):
            association_payload.append(json.dumps([slice_id, treenode_id, contact_px]))

        self.fake_authentication()
        post_response = self.client.post(
            URL_PREFIX + '/{}/add'.format(self.test_project_id),
            {'project_workflow_id': self.test_pssw_id, 'associations': association_payload}
        )
        self.assertEqual(post_response.status_code, 200)
        created = json.loads(post_response.content.decode('utf-8'))
        self.assertEqual(len(created), 2)

        fetched = self.get_response(
            URL_PREFIX + '/{}/get'.format(self.test_project_id),
            {'skid': target_skid}
        )

        expected = [[treenode_id, synapse_obj_id, contact_px] for treenode_id in target_treenodes]
        assertCountEqual(self, expected, fetched)

    def _get_stack_info(self):
        """Fetch stack metadata (translation, resolution, ...) for the test stack."""
        stack_response = self.client.get(
            '/{}/stack/{}/info'.format(self.test_project_id, self.test_stack_id),
        )
        self.assertEqual(stack_response.status_code, 200)
        return json.loads(stack_response.content.decode('utf-8'))

    def _create_treenodes(self, coords_s, parent_id=-1):
        """Create one treenode per stack-coordinate dict.

        Returns the created treenode IDs, their (single) skeleton ID and the
        stack's resolution/translation for coordinate conversions.
        """
        if isinstance(coords_s, dict):
            coords_s = [coords_s]

        stack_info = self._get_stack_info()

        created_ids = []
        skeleton_id = None
        for stack_coords in coords_s:
            project_coords = stack_to_project(
                stack_info['translation'], stack_info['resolution'], stack_coords
            )
            response = self.client.post(
                '/{}/treenode/create'.format(self.test_project_id),
                {
                    'x': project_coords['x'],
                    'y': project_coords['y'],
                    'z': project_coords['z'],
                    'parent_id': parent_id
                }
            )
            self.assertEqual(response.status_code, 200)
            node_info = json.loads(response.content.decode('utf-8'))

            created_ids.append(node_info['treenode_id'])
            # All created nodes must land on the same skeleton.
            assert skeleton_id is None or skeleton_id == node_info['skeleton_id'], 'Inconsistent skeleton ID'
            skeleton_id = node_info['skeleton_id']

        return {
            'treenode_ids': created_ids, 'skeleton_id': skeleton_id,
            'resolution': stack_info['resolution'], 'translation': stack_info['translation']
        }

    def _near_skeleton_response(self, coords_s, distance_s=1):
        """Create treenodes at ``coords_s`` and query synapse slices within ``distance_s`` (stack units)."""
        treenodes_info = self._create_treenodes(coords_s)
        distance_p = stack_distance_to_project(treenodes_info['resolution'], distance_s)

        params = {'skid': treenodes_info['skeleton_id'], 'pssw_id': self.test_pssw_id, 'distance': distance_p}
        response = self.client.get(URL_PREFIX + '/{}/get-distance'.format(self.test_project_id), params)
        self.assertEqual(response.status_code, 200)

        return treenodes_info, json.loads(response.content.decode('utf-8'))

    def test_get_synapse_slices_near_skeleton_single(self):
        """A single node adjacent to a synapse slice finds its synapse."""
        self.fake_authentication()
        info, payload = self._near_skeleton_response({'x': 2.5, 'y': 0.5, 'z': 0})

        self.assertDictEqual(payload, {
            'columns': SYN_SLICE_NEAR_SKEL_COLS,
            'data': [
                [info['skeleton_id'], info['treenode_ids'][0], 1, [2, 3], 0, [0.0, 0.0, 2.0, 1.0]]
            ]
        })

    def test_get_synapse_slices_near_skeleton_multi(self):
        """A node near a multi-slice synapse reports all of its slice IDs."""
        self.fake_authentication()
        info, payload = self._near_skeleton_response({'x': 1, 'y': 1.5, 'z': 0})

        self.assertDictEqual(payload, {
            'columns': SYN_SLICE_NEAR_SKEL_COLS,
            'data': [
                [info['skeleton_id'], info['treenode_ids'][0], 1, [2, 3], 0, [0.0, 0.0, 2.0, 1.0]]
            ]
        })

    def test_get_synapse_slices_near_skeleton_offset_z(self):
        """A node on a different Z section matches nothing."""
        self.fake_authentication()
        _, payload = self._near_skeleton_response({'x': 1, 'y': 1.5, 'z': 1})

        self.assertDictEqual(payload, {
            'columns': SYN_SLICE_NEAR_SKEL_COLS,
            'data': []
        })

    def test_get_synapse_slices_near_skeleton_too_far(self):
        """A node beyond the search distance matches nothing."""
        self.fake_authentication()
        _, payload = self._near_skeleton_response({'x': 10, 'y': 1.5, 'z': 0})

        self.assertDictEqual(payload, {
            'columns': SYN_SLICE_NEAR_SKEL_COLS,
            'data': []
        })
|
[
"six.assertCountEqual",
"json.dumps"
] |
[((3024, 3080), 'six.assertCountEqual', 'assertCountEqual', (['self', 'expected_result', 'parsed_response'], {}), '(self, expected_result, parsed_response)\n', (3040, 3080), False, 'from six import assertCountEqual\n'), ((2234, 2282), 'json.dumps', 'json.dumps', (['[syn_slice_id, other_tn, contact_px]'], {}), '([syn_slice_id, other_tn, contact_px])\n', (2244, 2282), False, 'import json\n')]
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 8
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
def _swagger_property(name):
    """Build a trivial get/set property backed by the ``_<name>`` attribute."""
    backing = '_' + name

    def _get(self):
        return getattr(self, backing)

    def _set(self, value):
        setattr(self, backing, value)

    return property(_get, _set, doc="The {} of this NamespaceObject.".format(name))


class NamespaceObject(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'access_time': 'str',
        'atime_val': 'int',
        'block_size': 'int',
        'blocks': 'int',
        'btime_val': 'int',
        'change_time': 'str',
        'container': 'str',
        'container_path': 'str',
        'create_time': 'str',
        'ctime_val': 'int',
        'gid': 'int',
        'group': 'str',
        'id': 'int',
        'is_hidden': 'bool',
        'last_modified': 'str',
        'mode': 'str',
        'mtime_val': 'int',
        'name': 'str',
        'nlink': 'int',
        'owner': 'str',
        'size': 'int',
        'stub': 'bool',
        'type': 'str',
        'uid': 'int'
    }

    attribute_map = {
        'access_time': 'access_time',
        'atime_val': 'atime_val',
        'block_size': 'block_size',
        'blocks': 'blocks',
        'btime_val': 'btime_val',
        'change_time': 'change_time',
        'container': 'container',
        'container_path': 'container_path',
        'create_time': 'create_time',
        'ctime_val': 'ctime_val',
        'gid': 'gid',
        'group': 'group',
        'id': 'id',
        'is_hidden': 'is_hidden',
        'last_modified': 'last_modified',
        'mode': 'mode',
        'mtime_val': 'mtime_val',
        'name': 'name',
        'nlink': 'nlink',
        'owner': 'owner',
        'size': 'size',
        'stub': 'stub',
        'type': 'type',
        'uid': 'uid'
    }

    # One plain get/set property per swagger attribute, each backed by the
    # corresponding underscore-prefixed instance attribute.
    access_time = _swagger_property('access_time')
    atime_val = _swagger_property('atime_val')
    block_size = _swagger_property('block_size')
    blocks = _swagger_property('blocks')
    btime_val = _swagger_property('btime_val')
    change_time = _swagger_property('change_time')
    container = _swagger_property('container')
    container_path = _swagger_property('container_path')
    create_time = _swagger_property('create_time')
    ctime_val = _swagger_property('ctime_val')
    gid = _swagger_property('gid')
    group = _swagger_property('group')
    id = _swagger_property('id')
    is_hidden = _swagger_property('is_hidden')
    last_modified = _swagger_property('last_modified')
    mode = _swagger_property('mode')
    mtime_val = _swagger_property('mtime_val')
    name = _swagger_property('name')
    nlink = _swagger_property('nlink')
    owner = _swagger_property('owner')
    size = _swagger_property('size')
    stub = _swagger_property('stub')
    type = _swagger_property('type')
    uid = _swagger_property('uid')

    def __init__(self, access_time=None, atime_val=None, block_size=None, blocks=None, btime_val=None, change_time=None, container=None, container_path=None, create_time=None, ctime_val=None, gid=None, group=None, id=None, is_hidden=None, last_modified=None, mode=None, mtime_val=None, name=None, nlink=None, owner=None, size=None, stub=None, type=None, uid=None):  # noqa: E501
        """NamespaceObject - a model defined in Swagger"""  # noqa: E501
        supplied = {
            'access_time': access_time,
            'atime_val': atime_val,
            'block_size': block_size,
            'blocks': blocks,
            'btime_val': btime_val,
            'change_time': change_time,
            'container': container,
            'container_path': container_path,
            'create_time': create_time,
            'ctime_val': ctime_val,
            'gid': gid,
            'group': group,
            'id': id,
            'is_hidden': is_hidden,
            'last_modified': last_modified,
            'mode': mode,
            'mtime_val': mtime_val,
            'name': name,
            'nlink': nlink,
            'owner': owner,
            'size': size,
            'stub': stub,
            'type': type,
            'uid': uid,
        }

        # Initialise every backing field to None, then apply only the
        # keywords the caller actually supplied (via the property setters).
        for field in supplied:
            setattr(self, '_' + field, None)
        self.discriminator = None

        for field, value in supplied.items():
            if value is not None:
                setattr(self, field, value)

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, NamespaceObject) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"six.iteritems"
] |
[((20851, 20884), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (20864, 20884), False, 'import six\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
网易云音乐 Player
"""
# Let's make some noise
from __future__ import print_function, unicode_literals, division, absolute_import
import subprocess
import threading
import time
import os
import random
from future.builtins import str
# from ui import Ui
from storage import Storage
from api import NetEase
from cache import Cache
from config import Config
from utils import notify
import logger
log = logger.getLogger(__name__)
class Player(object):
MODE_ORDERED = 0
MODE_ORDERED_LOOP = 1
MODE_SINGLE_LOOP = 2
MODE_RANDOM = 3
MODE_RANDOM_LOOP = 4
def __init__(self):
self.config = Config()
# self.ui = Ui()
self.popen_handler = None
# flag stop, prevent thread start
self.playing_flag = False
self.refrese_url_flag = False
self.process_length = 0
self.process_location = 0
self.storage = Storage()
self.cache = Cache()
self.end_callback = None
self.playing_song_changed_callback = None
self.api = NetEase()
@property
def info(self):
return self.storage.database["player_info"]
@property
def songs(self):
return self.storage.database["songs"]
@property
def index(self):
return self.info["idx"]
@property
def list(self):
return self.info["player_list"]
@property
def order(self):
return self.info["playing_order"]
@property
def mode(self):
return self.info["playing_mode"]
@property
def is_ordered_mode(self):
return self.mode == Player.MODE_ORDERED
@property
def is_ordered_loop_mode(self):
return self.mode == Player.MODE_ORDERED_LOOP
@property
def is_single_loop_mode(self):
return self.mode == Player.MODE_SINGLE_LOOP
@property
def is_random_mode(self):
return self.mode == Player.MODE_RANDOM
@property
def is_random_loop_mode(self):
return self.mode == Player.MODE_RANDOM_LOOP
@property
def config_notifier(self):
return self.config.get("notifier")
@property
def config_mpg123(self):
return self.config.get("mpg123_parameters")
@property
def current_song(self):
if not self.songs:
return {}
if not self.is_index_valid:
return {}
song_id = self.list[self.index]
return self.songs.get(song_id, {})
@property
def playing_id(self):
return self.current_song["song_id"]
@property
def playing_name(self):
return self.current_song["song_name"]
@property
def is_empty(self):
return len(self.list) == 0
@property
def is_index_valid(self):
return 0 <= self.index < len(self.list)
def notify_playing(self):
if not self.current_song:
return
if not self.config_notifier:
return
song = self.current_song
notify(
"正在播放: {}\n{}-{}".format(
song["song_name"], song["artist"], song["album_name"]
)
)
def notify_copyright_issue(self):
log.warning(
"Song {} is unavailable due to copyright issue.".format(self.playing_id)
)
notify("版权限制,无法播放此歌曲")
def change_mode(self, step=1):
self.info["playing_mode"] = (self.info["playing_mode"] + step) % 5
def build_playinfo(self):
if not self.current_song:
return
# self.ui.build_playinfo(
# self.current_song["song_name"],
# self.current_song["artist"],
# self.current_song["album_name"],
# self.current_song["quality"],
# time.time(),
# pause=not self.playing_flag,
# )
def add_songs(self, songs):
for song in songs:
song_id = str(song["song_id"])
self.info["player_list"].append(song_id)
if song_id in self.songs:
self.songs[song_id].update(song)
else:
self.songs[song_id] = song
def refresh_urls(self):
songs = self.api.dig_info(self.list, "refresh_urls")
if songs:
for song in songs:
song_id = str(song["song_id"])
if song_id in self.songs:
self.songs[song_id]["mp3_url"] = song["mp3_url"]
self.songs[song_id]["expires"] = song["expires"]
self.songs[song_id]["get_time"] = song["get_time"]
else:
self.songs[song_id] = song
self.refrese_url_flag = True
def stop(self):
if not self.popen_handler:
return
self.playing_flag = False
self.popen_handler.stdin.write(b"Q\n")
self.popen_handler.stdin.flush()
self.popen_handler.kill()
self.popen_handler = None
# wait process to be killed
time.sleep(0.01)
def tune_volume(self, up=0):
if not self.popen_handler:
return
new_volume = self.info["playing_volume"] + up
if new_volume > 100:
new_volume = 100
elif new_volume < 0:
new_volume = 0
self.info["playing_volume"] = new_volume
self.popen_handler.stdin.write(
"V {}\n".format(self.info["playing_volume"]).encode()
)
self.popen_handler.stdin.flush()
def switch(self):
if not self.popen_handler:
return
self.playing_flag = not self.playing_flag
self.popen_handler.stdin.write(b"P\n")
self.popen_handler.stdin.flush()
self.build_playinfo()
def run_mpg123(self, on_exit, url, expires=-1, get_time=-1):
para = ["mpg123", "-R"] + self.config_mpg123
self.popen_handler = subprocess.Popen(
para, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
self.tune_volume()
self.popen_handler.stdin.write(b"L " + url.encode("utf-8") + b"\n")
self.popen_handler.stdin.flush()
endless_loop_cnt = 0
while True:
if not self.popen_handler:
break
strout = self.popen_handler.stdout.readline().decode("utf-8").strip()
if strout[:2] == "@F":
# playing, update progress
out = strout.split(" ")
self.process_location = int(float(out[3]))
self.process_length = int(float(out[3]) + float(out[4]))
elif strout[:2] == "@E":
self.playing_flag = True
if (
expires >= 0
and get_time >= 0
and time.time() - expires - get_time >= 0
):
# 刷新URL
self.refresh_urls()
else:
# error, stop song and move to next
self.notify_copyright_issue()
break
elif strout == "@P 0":
# end, moving to next
self.playing_flag = True
break
elif strout == "":
endless_loop_cnt += 1
# 有播放后没有退出,mpg123一直在发送空消息的情况,此处直接终止处理
if endless_loop_cnt > 100:
log.warning(
"mpg123 error, halt, endless loop and high cpu use, then we kill it"
)
break
if self.playing_flag:
if self.refrese_url_flag:
self.stop()
self.replay()
self.refrese_url_flag = False
else:
self.next()
else:
self.stop()
def download_lyric(self, is_transalted=False):
key = "lyric" if not is_transalted else "tlyric"
if key not in self.songs[str(self.playing_id)]:
self.songs[str(self.playing_id)][key] = []
if len(self.songs[str(self.playing_id)][key]) > 0:
return
if not is_transalted:
lyric = self.api.song_lyric(self.playing_id)
else:
lyric = self.api.song_tlyric(self.playing_id)
self.songs[str(self.playing_id)][key] = lyric
def download_song(self, song_id, song_name, artist, url):
def write_path(song_id, path):
self.songs[str(song_id)]["cache"] = path
self.cache.add(song_id, song_name, artist, url, write_path)
self.cache.start_download()
def start_playing(self, on_exit, args):
"""
Runs the given args in subprocess.Popen, and then calls the function
on_exit when the subprocess completes.
on_exit is a callable object, and args is a lists/tuple of args
that would give to subprocess.Popen.
"""
# log.debug("%s,%s,%s" % (args['song_id'], args['song_name'], args['mp3_url']))
if "cache" in args.keys() and os.path.isfile(args["cache"]):
thread = threading.Thread(
target=self.run_mpg123, args=(on_exit, args["cache"])
)
else:
new_url = NetEase().songs_url([args["song_id"]])[0]["url"] #使用新地址
if not new_url: #如果没有获得新地址
new_url = args["mp3_url"] #使用老地址传给mpg123
thread = threading.Thread(
target=self.run_mpg123,
args=(on_exit, new_url, args["expires"], args["get_time"]),
)
cache_thread = threading.Thread(
target=self.download_song,
args=(
args["song_id"],
args["song_name"],
args["artist"],
args["mp3_url"],
),
)
cache_thread.start()
thread.start()
lyric_download_thread = threading.Thread(target=self.download_lyric)
lyric_download_thread.start()
tlyric_download_thread = threading.Thread(
target=self.download_lyric, args=(True,)
)
tlyric_download_thread.start()
# returns immediately after the thread starts
return thread
def replay(self):
if not self.is_index_valid:
self.stop()
if self.end_callback:
log.debug("Callback")
self.end_callback()
return
if not self.current_song:
return
self.stop()
self.playing_flag = True
self.build_playinfo()
self.notify_playing()
self.start_playing(lambda: 0, self.current_song)
def shuffle_order(self):
del self.order[:]
self.order.extend(list(range(0, len(self.list))))
random.shuffle(self.order)
self.info["random_index"] = 0
def new_player_list(self, type, title, datalist, offset):
self.info["player_list_type"] = type
self.info["player_list_title"] = title
# self.info['idx'] = offset
self.info["player_list"] = []
self.info["playing_order"] = []
self.info["random_index"] = 0
self.songs.clear()
self.add_songs(datalist)
def append_songs(self, datalist):
self.add_songs(datalist)
# switch_flag为true表示:
# 在播放列表中 || 当前所在列表类型不在"songs"、"djchannels"、"fmsongs"中
def play_or_pause(self, idx, switch_flag):
if self.is_empty:
return
# print('flag:',switch_flag)
# if same "list index" and "playing index" --> same song :: pause/resume it
if self.index == idx and switch_flag:
if not self.popen_handler:
# print('aaaaaa')
self.stop()
self.replay()
else:
# print('bbbbbb')
self.switch()
else:
# print('cccccccc')
self.info["idx"] = idx
self.stop()
self.replay()
def _swap_song(self):
now_songs = self.order.index(self.index)
self.order[0], self.order[now_songs] = self.order[now_songs], self.order[0]
def _need_to_shuffle(self):
playing_order = self.order
random_index = self.info["random_index"]
if (
random_index >= len(playing_order)
or playing_order[random_index] != self.index
):
return True
else:
return False
def next_idx(self):
if not self.is_index_valid:
return self.stop()
playlist_len = len(self.list)
if self.mode == Player.MODE_ORDERED:
# make sure self.index will not over
if self.info["idx"] < playlist_len:
self.info["idx"] += 1
elif self.mode == Player.MODE_ORDERED_LOOP:
self.info["idx"] = (self.index + 1) % playlist_len
elif self.mode == Player.MODE_SINGLE_LOOP:
self.info["idx"] = self.info["idx"]
else:
playing_order_len = len(self.order)
if self._need_to_shuffle():
self.shuffle_order()
# When you regenerate playing list
# you should keep previous song same.
self._swap_song()
playing_order_len = len(self.order)
self.info["random_index"] += 1
# Out of border
if self.mode == Player.MODE_RANDOM_LOOP:
self.info["random_index"] %= playing_order_len
# Random but not loop, out of border, stop playing.
if self.info["random_index"] >= playing_order_len:
self.info["idx"] = playlist_len
else:
self.info["idx"] = self.order[self.info["random_index"]]
if self.playing_song_changed_callback is not None:
self.playing_song_changed_callback()
def next(self):
self.stop()
self.next_idx()
self.replay()
def prev_idx(self):
if not self.is_index_valid:
self.stop()
return
playlist_len = len(self.list)
if self.mode == Player.MODE_ORDERED:
if self.info["idx"] > 0:
self.info["idx"] -= 1
elif self.mode == Player.MODE_ORDERED_LOOP:
self.info["idx"] = (self.info["idx"] - 1) % playlist_len
elif self.mode == Player.MODE_SINGLE_LOOP:
self.info["idx"] = self.info["idx"]
else:
playing_order_len = len(self.order)
if self._need_to_shuffle():
self.shuffle_order()
playing_order_len = len(self.order)
self.info["random_index"] -= 1
if self.info["random_index"] < 0:
if self.mode == Player.MODE_RANDOM:
self.info["random_index"] = 0
else:
self.info["random_index"] %= playing_order_len
self.info["idx"] = self.order[self.info["random_index"]]
if self.playing_song_changed_callback is not None:
self.playing_song_changed_callback()
def prev(self):
self.stop()
self.prev_idx()
self.replay()
def shuffle(self):
self.stop()
self.info["playing_mode"] = Player.MODE_RANDOM
self.shuffle_order()
self.info["idx"] = self.info["playing_order"][self.info["random_index"]]
self.replay()
def volume_up(self):
self.tune_volume(5)
def volume_down(self):
self.tune_volume(-5)
def update_size(self):
self.ui.update_size()
self.build_playinfo()
def cache_song(self, song_id, song_name, artist, song_url):
def on_exit(song_id, path):
self.songs[str(song_id)]["cache"] = path
self.cache.enable = False
self.cache.enable = True
self.cache.add(song_id, song_name, artist, song_url, on_exit)
self.cache.start_download()
|
[
"api.NetEase",
"threading.Thread",
"subprocess.Popen",
"config.Config",
"future.builtins.str",
"random.shuffle",
"logger.getLogger",
"storage.Storage",
"utils.notify",
"time.sleep",
"time.time",
"os.path.isfile",
"cache.Cache"
] |
[((450, 476), 'logger.getLogger', 'logger.getLogger', (['__name__'], {}), '(__name__)\n', (466, 476), False, 'import logger\n'), ((665, 673), 'config.Config', 'Config', ([], {}), '()\n', (671, 673), False, 'from config import Config\n'), ((936, 945), 'storage.Storage', 'Storage', ([], {}), '()\n', (943, 945), False, 'from storage import Storage\n'), ((967, 974), 'cache.Cache', 'Cache', ([], {}), '()\n', (972, 974), False, 'from cache import Cache\n'), ((1077, 1086), 'api.NetEase', 'NetEase', ([], {}), '()\n', (1084, 1086), False, 'from api import NetEase\n'), ((3293, 3315), 'utils.notify', 'notify', (['"""版权限制,无法播放此歌曲"""'], {}), "('版权限制,无法播放此歌曲')\n", (3299, 3315), False, 'from utils import notify\n'), ((4969, 4985), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (4979, 4985), False, 'import time\n'), ((5845, 5942), 'subprocess.Popen', 'subprocess.Popen', (['para'], {'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(para, stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n', (5861, 5942), False, 'import subprocess\n'), ((9862, 9906), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.download_lyric'}), '(target=self.download_lyric)\n', (9878, 9906), False, 'import threading\n'), ((9978, 10036), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.download_lyric', 'args': '(True,)'}), '(target=self.download_lyric, args=(True,))\n', (9994, 10036), False, 'import threading\n'), ((10740, 10766), 'random.shuffle', 'random.shuffle', (['self.order'], {}), '(self.order)\n', (10754, 10766), False, 'import random\n'), ((3890, 3910), 'future.builtins.str', 'str', (["song['song_id']"], {}), "(song['song_id'])\n", (3893, 3910), False, 'from future.builtins import str\n'), ((8963, 8992), 'os.path.isfile', 'os.path.isfile', (["args['cache']"], {}), "(args['cache'])\n", (8977, 8992), False, 'import os\n'), ((9015, 9086), 'threading.Thread', 'threading.Thread', ([], {'target': 
'self.run_mpg123', 'args': "(on_exit, args['cache'])"}), "(target=self.run_mpg123, args=(on_exit, args['cache']))\n", (9031, 9086), False, 'import threading\n'), ((9332, 9437), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.run_mpg123', 'args': "(on_exit, new_url, args['expires'], args['get_time'])"}), "(target=self.run_mpg123, args=(on_exit, new_url, args[\n 'expires'], args['get_time']))\n", (9348, 9437), False, 'import threading\n'), ((9507, 9631), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.download_song', 'args': "(args['song_id'], args['song_name'], args['artist'], args['mp3_url'])"}), "(target=self.download_song, args=(args['song_id'], args[\n 'song_name'], args['artist'], args['mp3_url']))\n", (9523, 9631), False, 'import threading\n'), ((4277, 4297), 'future.builtins.str', 'str', (["song['song_id']"], {}), "(song['song_id'])\n", (4280, 4297), False, 'from future.builtins import str\n'), ((7895, 7915), 'future.builtins.str', 'str', (['self.playing_id'], {}), '(self.playing_id)\n', (7898, 7915), False, 'from future.builtins import str\n'), ((8232, 8252), 'future.builtins.str', 'str', (['self.playing_id'], {}), '(self.playing_id)\n', (8235, 8252), False, 'from future.builtins import str\n'), ((7941, 7961), 'future.builtins.str', 'str', (['self.playing_id'], {}), '(self.playing_id)\n', (7944, 7961), False, 'from future.builtins import str\n'), ((8392, 8404), 'future.builtins.str', 'str', (['song_id'], {}), '(song_id)\n', (8395, 8404), False, 'from future.builtins import str\n'), ((15692, 15704), 'future.builtins.str', 'str', (['song_id'], {}), '(song_id)\n', (15695, 15704), False, 'from future.builtins import str\n'), ((8000, 8020), 'future.builtins.str', 'str', (['self.playing_id'], {}), '(self.playing_id)\n', (8003, 8020), False, 'from future.builtins import str\n'), ((9153, 9162), 'api.NetEase', 'NetEase', ([], {}), '()\n', (9160, 9162), False, 'from api import NetEase\n'), ((6744, 6755), 'time.time', 'time.time', ([], {}), 
'()\n', (6753, 6755), False, 'import time\n')]
|
import unittest
import aerochat.database
class TestJson(unittest.TestCase):
@staticmethod
def get_json_string():
return '{"timestamp":"1399589150","sender":"mpillar","text":"This is a test"}'
@staticmethod
def get_json_message():
return aerochat.database.Message.from_json(TestJson.get_json_string())
def verify_json_message(self, message):
self.assertEqual(message.timestamp, int(1399589150))
self.assertEqual(message.sender, 'mpillar')
self.assertEqual(message.text, 'This is a test')
def test_json_decode(self):
"""
Decode a JSON message and verify that we get back what we expect.
"""
message = TestJson.get_json_message()
self.verify_json_message(message)
def test_json_encode(self):
"""
Decode, encode, and verify to ensure that our encoder works as expected
as well.
"""
self.verify_json_message(aerochat.database.Message.from_json(TestJson.get_json_message().to_json()))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main"
] |
[((1064, 1079), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1077, 1079), False, 'import unittest\n')]
|
# SPDX-License-Identifier: Apache-2.0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from onnx import defs
def main() -> None:
# domain -> support level -> name -> [schema]
with_inference = []
without_inference = []
for schema in defs.get_all_schemas():
domain, name, has_inference = schema.domain, schema.name, schema.has_type_and_shape_inference_function
elem = (domain, name)
if has_inference:
with_inference.append(elem)
else:
without_inference.append(elem)
print(len(with_inference), 'operators have a type/shape inference function.')
print(len(without_inference), 'do not. These are:')
for domain, name in sorted(without_inference):
print(domain, name)
if __name__ == '__main__':
main()
|
[
"onnx.defs.get_all_schemas"
] |
[((352, 374), 'onnx.defs.get_all_schemas', 'defs.get_all_schemas', ([], {}), '()\n', (372, 374), False, 'from onnx import defs\n')]
|
import os
import numpy as np
from libyana.meshutils import meshio
def load_objects(obj_root):
object_names = [obj_name for obj_name in os.listdir(obj_root) if ".tgz" not in obj_name]
objects = {}
for obj_name in object_names:
# obj_path = os.path.join(obj_root, obj_name, "textured_simple_2000.obj")
obj_path = os.path.join(obj_root, obj_name, "textured_simple.obj") # TODO use full objects
with open(obj_path) as m_f:
mesh = meshio.fast_load_obj(m_f)[0]
objects[obj_name] = {"verts": mesh["vertices"], "faces": mesh["faces"]}
return objects
def load_corners(corner_root):
obj_corners = {}
for objname in os.listdir(corner_root):
filepath = os.path.join(corner_root, objname, "corners.npy")
corners = np.load(filepath)
obj_corners[objname] = corners
return obj_corners
def lineParser(line, annoDict):
"""
Parses a line in the 'anno.txt' and creates a entry in dict with lineid as key
:param line: line from 'anno.txt'
:param annoDict: dict in which an entry should be added
:return:
"""
lineList = line.split()
lineid = lineList[0]
objID = lineList[1]
paramsList = list(map(float, lineList[2:]))
assert lineid not in annoDict.keys(), "Something wrong with the annotation file..."
annoDict[lineid] = {
"objID": objID,
"handJoints": np.reshape(np.array(paramsList[:63]), [21, 3]),
"handPose": np.array(paramsList[63 : 63 + 48]),
"handTrans": np.array(paramsList[63 + 48 : 63 + 48 + 3]),
"handBeta": np.array(paramsList[63 + 48 + 3 : 63 + 48 + 3 + 10]),
"objRot": np.array(paramsList[63 + 48 + 3 + 10 : 63 + 48 + 3 + 10 + 3]),
"objTrans": np.array(paramsList[63 + 48 + 3 + 10 + 3 : 63 + 48 + 3 + 10 + 3 + 3]),
}
return annoDict
def parseAnnoTxt(filename):
"""
Parse the 'anno.txt'
:param filename: path to 'anno.txt'
:return: dict with lineid as keys
"""
ftxt = open(filename, "r")
annoLines = ftxt.readlines()
annoDict = {}
for line in annoLines:
lineParser(line, annoDict)
return annoDict
def project3DPoints(camMat, pts3D, isOpenGLCoords=True):
"""
Function for projecting 3d points to 2d
:param camMat: camera matrix
:param pts3D: 3D points
:param isOpenGLCoords: If True, hand/object along negative z-axis.
If False hand/object along positive z-axis
:return:
"""
assert pts3D.shape[-1] == 3
assert len(pts3D.shape) == 2
coordChangeMat = np.array([[1.0, 0.0, 0.0], [0, -1.0, 0.0], [0.0, 0.0, -1.0]], dtype=np.float32)
if isOpenGLCoords:
pts3D = pts3D.dot(coordChangeMat.T)
projPts = pts3D.dot(camMat.T)
projPts = np.stack([projPts[:, 0] / projPts[:, 2], projPts[:, 1] / projPts[:, 2]], axis=1)
assert len(projPts.shape) == 2
return projPts
|
[
"numpy.stack",
"numpy.load",
"numpy.array",
"libyana.meshutils.meshio.fast_load_obj",
"os.path.join",
"os.listdir"
] |
[((679, 702), 'os.listdir', 'os.listdir', (['corner_root'], {}), '(corner_root)\n', (689, 702), False, 'import os\n'), ((2559, 2638), 'numpy.array', 'np.array', (['[[1.0, 0.0, 0.0], [0, -1.0, 0.0], [0.0, 0.0, -1.0]]'], {'dtype': 'np.float32'}), '([[1.0, 0.0, 0.0], [0, -1.0, 0.0], [0.0, 0.0, -1.0]], dtype=np.float32)\n', (2567, 2638), True, 'import numpy as np\n'), ((2755, 2840), 'numpy.stack', 'np.stack', (['[projPts[:, 0] / projPts[:, 2], projPts[:, 1] / projPts[:, 2]]'], {'axis': '(1)'}), '([projPts[:, 0] / projPts[:, 2], projPts[:, 1] / projPts[:, 2]], axis=1\n )\n', (2763, 2840), True, 'import numpy as np\n'), ((342, 397), 'os.path.join', 'os.path.join', (['obj_root', 'obj_name', '"""textured_simple.obj"""'], {}), "(obj_root, obj_name, 'textured_simple.obj')\n", (354, 397), False, 'import os\n'), ((723, 772), 'os.path.join', 'os.path.join', (['corner_root', 'objname', '"""corners.npy"""'], {}), "(corner_root, objname, 'corners.npy')\n", (735, 772), False, 'import os\n'), ((791, 808), 'numpy.load', 'np.load', (['filepath'], {}), '(filepath)\n', (798, 808), True, 'import numpy as np\n'), ((1469, 1501), 'numpy.array', 'np.array', (['paramsList[63:63 + 48]'], {}), '(paramsList[63:63 + 48])\n', (1477, 1501), True, 'import numpy as np\n'), ((1526, 1567), 'numpy.array', 'np.array', (['paramsList[63 + 48:63 + 48 + 3]'], {}), '(paramsList[63 + 48:63 + 48 + 3])\n', (1534, 1567), True, 'import numpy as np\n'), ((1591, 1641), 'numpy.array', 'np.array', (['paramsList[63 + 48 + 3:63 + 48 + 3 + 10]'], {}), '(paramsList[63 + 48 + 3:63 + 48 + 3 + 10])\n', (1599, 1641), True, 'import numpy as np\n'), ((1663, 1722), 'numpy.array', 'np.array', (['paramsList[63 + 48 + 3 + 10:63 + 48 + 3 + 10 + 3]'], {}), '(paramsList[63 + 48 + 3 + 10:63 + 48 + 3 + 10 + 3])\n', (1671, 1722), True, 'import numpy as np\n'), ((1746, 1813), 'numpy.array', 'np.array', (['paramsList[63 + 48 + 3 + 10 + 3:63 + 48 + 3 + 10 + 3 + 3]'], {}), '(paramsList[63 + 48 + 3 + 10 + 3:63 + 48 + 3 + 10 + 3 + 3])\n', 
(1754, 1813), True, 'import numpy as np\n'), ((142, 162), 'os.listdir', 'os.listdir', (['obj_root'], {}), '(obj_root)\n', (152, 162), False, 'import os\n'), ((1412, 1437), 'numpy.array', 'np.array', (['paramsList[:63]'], {}), '(paramsList[:63])\n', (1420, 1437), True, 'import numpy as np\n'), ((478, 503), 'libyana.meshutils.meshio.fast_load_obj', 'meshio.fast_load_obj', (['m_f'], {}), '(m_f)\n', (498, 503), False, 'from libyana.meshutils import meshio\n')]
|
"""
Create a flat plate of length 1.0 with aspect ratio 2.0 and a 40-degree
inclination.
The plate is discretized with spacing 0.04 in the x-y plane and with spacing
0.04 along the z-direction.
"""
import math
import pathlib
import numpy
# Flat-plate's parameters.
L = 1.0 # chord length
AR = 2.0 # aspect ratio
xc, yc, zc = 0.0, 0.0, 0.0 # center's coordinates
aoa = 40.0 # angle of inclination in degrees
ds = 0.04 # mesh spacing
simu_dir = pathlib.Path(__file__).absolute().parents[1]
# Generate coordinates of the flat plate.
n = math.ceil(L / ds)
s = numpy.linspace(xc - L / 2, xc + L / 2, num=n + 1)
x = xc + numpy.cos(numpy.radians(-aoa)) * s
y = yc + numpy.sin(numpy.radians(-aoa)) * s
nz = math.ceil(L * AR / ds)
z = numpy.linspace(zc - L * AR / 2, zc + L * AR / 2, num=nz + 1)
# Write coordinates into file.
filepath = simu_dir / 'flatplateAoA{}.body'.format(aoa)
with open(filepath, 'w') as outfile:
outfile.write('{}\n'.format(x.size * z.size))
for zi in z:
with open(filepath, 'ab') as outfile:
numpy.savetxt(outfile, numpy.c_[x, y, zi * numpy.ones(x.size)])
|
[
"numpy.radians",
"math.ceil",
"numpy.ones",
"pathlib.Path",
"numpy.linspace"
] |
[((544, 561), 'math.ceil', 'math.ceil', (['(L / ds)'], {}), '(L / ds)\n', (553, 561), False, 'import math\n'), ((566, 615), 'numpy.linspace', 'numpy.linspace', (['(xc - L / 2)', '(xc + L / 2)'], {'num': '(n + 1)'}), '(xc - L / 2, xc + L / 2, num=n + 1)\n', (580, 615), False, 'import numpy\n'), ((711, 733), 'math.ceil', 'math.ceil', (['(L * AR / ds)'], {}), '(L * AR / ds)\n', (720, 733), False, 'import math\n'), ((738, 798), 'numpy.linspace', 'numpy.linspace', (['(zc - L * AR / 2)', '(zc + L * AR / 2)'], {'num': '(nz + 1)'}), '(zc - L * AR / 2, zc + L * AR / 2, num=nz + 1)\n', (752, 798), False, 'import numpy\n'), ((636, 655), 'numpy.radians', 'numpy.radians', (['(-aoa)'], {}), '(-aoa)\n', (649, 655), False, 'import numpy\n'), ((680, 699), 'numpy.radians', 'numpy.radians', (['(-aoa)'], {}), '(-aoa)\n', (693, 699), False, 'import numpy\n'), ((452, 474), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (464, 474), False, 'import pathlib\n'), ((1080, 1098), 'numpy.ones', 'numpy.ones', (['x.size'], {}), '(x.size)\n', (1090, 1098), False, 'import numpy\n')]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<<EMAIL>>
# http://binux.me
# Created on 2014-02-22 14:02:21
from pyspider.libs.base_handler import BaseHandler, catch_status_code_error, every
class IgnoreHandler(object):
pass
class TestHandler(BaseHandler):
def hello(self):
return "hello world!"
def echo(self, response):
return response.content
def saved(self, response):
return response.save
def echo_task(self, response, task):
return task['project']
@catch_status_code_error
def catch_status_code(self, response):
return response.status_code
def raise_exception(self):
print('print')
logger.info("info")
logger.warning("warning")
logger.error("error")
raise Exception('exception')
def add_task(self, response):
self.crawl('http://www.google.com', callback='echo', params={'wd': u'中文'})
self.send_message('some_project', {'some': 'message'})
@every
def on_cronjob1(self, response):
logger.info('on_cronjob1')
@every(seconds=10)
def on_cronjob2(self, response):
logger.info('on_cronjob2')
def generator(self, response):
yield "a"
yield "b"
|
[
"pyspider.libs.base_handler.every"
] |
[((1150, 1167), 'pyspider.libs.base_handler.every', 'every', ([], {'seconds': '(10)'}), '(seconds=10)\n', (1155, 1167), False, 'from pyspider.libs.base_handler import BaseHandler, catch_status_code_error, every\n')]
|
#!/usr/bin/env python2.7
#
# Silicon Labs si7021 Interface Class
# Copyright (C) 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import smbus2,time
DEVICE_ADDRESS = 0x40
TEMPERATURE = 0xE3
HUMIDITY = 0xF5
I2C_BUS = 8
TTYPE_BOTH = 2
TTYPE_DEGF = 1
TTYPE_DEGC = 0
class si7021:
""" Silicon Labs si7021 Interface Class
Copyright (C) 2018 <NAME>
Simple class to read from the si7021 high
precision temperature and humidity I2C sensor.
"""
def __init__(self,bus = I2C_BUS, addr = DEVICE_ADDRESS):
""" Constructor:
Parameters:
bus - i2c bus to use, default 8
addr - Device address to send commands to. Default 0x40
If the device does not exist on addr IOError gets thrown by the smbus library.
"""
self.bus = smbus2.SMBus(bus)
self.addr = addr
def _fix_precision(self, num):
""" Fix Precision of float to 2 decimal places """
num = int(num*100)
return float(num)/100.0
def temperature(self,rtype=TTYPE_BOTH, precise=False):
""" Read temperature from device, optional parameters:
rtype - TTYPE_DEGC, return in degrees C
TTYPE_DEGF, return in degrees F
TTYPE_BOTH, return list with [c,f]
precise - True|False, return precise or fixed number. Default false
"""
if rtype < 0 or rtype > 3:
raise ValueError('Rtype must be between 0 and 2')
[msb,lsb] = [0,0]
while msb == 0:
[msb,lsb] = self.bus.read_i2c_block_data(DEVICE_ADDRESS,TEMPERATURE,2)
if msb == 0 or msb == 0xff:
msb = 0
time.sleep(.3)
tempc = ((msb * 256 + lsb) * 175.72 / 65536.0) - 46.85
tempf = (tempc * 1.8 + 32.0)
if not precise:
tempc = self._fix_precision(tempc)
tempf = self._fix_precision(tempf)
rvals = [tempc, tempf,(tempc, tempf)]
return rvals[rtype]
def humidity(self, precise=False):
""" Read humidity from device
precise - True|False, return precise or fixed number. Default false
"""
[msb,lsb] = [0,0]
while msb == 0:
self.bus.write_byte(DEVICE_ADDRESS,HUMIDITY)
time.sleep(.3)
[msb,lsb] = self.bus.read_byte(DEVICE_ADDRESS), self.bus.read_byte(DEVICE_ADDRESS)
humidity = (((msb * 256 + lsb) * 125.0) / 65536.0) - 6
if not precise:
humidity = self._fix_precision(humidity)
return humidity
def __call__(self,rtype=TTYPE_BOTH, precise=False):
t = self.temperature(rtype,precise)
h = self.humidity(precise)
return (t,h)
|
[
"smbus2.SMBus",
"time.sleep"
] |
[((1350, 1367), 'smbus2.SMBus', 'smbus2.SMBus', (['bus'], {}), '(bus)\n', (1362, 1367), False, 'import smbus2, time\n'), ((2844, 2859), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (2854, 2859), False, 'import smbus2, time\n'), ((2246, 2261), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (2256, 2261), False, 'import smbus2, time\n')]
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import glob
import fnmatch
import traceback
import logging
import numpy
import pytest
import lasio
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
logger = logging.getLogger(__name__)
def read_file():
las = lasio.read(stegfn("1.2", "sample_big.las"))
def test_read_v12_sample_big(benchmark):
benchmark(read_file)
|
[
"os.path.dirname",
"logging.getLogger"
] |
[((192, 217), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (207, 217), False, 'import os, sys\n'), ((391, 418), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (408, 418), False, 'import logging\n'), ((51, 76), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (66, 76), False, 'import os, sys\n'), ((250, 275), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (265, 275), False, 'import os, sys\n'), ((332, 357), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (347, 357), False, 'import os, sys\n')]
|
# Copyright 2012 Yelp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represent data in single-line YAML.
:py:class:`YAMLProtocol` can handle nearly any data type, and can serve as a
more readable alternative to :py:class:`~mrjob.protocol.PickleProtocol`.
As with pickle, you should be careful about reading untrusted data with this
protocol, because it can execute arbitrary code; also, this format is
Python-specific.
:py:class:`SafeYAMLProtocol` supports basic YAML data types, which are
a superset of JSON data types, and are supported across YAML implementations.
We also provide :py:class:`YAMLValueProtocol` and :py:class:`SafeYAMLProtocol`
to handle values without keys.
"""
from __future__ import absolute_import
import yaml
import six
from mr3px.common import decode_string, encode_string
__all__ = [
'SafeYAMLProtocol',
'SafeYAMLValueProtocol',
'YAMLProtocol',
'YAMLValueProtocol',
]
def dump_inline(data, allow_unicode=None, encoding=None, safe=False):
"""Dump YAML on a single line.
:param allow_unicode: Don't escape non-ASCII characters in the result.
:param encoding: Optional character encoding to use. If not set,
return unicode
:param safe: if True, use :py:func:`yaml.safe_dump`; that is, only encode
basic value types; otherwise use :py:func:`yaml.dump`
:param kwargs: additional keyword arguments to pass through to
:py:func:`yaml.dump`. Only *allow_unicode* and
*encoding* seem to be useful.
"""
dump = yaml.safe_dump if safe else yaml.dump
out = dump(
data,
allow_unicode=allow_unicode,
default_flow_style='block',
explicit_end=False,
explicit_start=False,
line_break='\n',
width=float('inf')).rstrip()
if out.endswith(six.u('\n...')):
out = out[:-3].rstrip()
return out.encode(encoding)
class YAMLProtocolBase(object):
safe = True
def __init__(self, allow_unicode=False, encoding=None):
"""Optional parameters:
:param allow_unicode: Allow non-ASCII characters in the output
(e.g. accented characters).
:param encoding: Character encoding to use. We default to UTF-8,
with fallback to latin-1 when decoding input.
"""
self.allow_unicode = allow_unicode
self.encoding = encoding
def load(self, data):
unicode_data = decode_string(data, encoding=self.encoding)
if self.safe:
return yaml.safe_load(unicode_data)
else:
return yaml.load(unicode_data)
def dump(self, data):
return dump_inline(
data,
allow_unicode=self.allow_unicode,
encoding=self.encoding or 'utf_8', # never return Unicode
safe=self.safe)
class SafeYAMLProtocol(YAMLProtocolBase):
"""Encode/decode keys and values that can be represented using
YAML tags. This is a superset of JSON, and includes most basic data
structures; for a full list see
http://pyyaml.org/wiki/PyYAMLDocumentation#YAMLtagsandPythontypes.
Note that this will encode tuples as lists.
"""
def read(self, line):
key_str, value_str = decode_string(line, self.encoding).split('\t')
key_str = encode_string(key_str, self.encoding)
value_str = encode_string(value_str, self.encoding)
# cache last key
if key_str != getattr(self, '_key_cache', [None])[0]:
self._key_cache = (key_str, self.load(key_str))
key = self._key_cache[1]
return key, self.load(value_str)
def write(self, key, value):
de_key = decode_string(self.dump(key), encoding=self.encoding)
de_value = decode_string(self.dump(value), encoding=self.encoding)
return encode_string('%s\t%s' % (de_key, de_value), self.encoding)
# return six.b('%s\t%s' % (self.dump(key), self.dump(value)))
class YAMLProtocol(SafeYAMLProtocol):
"""Encode/decode keys and values of virtually any type using YAML.
"""
safe = False
class SafeYAMLValueProtocol(YAMLProtocolBase):
"""Encode/decode keys and values that can be represented using
YAML tags. This is a superset of JSON, and includes most basic data
structures; for a full list see
http://pyyaml.org/wiki/PyYAMLDocumentation#YAMLtagsandPythontypes.
Note that this will encode tuples as lists.
"""
def read(self, line):
return None, self.load(line)
def write(self, _, value):
return self.dump(value)
class YAMLValueProtocol(SafeYAMLValueProtocol):
"""Encode/decode values of virtually any type using YAML.
"""
safe = False
|
[
"yaml.load",
"six.u",
"mr3px.common.encode_string",
"yaml.safe_load",
"mr3px.common.decode_string"
] |
[((2336, 2350), 'six.u', 'six.u', (['"""\n..."""'], {}), "('\\n...')\n", (2341, 2350), False, 'import six\n'), ((2974, 3017), 'mr3px.common.decode_string', 'decode_string', (['data'], {'encoding': 'self.encoding'}), '(data, encoding=self.encoding)\n', (2987, 3017), False, 'from mr3px.common import decode_string, encode_string\n'), ((3831, 3868), 'mr3px.common.encode_string', 'encode_string', (['key_str', 'self.encoding'], {}), '(key_str, self.encoding)\n', (3844, 3868), False, 'from mr3px.common import decode_string, encode_string\n'), ((3889, 3928), 'mr3px.common.encode_string', 'encode_string', (['value_str', 'self.encoding'], {}), '(value_str, self.encoding)\n', (3902, 3928), False, 'from mr3px.common import decode_string, encode_string\n'), ((4348, 4407), 'mr3px.common.encode_string', 'encode_string', (["('%s\\t%s' % (de_key, de_value))", 'self.encoding'], {}), "('%s\\t%s' % (de_key, de_value), self.encoding)\n", (4361, 4407), False, 'from mr3px.common import decode_string, encode_string\n'), ((3060, 3088), 'yaml.safe_load', 'yaml.safe_load', (['unicode_data'], {}), '(unicode_data)\n', (3074, 3088), False, 'import yaml\n'), ((3122, 3145), 'yaml.load', 'yaml.load', (['unicode_data'], {}), '(unicode_data)\n', (3131, 3145), False, 'import yaml\n'), ((3766, 3800), 'mr3px.common.decode_string', 'decode_string', (['line', 'self.encoding'], {}), '(line, self.encoding)\n', (3779, 3800), False, 'from mr3px.common import decode_string, encode_string\n')]
|
import json
import time
def create_graph(data):
current_milli_time = lambda: int(round(time.time() * 1000))
edge_list = []
hashtags_list = []
user_mentions_list = []
all_data = json.loads(data)
user_screen_name = all_data['user']['screen_name']
timestamp_ms = all_data['timestamp_ms']
if len(all_data['entities']['hashtags']) > 0:
a=0
for i in all_data['entities']['hashtags']:
hashtags_list.append(all_data['entities']['hashtags'][a]['text'])
a+=1
a=0
if len(all_data['entities']['user_mentions']) > 0:
b=0
for i in all_data['entities']['user_mentions']:
user_mentions_list.append(all_data['entities']['user_mentions'][b]['screen_name'])
b+=1
b=0
if len(hashtags_list) > 0:
for i in hashtags_list:
edge_list.append((user_screen_name, "#"+i, timestamp_ms))
if len(user_mentions_list) > 0:
for i in user_mentions_list:
edge_list.append((user_screen_name, i, timestamp_ms))
return edge_list
|
[
"json.loads",
"time.time"
] |
[((183, 199), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (193, 199), False, 'import json\n'), ((89, 100), 'time.time', 'time.time', ([], {}), '()\n', (98, 100), False, 'import time\n')]
|
from django.db.models import Field
from urllib.parse import unquote
from django_editorjs.widgets import EditorJsWidget
class EditorJsField(Field):
def __init__(self, *args, **kwargs):
editorjs_config = kwargs.pop("editorjs_config", None)
super().__init__(*args, **kwargs)
self._editorjs_config = editorjs_config
def get_internal_type(self):
return "TextField"
def clean(self, value, model_instance):
if value is not None:
return unquote(super().clean(value, model_instance))
else:
return None
def formfield(self, *args, **kwargs):
kwargs["widget"] = EditorJsWidget(editorjs_config=self._editorjs_config)
return super().formfield(*args, **kwargs)
|
[
"django_editorjs.widgets.EditorJsWidget"
] |
[((652, 705), 'django_editorjs.widgets.EditorJsWidget', 'EditorJsWidget', ([], {'editorjs_config': 'self._editorjs_config'}), '(editorjs_config=self._editorjs_config)\n', (666, 705), False, 'from django_editorjs.widgets import EditorJsWidget\n')]
|
#!/usr/bin/env python3
import re
from mongodb1 import *
def get_goods_url(url):
urls = re.findall('http.+\r\n', url)
if len(urls) == 2:
urls[0] = urls[0].strip('\r\n')
urls[1] = urls[1].strip('\r\n')
#丢弃短连接
if urls[1].find('click') > 0:
return 0, urls
else:
return 1, urls
else:
return 0, urls
def save(text):
status, urls = get_goods_url(text)
if status == 1:
save_caiji_goods(urls)
|
[
"re.findall"
] |
[((93, 122), 're.findall', 're.findall', (["'http.+\\r\\n'", 'url'], {}), "('http.+\\r\\n', url)\n", (103, 122), False, 'import re\n')]
|
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon.utils import validators
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.loadbalancers import utils
AVAILABLE_PROTOCOLS = ('HTTP', 'HTTPS', 'TCP')
AVAILABLE_METHODS = ('ROUND_ROBIN', 'LEAST_CONNECTIONS', 'SOURCE_IP')
LOG = logging.getLogger(__name__)
class AddPoolAction(workflows.Action):
name = forms.CharField(max_length=80, label=_("Name"))
description = forms.CharField(
initial="", required=False,
max_length=80, label=_("Description"))
# provider is optional because some LBaaS implementation does
# not support service-type extension.
provider = forms.ChoiceField(label=_("Provider"), required=False)
subnet_id = forms.ChoiceField(label=_("Subnet"))
protocol = forms.ChoiceField(label=_("Protocol"))
lb_method = forms.ChoiceField(label=_("Load Balancing Method"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
def __init__(self, request, *args, **kwargs):
super(AddPoolAction, self).__init__(request, *args, **kwargs)
tenant_id = request.user.tenant_id
subnet_id_choices = [('', _("Select a Subnet"))]
try:
networks = api.neutron.network_list_for_tenant(request, tenant_id)
except Exception:
exceptions.handle(request,
_('Unable to retrieve networks list.'))
networks = []
for n in networks:
for s in n['subnets']:
name = "%s (%s)" % (s.name_or_id, s.cidr)
subnet_id_choices.append((s.id, name))
self.fields['subnet_id'].choices = subnet_id_choices
protocol_choices = [('', _("Select a Protocol"))]
[protocol_choices.append((p, p)) for p in AVAILABLE_PROTOCOLS]
self.fields['protocol'].choices = protocol_choices
lb_method_choices = [('', _("Select a Method"))]
[lb_method_choices.append((m, m)) for m in AVAILABLE_METHODS]
self.fields['lb_method'].choices = lb_method_choices
# provider choice
try:
if api.neutron.is_extension_supported(request, 'service-type'):
provider_list = api.neutron.provider_list(request)
providers = [p for p in provider_list
if p['service_type'] == 'LOADBALANCER']
else:
providers = None
except Exception:
exceptions.handle(request,
_('Unable to retrieve providers list.'))
providers = []
if providers:
default_providers = [p for p in providers if p.get('default')]
if default_providers:
default_provider = default_providers[0]['name']
else:
default_provider = None
provider_choices = [(p['name'], p['name']) for p in providers
if p['name'] != default_provider]
if default_provider:
provider_choices.insert(
0, (default_provider,
_("%s (default)") % default_provider))
else:
if providers is None:
msg = _("Provider for Load Balancer is not supported")
else:
msg = _("No provider is available")
provider_choices = [('', msg)]
self.fields['provider'].widget.attrs['readonly'] = True
self.fields['provider'].choices = provider_choices
class Meta(object):
name = _("Add New Pool")
permissions = ('openstack.services.network',)
help_text_template = 'project/loadbalancers/_create_pool_help.html'
class AddPoolStep(workflows.Step):
action_class = AddPoolAction
contributes = ("name", "description", "subnet_id", "provider",
"protocol", "lb_method", "admin_state_up")
def contribute(self, data, context):
context = super(AddPoolStep, self).contribute(data, context)
context['admin_state_up'] = (context['admin_state_up'] == 'True')
if data:
return context
class AddPool(workflows.Workflow):
slug = "addpool"
name = _("Add Pool")
finalize_button_name = _("Add")
success_message = _('Added pool "%s".')
failure_message = _('Unable to add pool "%s".')
success_url = "horizon:project:loadbalancers:index"
default_steps = (AddPoolStep,)
def format_status_message(self, message):
name = self.context.get('name')
return message % name
def handle(self, request, context):
try:
api.lbaas.pool_create(request, **context)
return True
except Exception:
return False
class AddVipAction(workflows.Action):
name = forms.CharField(max_length=80, label=_("Name"))
description = forms.CharField(
initial="", required=False,
max_length=80, label=_("Description"))
subnet_id = forms.ChoiceField(label=_("VIP Subnet"),
initial="",
required=False)
address = forms.IPField(label=_("IP address"),
version=forms.IPv4,
mask=False,
required=False)
protocol_port = forms.IntegerField(
label=_("Protocol Port"), min_value=1,
help_text=_("Enter an integer value "
"between 1 and 65535."),
validators=[validators.validate_port_range])
protocol = forms.ChoiceField(label=_("Protocol"))
session_persistence = forms.ChoiceField(
required=False, initial={}, label=_("Session Persistence"),
widget=forms.Select(attrs={
'class': 'switchable',
'data-slug': 'persistence'
}))
cookie_name = forms.CharField(
initial="", required=False,
max_length=80, label=_("Cookie Name"),
help_text=_("Required for APP_COOKIE persistence;"
" Ignored otherwise."),
widget=forms.TextInput(attrs={
'class': 'switched',
'data-switch-on': 'persistence',
'data-persistence-app_cookie': 'APP_COOKIE',
}))
connection_limit = forms.IntegerField(
required=False, min_value=-1, label=_("Connection Limit"),
help_text=_("Maximum number of connections allowed "
"for the VIP or '-1' if the limit is not set"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
def __init__(self, request, *args, **kwargs):
super(AddVipAction, self).__init__(request, *args, **kwargs)
tenant_id = request.user.tenant_id
subnet_id_choices = [('', _("Select a Subnet"))]
try:
networks = api.neutron.network_list_for_tenant(request, tenant_id)
except Exception:
exceptions.handle(request,
_('Unable to retrieve networks list.'))
networks = []
for n in networks:
for s in n['subnets']:
name = "%s (%s)" % (s.name, s.cidr)
subnet_id_choices.append((s.id, name))
self.fields['subnet_id'].choices = subnet_id_choices
protocol_choices = [('', _("Select a Protocol"))]
[protocol_choices.append((p, p)) for p in AVAILABLE_PROTOCOLS]
self.fields['protocol'].choices = protocol_choices
session_persistence_choices = [('', _("No Session Persistence"))]
for mode in ('SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE'):
session_persistence_choices.append((mode.lower(), mode))
self.fields[
'session_persistence'].choices = session_persistence_choices
def clean(self):
cleaned_data = super(AddVipAction, self).clean()
persistence = cleaned_data.get('session_persistence')
if persistence:
cleaned_data['session_persistence'] = persistence.upper()
if (cleaned_data.get('session_persistence') == 'APP_COOKIE' and
not cleaned_data.get('cookie_name')):
msg = _('Cookie name is required for APP_COOKIE persistence.')
self._errors['cookie_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
name = _("Specify VIP")
permissions = ('openstack.services.network',)
help_text_template = 'project/loadbalancers/_create_vip_help.html'
class AddVipStep(workflows.Step):
action_class = AddVipAction
depends_on = ("pool_id", "subnet")
contributes = ("name", "description", "subnet_id",
"address", "protocol_port", "protocol",
"session_persistence", "cookie_name",
"connection_limit", "admin_state_up")
def contribute(self, data, context):
context = super(AddVipStep, self).contribute(data, context)
context['admin_state_up'] = (context['admin_state_up'] == 'True')
return context
class AddVip(workflows.Workflow):
slug = "addvip"
name = _("Add VIP")
finalize_button_name = _("Add")
success_message = _('Added VIP "%s".')
failure_message = _('Unable to add VIP "%s".')
success_url = "horizon:project:loadbalancers:index"
default_steps = (AddVipStep,)
def format_status_message(self, message):
name = self.context.get('name')
return message % name
def handle(self, request, context):
if context['subnet_id'] == '':
try:
pool = api.lbaas.pool_get(request, context['pool_id'])
context['subnet_id'] = pool['subnet_id']
except Exception:
context['subnet_id'] = None
self.failure_message = _(
'Unable to retrieve the specified pool. '
'Unable to add VIP "%s".')
return False
if context['session_persistence']:
stype = context['session_persistence']
if stype == 'APP_COOKIE':
cookie = context['cookie_name']
context['session_persistence'] = {'type': stype,
'cookie_name': cookie}
else:
context['session_persistence'] = {'type': stype}
else:
context['session_persistence'] = {}
try:
api.lbaas.vip_create(request, **context)
return True
except Exception:
return False
class AddMemberAction(workflows.Action):
pool_id = forms.ChoiceField(label=_("Pool"))
member_type = forms.ChoiceField(
label=_("Member Source"),
choices=[('server_list', _("Select from active instances")),
('member_address', _("Specify member IP address"))],
required=False,
widget=forms.Select(attrs={
'class': 'switchable',
'data-slug': 'membertype'
}))
members = forms.MultipleChoiceField(
required=False,
initial=["default"],
widget=forms.SelectMultiple(attrs={
'class': 'switched',
'data-switch-on': 'membertype',
'data-membertype-server_list': _("Member Instance(s)"),
}),
help_text=_("Select members for this pool "))
address = forms.IPField(
required=False,
help_text=_("Specify member IP address"),
widget=forms.TextInput(attrs={
'class': 'switched',
'data-switch-on': 'membertype',
'data-membertype-member_address': _("Member Address"),
}),
initial="", version=forms.IPv4 | forms.IPv6, mask=False)
weight = forms.IntegerField(
max_value=256, min_value=1, label=_("Weight"), required=False,
help_text=_("Relative part of requests this pool member serves "
"compared to others. \nThe same weight will be applied to "
"all the selected members and can be modified later. "
"Weight must be in the range 1 to 256.")
)
protocol_port = forms.IntegerField(
label=_("Protocol Port"), min_value=1,
help_text=_("Enter an integer value between 1 and 65535. "
"The same port will be used for all the selected "
"members and can be modified later."),
validators=[validators.validate_port_range]
)
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
def __init__(self, request, *args, **kwargs):
super(AddMemberAction, self).__init__(request, *args, **kwargs)
pool_id_choices = [('', _("Select a Pool"))]
try:
tenant_id = self.request.user.tenant_id
pools = api.lbaas.pool_list(request, tenant_id=tenant_id)
except Exception:
pools = []
exceptions.handle(request,
_('Unable to retrieve pools list.'))
pools = sorted(pools,
key=lambda pool: pool.name)
for p in pools:
pool_id_choices.append((p.id, p.name))
self.fields['pool_id'].choices = pool_id_choices
members_choices = []
try:
servers, has_more = api.nova.server_list(request)
except Exception:
servers = []
exceptions.handle(request,
_('Unable to retrieve instances list.'))
if len(servers) == 0:
self.fields['members'].widget.attrs[
'data-membertype-server_list'] = _(
"No servers available. To add a member, you "
"need at least one running instance.")
return
for m in servers:
members_choices.append((m.id, m.name))
self.fields['members'].choices = sorted(
members_choices,
key=lambda member: member[1])
def clean(self):
cleaned_data = super(AddMemberAction, self).clean()
if (cleaned_data.get('member_type') == 'server_list' and
not cleaned_data.get('members')):
msg = _('At least one member must be specified')
self._errors['members'] = self.error_class([msg])
elif (cleaned_data.get('member_type') == 'member_address' and
not cleaned_data.get('address')):
msg = _('Member IP address must be specified')
self._errors['address'] = self.error_class([msg])
return cleaned_data
class Meta(object):
name = _("Add New Member")
permissions = ('openstack.services.network',)
help_text = _("Add member(s) to the selected pool.\n\n"
"Choose one or more listed instances to be "
"added to the pool as member(s). "
"Assign a numeric weight and port number for the "
"selected member(s) to operate(s) on; e.g., 80. \n\n"
"Only one port can be associated with "
"each instance.")
class AddMemberStep(workflows.Step):
action_class = AddMemberAction
contributes = ("pool_id", "member_type", "members", "address",
"protocol_port", "weight", "admin_state_up")
def contribute(self, data, context):
context = super(AddMemberStep, self).contribute(data, context)
context['admin_state_up'] = (context['admin_state_up'] == 'True')
return context
class AddMember(workflows.Workflow):
slug = "addmember"
name = _("Add Member")
finalize_button_name = _("Add")
success_message = _('Added member(s).')
failure_message = _('Unable to add member(s)')
success_url = "horizon:project:loadbalancers:index"
default_steps = (AddMemberStep,)
def handle(self, request, context):
if context['member_type'] == 'server_list':
try:
pool = api.lbaas.pool_get(request, context['pool_id'])
subnet_id = pool['subnet_id']
except Exception:
self.failure_message = _('Unable to retrieve '
'the specified pool.')
return False
for m in context['members']:
params = {'device_id': m}
try:
plist = api.neutron.port_list(request, **params)
except Exception:
return False
# Sort port list for each member. This is needed to avoid
# attachment of random ports in case of creation of several
# members attached to several networks.
plist = sorted(plist, key=lambda port: port.network_id)
psubnet = [p for p in plist for ips in p.fixed_ips
if ips['subnet_id'] == subnet_id]
# If possible, select a port on pool subnet.
if psubnet:
selected_port = psubnet[0]
elif plist:
selected_port = plist[0]
else:
selected_port = None
if selected_port:
context['address'] = \
selected_port.fixed_ips[0]['ip_address']
try:
api.lbaas.member_create(request, **context).id
except Exception as e:
msg = self.failure_message
LOG.info('%s: %s' % (msg, e))
return False
return True
else:
try:
context['member_id'] = api.lbaas.member_create(
request, **context).id
return True
except Exception as e:
msg = self.failure_message
LOG.info('%s: %s' % (msg, e))
return False
class AddMonitorAction(workflows.Action):
type = forms.ChoiceField(
label=_("Type"),
choices=[('ping', _('PING')),
('tcp', _('TCP')),
('http', _('HTTP')),
('https', _('HTTPS'))],
widget=forms.Select(attrs={
'class': 'switchable',
'data-slug': 'type'
}))
delay = forms.IntegerField(
min_value=1,
label=_("Delay"),
help_text=_("The minimum time in seconds between regular checks "
"of a member. It must be greater than or equal to "
"timeout"))
timeout = forms.IntegerField(
min_value=1,
label=_("Timeout"),
help_text=_("The maximum time in seconds for a monitor to wait "
"for a reply. It must be less than or equal to delay"))
max_retries = forms.IntegerField(
max_value=10, min_value=1,
label=_("Max Retries (1~10)"),
help_text=_("Number of permissible failures before changing "
"the status of member to inactive"))
http_method = forms.ChoiceField(
initial="GET",
required=False,
choices=[('GET', _('GET'))],
label=_("HTTP Method"),
help_text=_("HTTP method used to check health status of a member"),
widget=forms.Select(attrs={
'class': 'switched',
'data-switch-on': 'type',
'data-type-http': _('HTTP Method'),
'data-type-https': _('HTTP Method')
}))
url_path = forms.CharField(
initial="/",
required=False,
max_length=80,
label=_("URL"),
widget=forms.TextInput(attrs={
'class': 'switched',
'data-switch-on': 'type',
'data-type-http': _('URL'),
'data-type-https': _('URL')
}))
expected_codes = forms.RegexField(
initial="200",
required=False,
max_length=80,
regex=r'^(\d{3}(\s*,\s*\d{3})*)$|^(\d{3}-\d{3})$',
label=_("Expected HTTP Status Codes"),
help_text=_("Expected code may be a single value (e.g. 200), "
"a list of values (e.g. 200, 202), "
"or range of values (e.g. 200-204)"),
widget=forms.TextInput(attrs={
'class': 'switched',
'data-switch-on': 'type',
'data-type-http': _('Expected HTTP Status Codes'),
'data-type-https': _('Expected HTTP Status Codes')
}))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
def __init__(self, request, *args, **kwargs):
super(AddMonitorAction, self).__init__(request, *args, **kwargs)
def clean(self):
cleaned_data = super(AddMonitorAction, self).clean()
type_opt = cleaned_data.get('type')
delay = cleaned_data.get('delay')
timeout = cleaned_data.get('timeout')
if not delay >= timeout:
msg = _('Delay must be greater than or equal to Timeout')
self._errors['delay'] = self.error_class([msg])
if type_opt in ['http', 'https']:
http_method_opt = cleaned_data.get('http_method')
url_path = cleaned_data.get('url_path')
expected_codes = cleaned_data.get('expected_codes')
if not http_method_opt:
msg = _('Please choose a HTTP method')
self._errors['http_method'] = self.error_class([msg])
if not url_path:
msg = _('Please specify an URL')
self._errors['url_path'] = self.error_class([msg])
if not expected_codes:
msg = _('Please enter a single value (e.g. 200), '
'a list of values (e.g. 200, 202), '
'or range of values (e.g. 200-204)')
self._errors['expected_codes'] = self.error_class([msg])
return cleaned_data
class Meta(object):
name = _("Add New Monitor")
permissions = ('openstack.services.network',)
help_text = _("Create a monitor template.\n\n"
"Select type of monitoring. "
"Specify delay, timeout, and retry limits "
"required by the monitor. "
"Specify method, URL path, and expected "
"HTTP codes upon success.")
class AddMonitorStep(workflows.Step):
action_class = AddMonitorAction
contributes = ("type", "delay", "timeout", "max_retries",
"http_method", "url_path", "expected_codes",
"admin_state_up")
def contribute(self, data, context):
context = super(AddMonitorStep, self).contribute(data, context)
context['admin_state_up'] = (context['admin_state_up'] == 'True')
if data:
return context
class AddMonitor(workflows.Workflow):
slug = "addmonitor"
name = _("Add Monitor")
finalize_button_name = _("Add")
success_message = _('Added monitor')
failure_message = _('Unable to add monitor')
success_url = "horizon:project:loadbalancers:index"
default_steps = (AddMonitorStep,)
def handle(self, request, context):
try:
context['monitor_id'] = api.lbaas.pool_health_monitor_create(
request, **context).get('id')
return True
except Exception:
exceptions.handle(request, _("Unable to add monitor."))
return False
class AddPMAssociationAction(workflows.Action):
monitor_id = forms.ChoiceField(label=_("Monitor"))
def __init__(self, request, *args, **kwargs):
super(AddPMAssociationAction, self).__init__(request, *args, **kwargs)
def populate_monitor_id_choices(self, request, context):
self.fields['monitor_id'].label = _("Select a monitor template "
"for %s") % context['pool_name']
monitor_id_choices = [('', _("Select a Monitor"))]
try:
tenant_id = self.request.user.tenant_id
monitors = api.lbaas.pool_health_monitor_list(request,
tenant_id=tenant_id)
pool_monitors_ids = [pm.id for pm in context['pool_monitors']]
for m in monitors:
if m.id not in pool_monitors_ids:
display_name = utils.get_monitor_display_name(m)
monitor_id_choices.append((m.id, display_name))
except Exception:
exceptions.handle(request,
_('Unable to retrieve monitors list.'))
self.fields['monitor_id'].choices = monitor_id_choices
return monitor_id_choices
class Meta(object):
name = _("Association Details")
permissions = ('openstack.services.network',)
help_text = _("Associate a health monitor with target pool.")
class AddPMAssociationStep(workflows.Step):
action_class = AddPMAssociationAction
depends_on = ("pool_id", "pool_name", "pool_monitors")
contributes = ("monitor_id",)
def contribute(self, data, context):
context = super(AddPMAssociationStep, self).contribute(data, context)
if data:
return context
class AddPMAssociation(workflows.Workflow):
slug = "addassociation"
name = _("Associate Monitor")
finalize_button_name = _("Associate")
success_message = _('Associated monitor.')
failure_message = _('Unable to associate monitor.')
success_url = "horizon:project:loadbalancers:index"
default_steps = (AddPMAssociationStep,)
def handle(self, request, context):
try:
context['monitor_id'] = api.lbaas.pool_monitor_association_create(
request, **context)
return True
except Exception:
exceptions.handle(request, _("Unable to associate monitor."))
return False
class DeletePMAssociationAction(workflows.Action):
monitor_id = forms.ChoiceField(label=_("Monitor"))
def __init__(self, request, *args, **kwargs):
super(DeletePMAssociationAction, self).__init__(
request, *args, **kwargs)
def populate_monitor_id_choices(self, request, context):
self.fields['monitor_id'].label = (_("Select a health monitor of %s") %
context['pool_name'])
monitor_id_choices = [('', _("Select a Monitor"))]
try:
monitors = api.lbaas.pool_health_monitor_list(request)
pool_monitors_ids = [pm.id for pm in context['pool_monitors']]
for m in monitors:
if m.id in pool_monitors_ids:
display_name = utils.get_monitor_display_name(m)
monitor_id_choices.append((m.id, display_name))
except Exception:
exceptions.handle(request,
_('Unable to retrieve monitors list.'))
self.fields['monitor_id'].choices = monitor_id_choices
return monitor_id_choices
class Meta(object):
name = _("Association Details")
permissions = ('openstack.services.network',)
help_text = _("Disassociate a health monitor from target pool. ")
class DeletePMAssociationStep(workflows.Step):
action_class = DeletePMAssociationAction
depends_on = ("pool_id", "pool_name", "pool_monitors")
contributes = ("monitor_id",)
def contribute(self, data, context):
context = super(DeletePMAssociationStep, self).contribute(
data, context)
if data:
return context
class DeletePMAssociation(workflows.Workflow):
    """Workflow that removes a health-monitor association from a pool."""

    slug = "deleteassociation"
    name = _("Disassociate Monitor")
    finalize_button_name = _("Disassociate")
    success_message = _('Disassociated monitor.')
    failure_message = _('Unable to disassociate monitor.')
    # Return to the load balancers index panel on completion.
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (DeletePMAssociationStep,)

    def handle(self, request, context):
        """Delete the pool/monitor association through the LBaaS API.

        Returns True when the call succeeds; otherwise reports the error
        to the user and returns False.
        """
        try:
            result = api.lbaas.pool_monitor_association_delete(
                request, **context)
        except Exception:
            exceptions.handle(request, _("Unable to disassociate monitor."))
            return False
        context['monitor_id'] = result
        return True
|
[
"openstack_dashboard.dashboards.project.loadbalancers.utils.get_monitor_display_name",
"openstack_dashboard.api.lbaas.pool_list",
"openstack_dashboard.api.lbaas.pool_create",
"django.utils.translation.ugettext_lazy",
"openstack_dashboard.api.lbaas.pool_monitor_association_create",
"horizon.forms.Select",
"openstack_dashboard.api.neutron.network_list_for_tenant",
"openstack_dashboard.api.lbaas.pool_get",
"openstack_dashboard.api.neutron.is_extension_supported",
"horizon.forms.TextInput",
"openstack_dashboard.api.nova.server_list",
"openstack_dashboard.api.lbaas.pool_health_monitor_list",
"openstack_dashboard.api.neutron.port_list",
"openstack_dashboard.api.lbaas.member_create",
"openstack_dashboard.api.lbaas.pool_monitor_association_delete",
"openstack_dashboard.api.lbaas.pool_health_monitor_create",
"openstack_dashboard.api.neutron.provider_list",
"openstack_dashboard.api.lbaas.vip_create",
"logging.getLogger"
] |
[((1054, 1081), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1071, 1081), False, 'import logging\n'), ((5077, 5090), 'django.utils.translation.ugettext_lazy', '_', (['"""Add Pool"""'], {}), "('Add Pool')\n", (5078, 5090), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5118, 5126), 'django.utils.translation.ugettext_lazy', '_', (['"""Add"""'], {}), "('Add')\n", (5119, 5126), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5149, 5170), 'django.utils.translation.ugettext_lazy', '_', (['"""Added pool "%s"."""'], {}), '(\'Added pool "%s".\')\n', (5150, 5170), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5193, 5222), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to add pool "%s"."""'], {}), '(\'Unable to add pool "%s".\')\n', (5194, 5222), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10048, 10060), 'django.utils.translation.ugettext_lazy', '_', (['"""Add VIP"""'], {}), "('Add VIP')\n", (10049, 10060), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10088, 10096), 'django.utils.translation.ugettext_lazy', '_', (['"""Add"""'], {}), "('Add')\n", (10089, 10096), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10119, 10139), 'django.utils.translation.ugettext_lazy', '_', (['"""Added VIP "%s"."""'], {}), '(\'Added VIP "%s".\')\n', (10120, 10139), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10162, 10190), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to add VIP "%s"."""'], {}), '(\'Unable to add VIP "%s".\')\n', (10163, 10190), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((16630, 16645), 'django.utils.translation.ugettext_lazy', '_', (['"""Add Member"""'], {}), "('Add Member')\n", (16631, 16645), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((16673, 16681), 'django.utils.translation.ugettext_lazy', '_', 
(['"""Add"""'], {}), "('Add')\n", (16674, 16681), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((16704, 16725), 'django.utils.translation.ugettext_lazy', '_', (['"""Added member(s)."""'], {}), "('Added member(s).')\n", (16705, 16725), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((16748, 16776), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to add member(s)"""'], {}), "('Unable to add member(s)')\n", (16749, 16776), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24035, 24051), 'django.utils.translation.ugettext_lazy', '_', (['"""Add Monitor"""'], {}), "('Add Monitor')\n", (24036, 24051), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24079, 24087), 'django.utils.translation.ugettext_lazy', '_', (['"""Add"""'], {}), "('Add')\n", (24080, 24087), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24110, 24128), 'django.utils.translation.ugettext_lazy', '_', (['"""Added monitor"""'], {}), "('Added monitor')\n", (24111, 24128), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24151, 24177), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to add monitor"""'], {}), "('Unable to add monitor')\n", (24152, 24177), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((26448, 26470), 'django.utils.translation.ugettext_lazy', '_', (['"""Associate Monitor"""'], {}), "('Associate Monitor')\n", (26449, 26470), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((26498, 26512), 'django.utils.translation.ugettext_lazy', '_', (['"""Associate"""'], {}), "('Associate')\n", (26499, 26512), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((26535, 26559), 'django.utils.translation.ugettext_lazy', '_', (['"""Associated monitor."""'], {}), "('Associated monitor.')\n", (26536, 26559), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((26582, 26615), 
'django.utils.translation.ugettext_lazy', '_', (['"""Unable to associate monitor."""'], {}), "('Unable to associate monitor.')\n", (26583, 26615), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((28808, 28833), 'django.utils.translation.ugettext_lazy', '_', (['"""Disassociate Monitor"""'], {}), "('Disassociate Monitor')\n", (28809, 28833), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((28861, 28878), 'django.utils.translation.ugettext_lazy', '_', (['"""Disassociate"""'], {}), "('Disassociate')\n", (28862, 28878), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((28901, 28928), 'django.utils.translation.ugettext_lazy', '_', (['"""Disassociated monitor."""'], {}), "('Disassociated monitor.')\n", (28902, 28928), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((28951, 28987), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to disassociate monitor."""'], {}), "('Unable to disassociate monitor.')\n", (28952, 28987), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4432, 4449), 'django.utils.translation.ugettext_lazy', '_', (['"""Add New Pool"""'], {}), "('Add New Pool')\n", (4433, 4449), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((9293, 9309), 'django.utils.translation.ugettext_lazy', '_', (['"""Specify VIP"""'], {}), "('Specify VIP')\n", (9294, 9309), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((15629, 15648), 'django.utils.translation.ugettext_lazy', '_', (['"""Add New Member"""'], {}), "('Add New Member')\n", (15630, 15648), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((15723, 15996), 'django.utils.translation.ugettext_lazy', '_', (['"""Add member(s) to the selected pool.\n\nChoose one or more listed instances to be added to the pool as member(s). Assign a numeric weight and port number for the selected member(s) to operate(s) on; e.g., 80. 
\n\nOnly one port can be associated with each instance."""'], {}), '("""Add member(s) to the selected pool.\n\nChoose one or more listed instances to be added to the pool as member(s). Assign a numeric weight and port number for the selected member(s) to operate(s) on; e.g., 80. \n\nOnly one port can be associated with each instance."""\n )\n', (15724, 15996), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((23077, 23097), 'django.utils.translation.ugettext_lazy', '_', (['"""Add New Monitor"""'], {}), "('Add New Monitor')\n", (23078, 23097), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((23172, 23370), 'django.utils.translation.ugettext_lazy', '_', (['"""Create a monitor template.\n\nSelect type of monitoring. Specify delay, timeout, and retry limits required by the monitor. Specify method, URL path, and expected HTTP codes upon success."""'], {}), '("""Create a monitor template.\n\nSelect type of monitoring. Specify delay, timeout, and retry limits required by the monitor. 
Specify method, URL path, and expected HTTP codes upon success."""\n )\n', (23173, 23370), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((25869, 25893), 'django.utils.translation.ugettext_lazy', '_', (['"""Association Details"""'], {}), "('Association Details')\n", (25870, 25893), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((25968, 26017), 'django.utils.translation.ugettext_lazy', '_', (['"""Associate a health monitor with target pool."""'], {}), "('Associate a health monitor with target pool.')\n", (25969, 26017), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((28197, 28221), 'django.utils.translation.ugettext_lazy', '_', (['"""Association Details"""'], {}), "('Association Details')\n", (28198, 28221), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((28296, 28349), 'django.utils.translation.ugettext_lazy', '_', (['"""Disassociate a health monitor from target pool. """'], {}), "('Disassociate a health monitor from target pool. 
')\n", (28297, 28349), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1171, 1180), 'django.utils.translation.ugettext_lazy', '_', (['"""Name"""'], {}), "('Name')\n", (1172, 1180), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1282, 1298), 'django.utils.translation.ugettext_lazy', '_', (['"""Description"""'], {}), "('Description')\n", (1283, 1298), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1447, 1460), 'django.utils.translation.ugettext_lazy', '_', (['"""Provider"""'], {}), "('Provider')\n", (1448, 1460), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1518, 1529), 'django.utils.translation.ugettext_lazy', '_', (['"""Subnet"""'], {}), "('Subnet')\n", (1519, 1529), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1570, 1583), 'django.utils.translation.ugettext_lazy', '_', (['"""Protocol"""'], {}), "('Protocol')\n", (1571, 1583), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1625, 1651), 'django.utils.translation.ugettext_lazy', '_', (['"""Load Balancing Method"""'], {}), "('Load Balancing Method')\n", (1626, 1651), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1832, 1848), 'django.utils.translation.ugettext_lazy', '_', (['"""Admin State"""'], {}), "('Admin State')\n", (1833, 1848), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2109, 2164), 'openstack_dashboard.api.neutron.network_list_for_tenant', 'api.neutron.network_list_for_tenant', (['request', 'tenant_id'], {}), '(request, tenant_id)\n', (2144, 2164), False, 'from openstack_dashboard import api\n'), ((2995, 3054), 'openstack_dashboard.api.neutron.is_extension_supported', 'api.neutron.is_extension_supported', (['request', '"""service-type"""'], {}), "(request, 'service-type')\n", (3029, 3054), False, 'from openstack_dashboard import api\n'), ((5497, 5538), 'openstack_dashboard.api.lbaas.pool_create', 'api.lbaas.pool_create', 
(['request'], {}), '(request, **context)\n', (5518, 5538), False, 'from openstack_dashboard import api\n'), ((5702, 5711), 'django.utils.translation.ugettext_lazy', '_', (['"""Name"""'], {}), "('Name')\n", (5703, 5711), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5813, 5829), 'django.utils.translation.ugettext_lazy', '_', (['"""Description"""'], {}), "('Description')\n", (5814, 5829), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5871, 5886), 'django.utils.translation.ugettext_lazy', '_', (['"""VIP Subnet"""'], {}), "('VIP Subnet')\n", (5872, 5886), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6018, 6033), 'django.utils.translation.ugettext_lazy', '_', (['"""IP address"""'], {}), "('IP address')\n", (6019, 6033), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6221, 6239), 'django.utils.translation.ugettext_lazy', '_', (['"""Protocol Port"""'], {}), "('Protocol Port')\n", (6222, 6239), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6272, 6320), 'django.utils.translation.ugettext_lazy', '_', (['"""Enter an integer value between 1 and 65535."""'], {}), "('Enter an integer value between 1 and 65535.')\n", (6273, 6320), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6437, 6450), 'django.utils.translation.ugettext_lazy', '_', (['"""Protocol"""'], {}), "('Protocol')\n", (6438, 6450), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6539, 6563), 'django.utils.translation.ugettext_lazy', '_', (['"""Session Persistence"""'], {}), "('Session Persistence')\n", (6540, 6563), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6580, 6651), 'horizon.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'switchable', 'data-slug': 'persistence'}"}), "(attrs={'class': 'switchable', 'data-slug': 'persistence'})\n", (6592, 6651), False, 'from horizon import forms\n'), ((6787, 6803), 
'django.utils.translation.ugettext_lazy', '_', (['"""Cookie Name"""'], {}), "('Cookie Name')\n", (6788, 6803), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6823, 6883), 'django.utils.translation.ugettext_lazy', '_', (['"""Required for APP_COOKIE persistence; Ignored otherwise."""'], {}), "('Required for APP_COOKIE persistence; Ignored otherwise.')\n", (6824, 6883), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6923, 7049), 'horizon.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'switched', 'data-switch-on': 'persistence',\n 'data-persistence-app_cookie': 'APP_COOKIE'}"}), "(attrs={'class': 'switched', 'data-switch-on': 'persistence',\n 'data-persistence-app_cookie': 'APP_COOKIE'})\n", (6938, 7049), False, 'from horizon import forms\n'), ((7181, 7202), 'django.utils.translation.ugettext_lazy', '_', (['"""Connection Limit"""'], {}), "('Connection Limit')\n", (7182, 7202), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7222, 7313), 'django.utils.translation.ugettext_lazy', '_', (['"""Maximum number of connections allowed for the VIP or \'-1\' if the limit is not set"""'], {}), '("Maximum number of connections allowed for the VIP or \'-1\' if the limit is not set"\n )\n', (7223, 7313), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7512, 7528), 'django.utils.translation.ugettext_lazy', '_', (['"""Admin State"""'], {}), "('Admin State')\n", (7513, 7528), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7786, 7841), 'openstack_dashboard.api.neutron.network_list_for_tenant', 'api.neutron.network_list_for_tenant', (['request', 'tenant_id'], {}), '(request, tenant_id)\n', (7821, 7841), False, 'from openstack_dashboard import api\n'), ((9102, 9158), 'django.utils.translation.ugettext_lazy', '_', (['"""Cookie name is required for APP_COOKIE persistence."""'], {}), "('Cookie name is required for APP_COOKIE persistence.')\n", (9103, 9158), True, 
'from django.utils.translation import ugettext_lazy as _\n'), ((11367, 11407), 'openstack_dashboard.api.lbaas.vip_create', 'api.lbaas.vip_create', (['request'], {}), '(request, **context)\n', (11387, 11407), False, 'from openstack_dashboard import api\n'), ((11564, 11573), 'django.utils.translation.ugettext_lazy', '_', (['"""Pool"""'], {}), "('Pool')\n", (11565, 11573), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11626, 11644), 'django.utils.translation.ugettext_lazy', '_', (['"""Member Source"""'], {}), "('Member Source')\n", (11627, 11644), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11824, 11894), 'horizon.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'switchable', 'data-slug': 'membertype'}"}), "(attrs={'class': 'switchable', 'data-slug': 'membertype'})\n", (11836, 11894), False, 'from horizon import forms\n'), ((12243, 12277), 'django.utils.translation.ugettext_lazy', '_', (['"""Select members for this pool """'], {}), "('Select members for this pool ')\n", (12244, 12277), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12350, 12380), 'django.utils.translation.ugettext_lazy', '_', (['"""Specify member IP address"""'], {}), "('Specify member IP address')\n", (12351, 12380), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12717, 12728), 'django.utils.translation.ugettext_lazy', '_', (['"""Weight"""'], {}), "('Weight')\n", (12718, 12728), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12764, 12973), 'django.utils.translation.ugettext_lazy', '_', (['"""Relative part of requests this pool member serves compared to others. \nThe same weight will be applied to all the selected members and can be modified later. Weight must be in the range 1 to 256."""'], {}), '("""Relative part of requests this pool member serves compared to others. \nThe same weight will be applied to all the selected members and can be modified later. 
Weight must be in the range 1 to 256."""\n )\n', (12765, 12973), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13095, 13113), 'django.utils.translation.ugettext_lazy', '_', (['"""Protocol Port"""'], {}), "('Protocol Port')\n", (13096, 13113), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13146, 13282), 'django.utils.translation.ugettext_lazy', '_', (['"""Enter an integer value between 1 and 65535. The same port will be used for all the selected members and can be modified later."""'], {}), "('Enter an integer value between 1 and 65535. The same port will be used for all the selected members and can be modified later.'\n )\n", (13147, 13282), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13562, 13578), 'django.utils.translation.ugettext_lazy', '_', (['"""Admin State"""'], {}), "('Admin State')\n", (13563, 13578), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13842, 13891), 'openstack_dashboard.api.lbaas.pool_list', 'api.lbaas.pool_list', (['request'], {'tenant_id': 'tenant_id'}), '(request, tenant_id=tenant_id)\n', (13861, 13891), False, 'from openstack_dashboard import api\n'), ((14335, 14364), 'openstack_dashboard.api.nova.server_list', 'api.nova.server_list', (['request'], {}), '(request)\n', (14355, 14364), False, 'from openstack_dashboard import api\n'), ((14655, 14743), 'django.utils.translation.ugettext_lazy', '_', (['"""No servers available. To add a member, you need at least one running instance."""'], {}), "('No servers available. 
To add a member, you need at least one running instance.'\n )\n", (14656, 14743), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((15215, 15257), 'django.utils.translation.ugettext_lazy', '_', (['"""At least one member must be specified"""'], {}), "('At least one member must be specified')\n", (15216, 15257), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19058, 19067), 'django.utils.translation.ugettext_lazy', '_', (['"""Type"""'], {}), "('Type')\n", (19059, 19067), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19237, 19301), 'horizon.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'switchable', 'data-slug': 'type'}"}), "(attrs={'class': 'switchable', 'data-slug': 'type'})\n", (19249, 19301), False, 'from horizon import forms\n'), ((19404, 19414), 'django.utils.translation.ugettext_lazy', '_', (['"""Delay"""'], {}), "('Delay')\n", (19405, 19414), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19434, 19551), 'django.utils.translation.ugettext_lazy', '_', (['"""The minimum time in seconds between regular checks of a member. It must be greater than or equal to timeout"""'], {}), "('The minimum time in seconds between regular checks of a member. It must be greater than or equal to timeout'\n )\n", (19435, 19551), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19663, 19675), 'django.utils.translation.ugettext_lazy', '_', (['"""Timeout"""'], {}), "('Timeout')\n", (19664, 19675), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19695, 19806), 'django.utils.translation.ugettext_lazy', '_', (['"""The maximum time in seconds for a monitor to wait for a reply. It must be less than or equal to delay"""'], {}), "('The maximum time in seconds for a monitor to wait for a reply. 
It must be less than or equal to delay'\n )\n", (19696, 19806), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19913, 19936), 'django.utils.translation.ugettext_lazy', '_', (['"""Max Retries (1~10)"""'], {}), "('Max Retries (1~10)')\n", (19914, 19936), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19956, 20045), 'django.utils.translation.ugettext_lazy', '_', (['"""Number of permissible failures before changing the status of member to inactive"""'], {}), "('Number of permissible failures before changing the status of member to inactive'\n )\n", (19957, 20045), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20200, 20216), 'django.utils.translation.ugettext_lazy', '_', (['"""HTTP Method"""'], {}), "('HTTP Method')\n", (20201, 20216), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20236, 20292), 'django.utils.translation.ugettext_lazy', '_', (['"""HTTP method used to check health status of a member"""'], {}), "('HTTP method used to check health status of a member')\n", (20237, 20292), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20623, 20631), 'django.utils.translation.ugettext_lazy', '_', (['"""URL"""'], {}), "('URL')\n", (20624, 20631), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21017, 21048), 'django.utils.translation.ugettext_lazy', '_', (['"""Expected HTTP Status Codes"""'], {}), "('Expected HTTP Status Codes')\n", (21018, 21048), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21068, 21193), 'django.utils.translation.ugettext_lazy', '_', (['"""Expected code may be a single value (e.g. 200), a list of values (e.g. 200, 202), or range of values (e.g. 200-204)"""'], {}), "('Expected code may be a single value (e.g. 200), a list of values (e.g. 200, 202), or range of values (e.g. 
200-204)'\n )\n", (21069, 21193), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21663, 21679), 'django.utils.translation.ugettext_lazy', '_', (['"""Admin State"""'], {}), "('Admin State')\n", (21664, 21679), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((22072, 22123), 'django.utils.translation.ugettext_lazy', '_', (['"""Delay must be greater than or equal to Timeout"""'], {}), "('Delay must be greater than or equal to Timeout')\n", (22073, 22123), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24676, 24688), 'django.utils.translation.ugettext_lazy', '_', (['"""Monitor"""'], {}), "('Monitor')\n", (24677, 24688), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24924, 24961), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a monitor template for %s"""'], {}), "('Select a monitor template for %s')\n", (24925, 24961), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((25180, 25244), 'openstack_dashboard.api.lbaas.pool_health_monitor_list', 'api.lbaas.pool_health_monitor_list', (['request'], {'tenant_id': 'tenant_id'}), '(request, tenant_id=tenant_id)\n', (25214, 25244), False, 'from openstack_dashboard import api\n'), ((26806, 26867), 'openstack_dashboard.api.lbaas.pool_monitor_association_create', 'api.lbaas.pool_monitor_association_create', (['request'], {}), '(request, **context)\n', (26847, 26867), False, 'from openstack_dashboard import api\n'), ((27128, 27140), 'django.utils.translation.ugettext_lazy', '_', (['"""Monitor"""'], {}), "('Monitor')\n", (27129, 27140), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((27393, 27427), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a health monitor of %s"""'], {}), "('Select a health monitor of %s')\n", (27394, 27427), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((27591, 27634), 'openstack_dashboard.api.lbaas.pool_health_monitor_list', 
'api.lbaas.pool_health_monitor_list', (['request'], {}), '(request)\n', (27625, 27634), False, 'from openstack_dashboard import api\n'), ((29181, 29242), 'openstack_dashboard.api.lbaas.pool_monitor_association_delete', 'api.lbaas.pool_monitor_association_delete', (['request'], {}), '(request, **context)\n', (29222, 29242), False, 'from openstack_dashboard import api\n'), ((2050, 2070), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Subnet"""'], {}), "('Select a Subnet')\n", (2051, 2070), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2596, 2618), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Protocol"""'], {}), "('Select a Protocol')\n", (2597, 2618), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2786, 2806), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Method"""'], {}), "('Select a Method')\n", (2787, 2806), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3088, 3122), 'openstack_dashboard.api.neutron.provider_list', 'api.neutron.provider_list', (['request'], {}), '(request)\n', (3113, 3122), False, 'from openstack_dashboard import api\n'), ((4103, 4151), 'django.utils.translation.ugettext_lazy', '_', (['"""Provider for Load Balancer is not supported"""'], {}), "('Provider for Load Balancer is not supported')\n", (4104, 4151), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4192, 4221), 'django.utils.translation.ugettext_lazy', '_', (['"""No provider is available"""'], {}), "('No provider is available')\n", (4193, 4221), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7727, 7747), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Subnet"""'], {}), "('Select a Subnet')\n", (7728, 7747), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8266, 8288), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Protocol"""'], {}), "('Select a Protocol')\n", (8267, 8288), True, 
'from django.utils.translation import ugettext_lazy as _\n'), ((8466, 8493), 'django.utils.translation.ugettext_lazy', '_', (['"""No Session Persistence"""'], {}), "('No Session Persistence')\n", (8467, 8493), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10518, 10565), 'openstack_dashboard.api.lbaas.pool_get', 'api.lbaas.pool_get', (['request', "context['pool_id']"], {}), "(request, context['pool_id'])\n", (10536, 10565), False, 'from openstack_dashboard import api\n'), ((13736, 13754), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Pool"""'], {}), "('Select a Pool')\n", (13737, 13754), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((15458, 15498), 'django.utils.translation.ugettext_lazy', '_', (['"""Member IP address must be specified"""'], {}), "('Member IP address must be specified')\n", (15459, 15498), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((17003, 17050), 'openstack_dashboard.api.lbaas.pool_get', 'api.lbaas.pool_get', (['request', "context['pool_id']"], {}), "(request, context['pool_id'])\n", (17021, 17050), False, 'from openstack_dashboard import api\n'), ((22464, 22496), 'django.utils.translation.ugettext_lazy', '_', (['"""Please choose a HTTP method"""'], {}), "('Please choose a HTTP method')\n", (22465, 22496), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((22618, 22644), 'django.utils.translation.ugettext_lazy', '_', (['"""Please specify an URL"""'], {}), "('Please specify an URL')\n", (22619, 22644), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((22769, 22886), 'django.utils.translation.ugettext_lazy', '_', (['"""Please enter a single value (e.g. 200), a list of values (e.g. 200, 202), or range of values (e.g. 200-204)"""'], {}), "('Please enter a single value (e.g. 200), a list of values (e.g. 200, 202), or range of values (e.g. 
200-204)'\n )\n", (22770, 22886), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((25068, 25089), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Monitor"""'], {}), "('Select a Monitor')\n", (25069, 25089), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((27531, 27552), 'django.utils.translation.ugettext_lazy', '_', (['"""Select a Monitor"""'], {}), "('Select a Monitor')\n", (27532, 27552), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1708, 1715), 'django.utils.translation.ugettext_lazy', '_', (['"""UP"""'], {}), "('UP')\n", (1709, 1715), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1774, 1783), 'django.utils.translation.ugettext_lazy', '_', (['"""DOWN"""'], {}), "('DOWN')\n", (1775, 1783), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2260, 2298), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve networks list."""'], {}), "('Unable to retrieve networks list.')\n", (2261, 2298), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3392, 3431), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve providers list."""'], {}), "('Unable to retrieve providers list.')\n", (3393, 3431), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7388, 7395), 'django.utils.translation.ugettext_lazy', '_', (['"""UP"""'], {}), "('UP')\n", (7389, 7395), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7454, 7463), 'django.utils.translation.ugettext_lazy', '_', (['"""DOWN"""'], {}), "('DOWN')\n", (7455, 7463), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7937, 7975), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve networks list."""'], {}), "('Unable to retrieve networks list.')\n", (7938, 7975), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10736, 10803), 
'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve the specified pool. Unable to add VIP "%s"."""'], {}), '(\'Unable to retrieve the specified pool. Unable to add VIP "%s".\')\n', (10737, 10803), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11679, 11712), 'django.utils.translation.ugettext_lazy', '_', (['"""Select from active instances"""'], {}), "('Select from active instances')\n", (11680, 11712), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11751, 11781), 'django.utils.translation.ugettext_lazy', '_', (['"""Specify member IP address"""'], {}), "('Specify member IP address')\n", (11752, 11781), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13438, 13445), 'django.utils.translation.ugettext_lazy', '_', (['"""UP"""'], {}), "('UP')\n", (13439, 13445), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13504, 13513), 'django.utils.translation.ugettext_lazy', '_', (['"""DOWN"""'], {}), "('DOWN')\n", (13505, 13513), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((14010, 14045), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve pools list."""'], {}), "('Unable to retrieve pools list.')\n", (14011, 14045), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((14485, 14524), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve instances list."""'], {}), "('Unable to retrieve instances list.')\n", (14486, 14524), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((17166, 17209), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve the specified pool."""'], {}), "('Unable to retrieve the specified pool.')\n", (17167, 17209), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((17415, 17455), 'openstack_dashboard.api.neutron.port_list', 'api.neutron.port_list', (['request'], {}), '(request, **params)\n', (17436, 17455), False, 'from 
openstack_dashboard import api\n'), ((18721, 18764), 'openstack_dashboard.api.lbaas.member_create', 'api.lbaas.member_create', (['request'], {}), '(request, **context)\n', (18744, 18764), False, 'from openstack_dashboard import api\n'), ((19095, 19104), 'django.utils.translation.ugettext_lazy', '_', (['"""PING"""'], {}), "('PING')\n", (19096, 19104), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19132, 19140), 'django.utils.translation.ugettext_lazy', '_', (['"""TCP"""'], {}), "('TCP')\n", (19133, 19140), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19169, 19178), 'django.utils.translation.ugettext_lazy', '_', (['"""HTTP"""'], {}), "('HTTP')\n", (19170, 19178), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((19208, 19218), 'django.utils.translation.ugettext_lazy', '_', (['"""HTTPS"""'], {}), "('HTTPS')\n", (19209, 19218), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20174, 20182), 'django.utils.translation.ugettext_lazy', '_', (['"""GET"""'], {}), "('GET')\n", (20175, 20182), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21539, 21546), 'django.utils.translation.ugettext_lazy', '_', (['"""UP"""'], {}), "('UP')\n", (21540, 21546), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21605, 21614), 'django.utils.translation.ugettext_lazy', '_', (['"""DOWN"""'], {}), "('DOWN')\n", (21606, 21614), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24362, 24418), 'openstack_dashboard.api.lbaas.pool_health_monitor_create', 'api.lbaas.pool_health_monitor_create', (['request'], {}), '(request, **context)\n', (24398, 24418), False, 'from openstack_dashboard import api\n'), ((24535, 24562), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to add monitor."""'], {}), "('Unable to add monitor.')\n", (24536, 24562), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((25494, 25527), 
'openstack_dashboard.dashboards.project.loadbalancers.utils.get_monitor_display_name', 'utils.get_monitor_display_name', (['m'], {}), '(m)\n', (25524, 25527), False, 'from openstack_dashboard.dashboards.project.loadbalancers import utils\n'), ((25691, 25729), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve monitors list."""'], {}), "('Unable to retrieve monitors list.')\n", (25692, 25729), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((26974, 27007), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to associate monitor."""'], {}), "('Unable to associate monitor.')\n", (26975, 27007), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((27822, 27855), 'openstack_dashboard.dashboards.project.loadbalancers.utils.get_monitor_display_name', 'utils.get_monitor_display_name', (['m'], {}), '(m)\n', (27852, 27855), False, 'from openstack_dashboard.dashboards.project.loadbalancers import utils\n'), ((28019, 28057), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve monitors list."""'], {}), "('Unable to retrieve monitors list.')\n", (28020, 28057), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((29349, 29385), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to disassociate monitor."""'], {}), "('Unable to disassociate monitor.')\n", (29350, 29385), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12188, 12211), 'django.utils.translation.ugettext_lazy', '_', (['"""Member Instance(s)"""'], {}), "('Member Instance(s)')\n", (12189, 12211), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12544, 12563), 'django.utils.translation.ugettext_lazy', '_', (['"""Member Address"""'], {}), "('Member Address')\n", (12545, 12563), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20431, 20447), 'django.utils.translation.ugettext_lazy', '_', (['"""HTTP Method"""'], {}), "('HTTP Method')\n", (20432, 
20447), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20480, 20496), 'django.utils.translation.ugettext_lazy', '_', (['"""HTTP Method"""'], {}), "('HTTP Method')\n", (20481, 20496), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20773, 20781), 'django.utils.translation.ugettext_lazy', '_', (['"""URL"""'], {}), "('URL')\n", (20774, 20781), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20814, 20822), 'django.utils.translation.ugettext_lazy', '_', (['"""URL"""'], {}), "('URL')\n", (20815, 20822), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21376, 21407), 'django.utils.translation.ugettext_lazy', '_', (['"""Expected HTTP Status Codes"""'], {}), "('Expected HTTP Status Codes')\n", (21377, 21407), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21440, 21471), 'django.utils.translation.ugettext_lazy', '_', (['"""Expected HTTP Status Codes"""'], {}), "('Expected HTTP Status Codes')\n", (21441, 21471), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3994, 4011), 'django.utils.translation.ugettext_lazy', '_', (['"""%s (default)"""'], {}), "('%s (default)')\n", (3995, 4011), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((18395, 18438), 'openstack_dashboard.api.lbaas.member_create', 'api.lbaas.member_create', (['request'], {}), '(request, **context)\n', (18418, 18438), False, 'from openstack_dashboard import api\n')]
|