code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Waiter utility for api_lib.util.waiter.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.dataproc import exceptions
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.api_lib.dataproc.poller import (
abstract_operation_streamer_poller as dataproc_poller_base)
from googlecloudsdk.core import log
class BatchPoller(dataproc_poller_base.AbstractOperationStreamerPoller):
"""Poller for batch workload."""
def IsDone(self, batch):
"""See base class."""
if batch and batch.state in (
self.dataproc.messages.Batch.StateValueValuesEnum.SUCCEEDED,
self.dataproc.messages.Batch.StateValueValuesEnum.CANCELLED,
self.dataproc.messages.Batch.StateValueValuesEnum.FAILED):
return True
return False
def Poll(self, batch_ref):
"""See base class."""
request = (
self.dataproc.messages.DataprocProjectsLocationsBatchesGetRequest(
name=batch_ref))
try:
return self.dataproc.client.projects_locations_batches.Get(request)
except apitools_exceptions.HttpError as error:
log.warning('Get Batch failed:\n{}'.format(error))
if util.IsClientHttpException(error):
# Stop polling if encounter client Http error (4xx).
raise
def _GetResult(self, batch):
"""Handles errors.
Error handling for batch jobs. This happen after the batch reaches one of
the complete states.
Overrides.
Args:
batch: The batch resource.
Returns:
None. The result is directly output to log.err.
Raises:
JobTimeoutError: When waiter timed out.
JobError: When remote batch job is failed.
"""
if not batch:
# Batch resource is None but polling is considered done.
# This only happens when the waiter timed out.
raise exceptions.JobTimeoutError(
'Timed out while waiting for batch job.')
if (batch.state ==
self.dataproc.messages.Batch.StateValueValuesEnum.SUCCEEDED):
if not self.driver_log_streamer:
log.warning('Expected batch job output not found.')
elif self.driver_log_streamer.open:
# Remote output didn't end correctly.
log.warning('Batch job terminated, but output did not finish '
'streaming.')
elif (batch.state ==
self.dataproc.messages.Batch.StateValueValuesEnum.CANCELLED):
log.warning('Batch job is CANCELLED.')
else:
err_message = 'Batch job is FAILED.'
if batch.stateMessage:
err_message = '{} Detail: {}'.format(err_message, batch.stateMessage)
if err_message[-1] != '.':
err_message += '.'
raise exceptions.JobError(err_message)
# Nothing to return, since the result is directly output to users.
return None
def _GetOutputUri(self, batch):
"""See base class."""
if batch and batch.runtimeInfo and batch.runtimeInfo.outputUri:
return batch.runtimeInfo.outputUri
return None
| [
"googlecloudsdk.api_lib.dataproc.util.IsClientHttpException",
"googlecloudsdk.api_lib.dataproc.exceptions.JobError",
"googlecloudsdk.core.log.warning",
"googlecloudsdk.api_lib.dataproc.exceptions.JobTimeoutError"
] | [((2577, 2645), 'googlecloudsdk.api_lib.dataproc.exceptions.JobTimeoutError', 'exceptions.JobTimeoutError', (['"""Timed out while waiting for batch job."""'], {}), "('Timed out while waiting for batch job.')\n", (2603, 2645), False, 'from googlecloudsdk.api_lib.dataproc import exceptions\n'), ((1918, 1951), 'googlecloudsdk.api_lib.dataproc.util.IsClientHttpException', 'util.IsClientHttpException', (['error'], {}), '(error)\n', (1944, 1951), False, 'from googlecloudsdk.api_lib.dataproc import util\n'), ((2798, 2849), 'googlecloudsdk.core.log.warning', 'log.warning', (['"""Expected batch job output not found."""'], {}), "('Expected batch job output not found.')\n", (2809, 2849), False, 'from googlecloudsdk.core import log\n'), ((3146, 3184), 'googlecloudsdk.core.log.warning', 'log.warning', (['"""Batch job is CANCELLED."""'], {}), "('Batch job is CANCELLED.')\n", (3157, 3184), False, 'from googlecloudsdk.core import log\n'), ((3421, 3453), 'googlecloudsdk.api_lib.dataproc.exceptions.JobError', 'exceptions.JobError', (['err_message'], {}), '(err_message)\n', (3440, 3453), False, 'from googlecloudsdk.api_lib.dataproc import exceptions\n'), ((2946, 3019), 'googlecloudsdk.core.log.warning', 'log.warning', (['"""Batch job terminated, but output did not finish streaming."""'], {}), "('Batch job terminated, but output did not finish streaming.')\n", (2957, 3019), False, 'from googlecloudsdk.core import log\n')] |
#!/usr/bin/env python3
import turtle
t = turtle.Pen()
for x in range(400):
t.forward(x)
t.left(90)
input("press enter to exit")
| [
"turtle.Pen"
] | [((41, 53), 'turtle.Pen', 'turtle.Pen', ([], {}), '()\n', (51, 53), False, 'import turtle\n')] |
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from datadog_checks.utils.common import get_docker_hostname
HERE = os.path.dirname(os.path.abspath(__file__))
# Networking
HOST = get_docker_hostname()
GITLAB_TEST_PASSWORD = "<PASSWORD>"
GITLAB_LOCAL_PORT = 8086
GITLAB_LOCAL_PROMETHEUS_PORT = 8088
PROMETHEUS_ENDPOINT = "http://{}:{}/metrics".format(HOST, GITLAB_LOCAL_PROMETHEUS_PORT)
GITLAB_URL = "http://{}:{}".format(HOST, GITLAB_LOCAL_PORT)
GITLAB_TAGS = ['gitlab_host:{}'.format(HOST), 'gitlab_port:{}'.format(GITLAB_LOCAL_PORT)]
CUSTOM_TAGS = ['optional:tag1']
# Note that this is a subset of the ones defined in GitlabCheck
# When we stand up a clean test infrastructure some of those metrics might not
# be available yet, hence we validate a stable subset
ALLOWED_METRICS = [
'process_max_fds',
'process_open_fds',
'process_resident_memory_bytes',
'process_start_time_seconds',
'process_virtual_memory_bytes',
]
CONFIG = {
'init_config': {'allowed_metrics': ALLOWED_METRICS},
'instances': [
{
'prometheus_endpoint': PROMETHEUS_ENDPOINT,
'gitlab_url': GITLAB_URL,
'disable_ssl_validation': True,
'tags': list(CUSTOM_TAGS),
}
],
}
BAD_CONFIG = {
'init_config': {'allowed_metrics': ALLOWED_METRICS},
'instances': [
{
'prometheus_endpoint': 'http://{}:1234/metrics'.format(HOST),
'gitlab_url': 'http://{}:1234/ci'.format(HOST),
'disable_ssl_validation': True,
'tags': list(CUSTOM_TAGS),
}
],
}
| [
"os.path.abspath",
"datadog_checks.utils.common.get_docker_hostname"
] | [((259, 280), 'datadog_checks.utils.common.get_docker_hostname', 'get_docker_hostname', ([], {}), '()\n', (278, 280), False, 'from datadog_checks.utils.common import get_docker_hostname\n'), ((211, 236), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (226, 236), False, 'import os\n')] |
from __future__ import annotations
import argparse
import bisect
import os
from .extension.extract import extract_omop
def extract_omop_program() -> None:
parser = argparse.ArgumentParser(
description="An extraction tool for OMOP v5 sources"
)
parser.add_argument(
"omop_source", type=str, help="Path of the folder to the omop source",
)
parser.add_argument(
"umls_location", type=str, help="The location of the umls directory",
)
parser.add_argument(
"gem_location", type=str, help="The location of the gem directory",
)
parser.add_argument(
"rxnorm_location",
type=str,
help="The location of the rxnorm directory",
)
parser.add_argument(
"target_location", type=str, help="The place to store the extract",
)
parser.add_argument(
"--delimiter",
type=str,
default=",",
help="The delimiter used in the raw OMOP source",
)
parser.add_argument(
"--ignore_quotes",
dest="use_quotes",
action="store_false",
help="Ignore quotes while parsing",
)
parser.add_argument(
"--use_quotes",
dest="use_quotes",
action="store_true",
help="Use quotes while parsing",
)
parser.set_defaults(use_quotes=True)
args = parser.parse_args()
print(args)
extract_omop(
args.omop_source,
args.umls_location,
args.gem_location,
args.rxnorm_location,
args.target_location,
args.delimiter,
args.use_quotes,
)
| [
"argparse.ArgumentParser"
] | [((172, 249), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""An extraction tool for OMOP v5 sources"""'}), "(description='An extraction tool for OMOP v5 sources')\n", (195, 249), False, 'import argparse\n')] |
# Generated by Django 3.2.7 on 2021-09-16 12:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("chains", "0026_chain_l2"),
]
operations = [
migrations.AddField(
model_name="chain",
name="description",
field=models.CharField(blank=True, max_length=255),
),
]
| [
"django.db.models.CharField"
] | [((327, 371), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)'}), '(blank=True, max_length=255)\n', (343, 371), False, 'from django.db import migrations, models\n')] |
import random
import time
print("\n\nWelcome..\n")
time.sleep(2)
print('''
_______
---' ____)
(_____)
(_____)
(____)
---.__(___)
''')
print('''
_______
---' ____)____
______)
_______)
_______)
---.__________)
''')
print('''
_______
---' ____)____
______)
__________)
(____)
---.__(___)
''')
print("\n\n RULES\n")
print('''
Rock wins against scissors.
Scissors win against paper.
Paper wins against rock.\n''')
# d = {0: 'rock', 1: 'paper', 2: 'scissor'}
while True:
while True:
my_turn = int(
input("Choose one among 0: rock, 1: paper or 2: scissor? "))
if my_turn == 0 or my_turn == 1 or my_turn == 2:
break
else:
print("Invalid input.")
continue
computer_turn = random.randint(0, 2)
win_dict = {
0: "SCISSORS",
1: "PAPER",
2: "ROCK",
}
my_win_option = win_dict[my_turn]
if my_turn == computer_turn:
print("It's a draw..")
elif my_turn == my_win_option:
print("You wins")
else:
print("Computer Wins")
if input("New Game? ('Yes') or ('No'): ").casefold() == 'no':
break
| [
"random.randint",
"time.sleep"
] | [((51, 64), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (61, 64), False, 'import time\n'), ((836, 856), 'random.randint', 'random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (850, 856), False, 'import random\n')] |
# -*- coding: utf-8 -*-
import json
from rest_framework import generics
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework.decorators import api_view, permission_classes
from .serializers import (
AllUserClientSerializer, ClientSerializer, ClientFolderSerializer,
DocumentDetailSerializer, FolderSerializer, DocumentInfoSerializer,
ClientSimpleSerializer, FolderSimpleSerializer
)
from documents.models import UserClient, Document, FolderClient, Log
from users.permissions import IsAdminDelete
from utils.helpers import RequestInfo, LargeResultsSetPagination
@api_view(['GET'])
def user_by_name(request):
from django.shortcuts import HttpResponse
from django.template import defaultfilters
name = defaultfilters.slugify(request.GET.get('name'))
try:
serializer = ClientSerializer(
UserClient.objects.filter(slug__icontains=name).order_by('name'),
many=True)
return HttpResponse(
json.dumps({
"results": serializer.data,
"count": len(serializer.data)}),
content_type='application/json',
status=200
)
except Exception:
return HttpResponse(
json.dumps({"results": [], "count": 0}),
content_type='application/json',
status=200
)
@api_view(['GET'])
def document_by_name(request):
from django.shortcuts import HttpResponse
from django.template import defaultfilters
name = defaultfilters.slugify(request.GET.get('name'))
try:
serializer = DocumentDetailSerializer(
Document.objects.filter(slug__icontains=name).order_by('name'),
many=True
)
return HttpResponse(
json.dumps({
"results": serializer.data,
"count": len(serializer.data)
}),
content_type='application/json',
status=200
)
except Exception:
return HttpResponse(
json.dumps({"results": [], "count": 0}),
content_type='application/json',
status=200
)
class UserListAPIView(generics.ListAPIView):
"""UserListAPIView
Args:
:param order: (str) newer or older
:param limit: (int) limit pagination per page, default 10
"""
authentication_class = (JSONWebTokenAuthentication,)
queryset = UserClient.objects.all()
serializer_class = AllUserClientSerializer
pagination_class = LargeResultsSetPagination
def get_queryset(self):
order = 'newer'
if self.request.query_params.get('order') is not None:
order = self.request.query_params.get('order')
if order == 'newer':
queryset = self.queryset.order_by('-created')
else:
queryset = self.queryset.order_by('created')
return queryset
class ClientListAPIView(generics.ListAPIView):
"""ClientListAPIView
Args:
:param order: (str) newer or older
:param limit: (int) limit pagination per page, default 10
"""
authentication_class = (JSONWebTokenAuthentication,)
queryset = UserClient.objects.all().order_by('name')
serializer_class = ClientSerializer
pagination_class = LargeResultsSetPagination
def get_queryset(self):
order = None
queryset = self.queryset
if self.request.query_params.get('order') is not None:
order = self.request.query_params.get('order')
if order == 'newer':
queryset = self.queryset.order_by('-created')
else:
queryset = self.queryset.order_by('created')
return queryset
class ClientFolderListAPIView(generics.ListAPIView):
"""ClientFolderListAPIView
Args:
:param name: (str) the name of the client
"""
authentication_class = (JSONWebTokenAuthentication,)
serializer_class = ClientFolderSerializer
def get_queryset(self):
queryset = UserClient.objects.all()
if self.request.query_params.get('name') is not None:
queryset = queryset.filter(
slug=self.request.query_params.get('name'))
else:
queryset = queryset
return queryset
class DocumentListAPIView(generics.ListAPIView):
"""DocumentListAPIView
Args:
:param folder: (str) the name of the folder
"""
authentication_class = (JSONWebTokenAuthentication,)
serializer_class = DocumentDetailSerializer
queryset = Document.objects.all()
def get_queryset(self):
if self.request.query_params.get('folder') is not None:
queryset = self.queryset.filter(
folder=FolderClient.objects.get(
slug=self.request.query_params.get('folder')
)
)
else:
queryset = queryset
return queryset
class UserClientDetailAPIView(APIView):
permission_classes = (IsAdminDelete, )
def get_object(self, pk):
"""get_object
Description:
Get UserClient object or None
Args:
:param pk: (int) UserClient's pk
"""
req_inf = RequestInfo()
try:
return UserClient.objects.get(pk=pk)
except UserClient.DoesNotExist as e:
return e.args[0]
def put(self, request, pk):
"""UserClientDetailAPIView put
Description:
update client information
"""
import pudb
pudb.set_trace()
req_inf = RequestInfo()
user_client = self.get_object(pk)
if isinstance(user_client, UserClient):
serializer = ClientSerializer(user_client, data=request.data)
if serializer.is_valid():
try:
serializer.save()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'update_client'),
user=request.user,
description='Modificacion de cliente {} - {}'.format(
serializer.instance.id, serializer.instance.name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
return req_inf.status_400(serializer.errors)
else:
return req_inf.status_404(user_client)
def delete(self, request, pk=None):
"""UserClientDetailAPIView delete
Description:
delete clients
Args:
:param name: (str) client name
"""
req_inf = RequestInfo()
name = request.query_params.get('name', None)
if name is not None:
try:
client = UserClient.objects.get(slug=name)
folders = FolderClient.objects.filter(user=client)
last_id = client.id
last_name = client.name
for folder in folders:
documents = Document.objects.filter(folder=folder)
for doc in documents:
doc.document.delete()
doc.delete()
client.delete()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'delete_client'),
user=request.user,
description='Eliminacion de cliente {} - {}'.format(
last_id, last_name)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
else:
return req_inf.status_400('Nombre de cliente requerido')
class UserClientAPIView(APIView):
def get(self, request):
"""UserClientAPIView get
Description:
Get client id
Args:
:param name: (str) client slug name
"""
req_inf = RequestInfo()
name = request.GET.get('name', None)
try:
serializer = ClientSimpleSerializer(
UserClient.objects.get(slug=name))
return Response(serializer.data)
except Exception as e:
return req_inf.status_400(e.args[0])
def post(self, request):
"""UserClientAPIView post
Description:
Create clients
Args:
:param name: (str) the name of the client
"""
req_inf = RequestInfo()
serializer = ClientSerializer(data=request.data)
if serializer.is_valid():
try:
serializer.save()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'create_client'),
user=request.user,
description='Creacion de cliente {} - {}'.format(
serializer.instance.id, serializer.instance.name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status(e.args[0])
else:
return req_inf.status_400(serializer.errors)
class FolderAPIView(APIView):
def get(self, request):
"""FolderAPIView get
Description:
Get folder id
Args:
:param name: (str) folder slug name
"""
req_inf = RequestInfo()
name = request.GET.get('name', None)
if name is not None:
try:
serializer = FolderSimpleSerializer(
FolderClient.objects.get(slug=name))
return Response(serializer.data)
except Exception as e:
return req_inf.status_400(e.args[0])
else:
return req_inf.status_400('nombre de folder requerido')
def post(self, request):
"""FolderAPIView post
Description:
Create folders
Args:
:param name: (str) the name of the folder
:param user: (int) user id
"""
req_inf = RequestInfo()
serializer = FolderSerializer(data=request.data)
if serializer.is_valid():
try:
serializer.save()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'create_folder'),
user=request.user,
description='Creacion de folder {} - {}'.format(
serializer.instance.id, serializer.instance.name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
else:
return req_inf.status_400(serializer.errors)
class FolderClientAPIView(APIView):
permission_classes = (IsAdminDelete, )
def get_object(self, pk):
"""get_object
Description:
Get FolderClient object or None
Args:
:param pk: (int) FolderClient's pk
"""
req_inf = RequestInfo()
try:
return FolderClient.objects.get(pk=pk)
except FolderClient.DoesNotExist as e:
return e.args[0]
def put(self, request, pk=None):
"""FolderClientAPIView put
Description:
update client information
"""
req_inf = RequestInfo()
folder_client = self.get_object(pk)
if isinstance(folder_client, FolderClient):
serializer = FolderSerializer(folder_client, data=request.data)
if serializer.is_valid():
serializer.save()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'update_folder'),
user=request.user,
description='Modificacion de folder {} - {}'.format(
serializer.instance.id, serializer.instance.name
)
)
log.save()
return req_inf.status_200()
return req_inf.status_400(serializer.errors)
else:
return req_inf.status_404(folder_client)
def delete(self, request, pk=None):
"""FolderClientAPIView delete
Description:
delete folder
Args:
:param name: (str) folder name
"""
req_inf = RequestInfo()
name = request.query_params.get('name', None)
if name is not None:
try:
folder = FolderClient.objects.get(slug=name)
documents = Document.objects.filter(folder=folder)
last_id = folder.id
last_name = folder.name
for doc in documents:
doc.document.delete()
doc.delete()
folder.delete()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'update_client'),
user=request.user,
description='Modificacion de cliente {} - {}'.format(
last_id, last_name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
else:
return req_inf.status_400('Nombre de folder requerido')
class DocumentAPIView(APIView):
parser_classes = (MultiPartParser, FormParser)
def post(self, request):
"""DocumentAPIView post
Description:
Create Documents
Args:
:param name: (str) the name of the document
:param document: (file) document file
:param folder: (id) folder id
"""
req_inf = RequestInfo()
serializer = DocumentInfoSerializer(data=request.data)
if serializer.is_valid():
try:
serializer.save()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'create_document'),
user=request.user,
description='Creacion de Documento {} - {}'.format(
serializer.instance.id, serializer.instance.name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
else:
return req_inf.status_400(serializer.errors)
class DocumentDetailAPIView(APIView):
permission_classes = (IsAdminDelete, )
def get_object(self, pk):
"""get_object
Description:
Get Document object or None
Args:
:param pk: (int) Document's pk
"""
req_inf = RequestInfo()
try:
return Document.objects.get(pk=pk)
except Document.DoesNotExist as e:
return e.args[0]
def put(self, request, pk):
"""DocumentDetailAPIView put
Description:
update document information
"""
req_inf = RequestInfo()
document_cls = self.get_object(pk)
if isinstance(document_cls, Document):
serializer = DocumentInfoSerializer(
document_cls,
data=request.data
)
if serializer.is_valid():
try:
serializer.save()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'update_document'),
user=request.user,
description='Modificacion de documento {} - {}'.format(
serializer.instance.id, serializer.instance.name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
return req_inf.status_400(serializer.errors)
else:
return req_inf.status_404(document_cls)
def delete(self, request, pk=None):
"""DocumentDetailAPIView delete
Description:
Delete Documents
Args:
:param name: (str) the name of the document
"""
req_inf = RequestInfo()
name = request.query_params.get('name', None)
if name is not None:
try:
document = Document.objects.get(slug=name)
document.document.delete()
last_id = document.id
last_name = document.name
document.delete()
log = Log.objects.create(
action=Log.NOTIFICATION_TYPE.get_value(
'delete_document'),
user=request.user,
description='Eliminacion de documento {}- {}'.format(
las_id, last_name
)
)
log.save()
return req_inf.status_200()
except Exception as e:
return req_inf.status_400(e.args[0])
else:
return req_inf.status_400('Nombre de documento requerido')
| [
"documents.models.Document.objects.get",
"documents.models.FolderClient.objects.filter",
"utils.helpers.RequestInfo",
"documents.models.Document.objects.filter",
"documents.models.UserClient.objects.get",
"documents.models.UserClient.objects.all",
"pudb.set_trace",
"json.dumps",
"documents.models.Do... | [((765, 782), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (773, 782), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((1521, 1538), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (1529, 1538), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2575, 2599), 'documents.models.UserClient.objects.all', 'UserClient.objects.all', ([], {}), '()\n', (2597, 2599), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((4690, 4712), 'documents.models.Document.objects.all', 'Document.objects.all', ([], {}), '()\n', (4710, 4712), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((4165, 4189), 'documents.models.UserClient.objects.all', 'UserClient.objects.all', ([], {}), '()\n', (4187, 4189), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((5357, 5370), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (5368, 5370), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((5678, 5694), 'pudb.set_trace', 'pudb.set_trace', ([], {}), '()\n', (5692, 5694), False, 'import pudb\n'), ((5713, 5726), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (5724, 5726), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((6905, 6918), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (6916, 6918), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((8284, 8297), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (8295, 8297), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((8791, 8804), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (8802, 8804), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((9767, 9780), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (9778, 9780), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((10446, 10459), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (10457, 10459), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((11487, 11500), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (11498, 11500), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((11803, 11816), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (11814, 11816), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((12838, 12851), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (12849, 12851), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((14272, 14285), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (14283, 14285), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((15318, 15331), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (15329, 15331), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((15625, 15638), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (15636, 15638), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((16888, 16901), 'utils.helpers.RequestInfo', 'RequestInfo', ([], {}), '()\n', (16899, 16901), False, 'from utils.helpers import RequestInfo, LargeResultsSetPagination\n'), ((3326, 3350), 'documents.models.UserClient.objects.all', 'UserClient.objects.all', ([], {}), '()\n', (3348, 3350), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((5403, 5432), 'documents.models.UserClient.objects.get', 'UserClient.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (5425, 5432), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((8475, 8500), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (8483, 8500), False, 'from rest_framework.response import Response\n'), ((11533, 11564), 'documents.models.FolderClient.objects.get', 'FolderClient.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (11557, 11564), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((15364, 15391), 'documents.models.Document.objects.get', 'Document.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (15384, 15391), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((1399, 1438), 'json.dumps', 'json.dumps', (["{'results': [], 'count': 0}"], {}), "({'results': [], 'count': 0})\n", (1409, 1438), False, 'import json\n'), ((2187, 2226), 'json.dumps', 'json.dumps', (["{'results': [], 'count': 0}"], {}), "({'results': [], 'count': 0})\n", (2197, 2226), False, 'import json\n'), ((7044, 7077), 'documents.models.UserClient.objects.get', 'UserClient.objects.get', ([], {'slug': 'name'}), '(slug=name)\n', (7066, 7077), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((7104, 7144), 'documents.models.FolderClient.objects.filter', 'FolderClient.objects.filter', ([], {'user': 'client'}), '(user=client)\n', (7131, 7144), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((8421, 8454), 'documents.models.UserClient.objects.get', 'UserClient.objects.get', ([], {'slug': 'name'}), '(slug=name)\n', (8443, 8454), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((10005, 10030), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (10013, 10030), False, 'from rest_framework.response import Response\n'), ((12977, 13012), 'documents.models.FolderClient.objects.get', 'FolderClient.objects.get', ([], {'slug': 'name'}), '(slug=name)\n', (13001, 13012), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((13041, 13079), 'documents.models.Document.objects.filter', 'Document.objects.filter', ([], {'folder': 'folder'}), '(folder=folder)\n', (13064, 13079), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((17029, 17060), 'documents.models.Document.objects.get', 'Document.objects.get', ([], {'slug': 'name'}), '(slug=name)\n', (17049, 17060), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((1022, 1069), 'documents.models.UserClient.objects.filter', 'UserClient.objects.filter', ([], {'slug__icontains': 'name'}), '(slug__icontains=name)\n', (1047, 1069), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((1790, 1835), 'documents.models.Document.objects.filter', 'Document.objects.filter', ([], {'slug__icontains': 'name'}), '(slug__icontains=name)\n', (1813, 1835), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((7292, 7330), 'documents.models.Document.objects.filter', 'Document.objects.filter', ([], {'folder': 'folder'}), '(folder=folder)\n', (7315, 7330), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((9945, 9980), 'documents.models.FolderClient.objects.get', 'FolderClient.objects.get', ([], {'slug': 'name'}), '(slug=name)\n', (9969, 9980), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((7557, 7605), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""delete_client"""'], {}), "('delete_client')\n", (7588, 7605), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((9016, 9064), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""create_client"""'], {}), "('create_client')\n", (9047, 9064), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((10671, 10719), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""create_folder"""'], {}), "('create_folder')\n", (10702, 10719), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((12130, 12178), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""update_folder"""'], {}), "('update_folder')\n", (12161, 12178), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((13370, 13418), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""update_client"""'], {}), "('update_client')\n", (13401, 13418), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((14503, 14553), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""create_document"""'], {}), "('create_document')\n", (14534, 14553), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((17287, 17337), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""delete_document"""'], {}), "('delete_document')\n", (17318, 17337), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((6065, 6113), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""update_client"""'], {}), "('update_client')\n", (6096, 6113), False, 'from documents.models import UserClient, Document, FolderClient, Log\n'), ((16030, 16080), 'documents.models.Log.NOTIFICATION_TYPE.get_value', 'Log.NOTIFICATION_TYPE.get_value', (['"""update_document"""'], {}), "('update_document')\n", (16061, 16080), False, 'from documents.models import UserClient, Document, FolderClient, Log\n')] |
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from billing.utils import initiate_transaction
PAYTM_MERCHANT_ID = 'SNeEfa79194346659805'
PAYTM_MERCHANT_KEY = '<KEY>'
def initiate(request):
order_id = request.session['order_id']
response = initiate_transaction(order_id)
context = {
'mid': PAYTM_MERCHANT_ID,
'order_id': 2 * order_id,
'txn_token': response['body']['txnToken'],
}
return render(request, 'billing/show_payments.html', context)
@csrf_exempt
def processing(request):
return render(request, 'billing/transaction_in_process.html')
| [
"django.shortcuts.render",
"billing.utils.initiate_transaction"
] | [((293, 323), 'billing.utils.initiate_transaction', 'initiate_transaction', (['order_id'], {}), '(order_id)\n', (313, 323), False, 'from billing.utils import initiate_transaction\n'), ((476, 530), 'django.shortcuts.render', 'render', (['request', '"""billing/show_payments.html"""', 'context'], {}), "(request, 'billing/show_payments.html', context)\n", (482, 530), False, 'from django.shortcuts import render\n'), ((582, 636), 'django.shortcuts.render', 'render', (['request', '"""billing/transaction_in_process.html"""'], {}), "(request, 'billing/transaction_in_process.html')\n", (588, 636), False, 'from django.shortcuts import render\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# TODO: использовать http://www.cbr.ru/scripts/Root.asp?PrtId=SXML или разобраться с данными от query.yahooapis.com
# непонятны некоторые параметры
# TODO: сделать консоль
# TODO: сделать гуй
# TODO: сделать сервер
import requests
rs = requests.get('https://query.yahooapis.com/v1/public/yql?q=select+*+from+yahoo.finance.xchange+where+pair+=+%22USDRUB,EURRUB%22&format=json&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys&callback=')
print(rs.json())
for rate in rs.json()['query']['results']['rate']:
print(rate['Name'], rate['Rate'])
| [
"requests.get"
] | [((311, 524), 'requests.get', 'requests.get', (['"""https://query.yahooapis.com/v1/public/yql?q=select+*+from+yahoo.finance.xchange+where+pair+=+%22USDRUB,EURRUB%22&format=json&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys&callback="""'], {}), "(\n 'https://query.yahooapis.com/v1/public/yql?q=select+*+from+yahoo.finance.xchange+where+pair+=+%22USDRUB,EURRUB%22&format=json&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys&callback='\n )\n", (323, 524), False, 'import requests\n')] |
import abc
import logging
from datetime import datetime
from .log_adapter import adapt_log
LOGGER = logging.getLogger(__name__)
class RunnerWrapper(abc.ABC):
""" Runner wrapper class """
log = adapt_log(LOGGER, 'RunnerWrapper')
def __init__(self, func_runner, runner_id, key, tracker, log_exception=True):
""" Runner wrapper initializer
Args:
func_runner (FuncRunner): FuncRunner instance
runner_id (int): runner id
key (str): key to store the function output in output dict
tracker (dict): tracker dict
"""
self.func_runner = func_runner
self.id = runner_id
self.tracker = tracker
self.log_exception = log_exception
self.key = key
self.runner = None
self.__initialize_tracker()
def __str__(self):
return "<RunnerWrapper %s[#%s] %s>" % (self.key, self.id, self.func_runner)
def __initialize_tracker(self):
self.tracker[self.key] = dict()
def __update_tracker(self, started, finished, output, got_error, error):
""" Updates status in output dict """
self.tracker[self.key] = {
"started_time": started,
"finished_time": finished,
"execution_time": (finished - started).total_seconds(),
"output": output,
"got_error": got_error,
"error": error
}
def is_tracker_updated(self):
return True if self.tracker[self.key] else False
def run(self):
""" Runs function runner """
output, error, got_error = None, None, False
started = datetime.now()
try:
output = self.func_runner.run()
except Exception as e:
got_error = True
error = str(e)
if self.log_exception:
self.log.exception("Encountered an exception on {} {}".format(self, e))
finally:
finished = datetime.now()
self.__update_tracker(started, finished, output, got_error, error)
def join(self):
self.runner.join()
@abc.abstractmethod
def start(self):
""" Starts runner thread """
pass
@abc.abstractmethod
def is_running(self):
""" Returns True if runner is active else False """
pass
| [
"logging.getLogger",
"datetime.datetime.now"
] | [((102, 129), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (119, 129), False, 'import logging\n'), ((1642, 1656), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1654, 1656), False, 'from datetime import datetime\n'), ((1965, 1979), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1977, 1979), False, 'from datetime import datetime\n')] |
import scipy.stats as stat
import pandas as pd
import plotly.graph_objs as go
from hidrocomp.graphics.distribution_build import DistributionBuild
class GenPareto(DistributionBuild):
def __init__(self, title, shape, location, scale):
super().__init__(title, shape, location, scale)
def cumulative(self):
datas = self._data('cumulative')
data = [go.Scatter(x=datas['peaks'], y=datas['Cumulative'],
name=self.title, line=dict(color='rgb(128, 128, 128)',
width=2))]
bandxaxis = go.layout.XAxis(title="Vazão(m³/s)")
bandyaxis = go.layout.YAxis(title="Probabilidade")
layout = dict(title="GP - Acumulada: %s" % self.title,
showlegend=True,
width=945, height=827,
xaxis=bandxaxis,
yaxis=bandyaxis,
font=dict(family='Time New Roman', size=28, color='rgb(0,0,0)')
)
fig = dict(data=data, layout=layout)
return fig, data
def density(self):
datas = self._data('density')
data = [go.Scatter(x=datas['peaks'], y=datas['Density'],
name=self.title, line=dict(color='rgb(128, 128, 128)',
width=2))]
bandxaxis = go.layout.XAxis(title="Vazão(m³/s)")
bandyaxis = go.layout.YAxis(title="")
layout = dict(title="GP - Densidade: %s" % self.title,
showlegend=True,
width=945, height=827,
xaxis=bandxaxis,
yaxis=bandyaxis,
font=dict(family='Time New Roman', size=28, color='rgb(0,0,0)')
)
fig = dict(data=data, layout=layout)
return fig, data
def _data_density(self):
cumulative = self._data_cumulative()
density = stat.genpareto.pdf(cumulative['peaks'].values, self.shape,
loc=self.location, scale=self.scale)
dic = {'peaks': cumulative['peaks'].values, 'Density': density}
return pd.DataFrame(dic)
def _data_cumulative(self):
probability = list()
for i in range(1, 1000):
probability.append(i/1000)
quantiles = stat.genpareto.ppf(probability, self.shape,
loc=self.location,
scale=self.scale)
dic = {'peaks': quantiles, 'Cumulative': probability}
return pd.DataFrame(dic)
| [
"plotly.graph_objs.layout.XAxis",
"scipy.stats.genpareto.pdf",
"scipy.stats.genpareto.ppf",
"pandas.DataFrame",
"plotly.graph_objs.layout.YAxis"
] | [((602, 638), 'plotly.graph_objs.layout.XAxis', 'go.layout.XAxis', ([], {'title': '"""Vazão(m³/s)"""'}), "(title='Vazão(m³/s)')\n", (617, 638), True, 'import plotly.graph_objs as go\n'), ((659, 697), 'plotly.graph_objs.layout.YAxis', 'go.layout.YAxis', ([], {'title': '"""Probabilidade"""'}), "(title='Probabilidade')\n", (674, 697), True, 'import plotly.graph_objs as go\n'), ((1401, 1437), 'plotly.graph_objs.layout.XAxis', 'go.layout.XAxis', ([], {'title': '"""Vazão(m³/s)"""'}), "(title='Vazão(m³/s)')\n", (1416, 1437), True, 'import plotly.graph_objs as go\n'), ((1458, 1483), 'plotly.graph_objs.layout.YAxis', 'go.layout.YAxis', ([], {'title': '""""""'}), "(title='')\n", (1473, 1483), True, 'import plotly.graph_objs as go\n'), ((1985, 2085), 'scipy.stats.genpareto.pdf', 'stat.genpareto.pdf', (["cumulative['peaks'].values", 'self.shape'], {'loc': 'self.location', 'scale': 'self.scale'}), "(cumulative['peaks'].values, self.shape, loc=self.\n location, scale=self.scale)\n", (2003, 2085), True, 'import scipy.stats as stat\n'), ((2207, 2224), 'pandas.DataFrame', 'pd.DataFrame', (['dic'], {}), '(dic)\n', (2219, 2224), True, 'import pandas as pd\n'), ((2380, 2465), 'scipy.stats.genpareto.ppf', 'stat.genpareto.ppf', (['probability', 'self.shape'], {'loc': 'self.location', 'scale': 'self.scale'}), '(probability, self.shape, loc=self.location, scale=self.scale\n )\n', (2398, 2465), True, 'import scipy.stats as stat\n'), ((2618, 2635), 'pandas.DataFrame', 'pd.DataFrame', (['dic'], {}), '(dic)\n', (2630, 2635), True, 'import pandas as pd\n')] |
import traceback
import copy
import gc
from ctypes import c_void_p
import itertools
import array
import math
import numpy as np
from OpenGL.GL import *
from PyEngine3D.Common import logger
from PyEngine3D.Utilities import Singleton, GetClassName, Attributes, Profiler
from PyEngine3D.OpenGLContext import OpenGLContext
def get_numpy_dtype(data_type):
if GL_BYTE == data_type:
return np.int8
elif GL_UNSIGNED_BYTE == data_type:
return np.uint8
elif GL_UNSIGNED_BYTE == data_type:
return np.uint8
elif GL_SHORT == data_type:
return np.int16
elif GL_UNSIGNED_SHORT == data_type:
return np.uint16
elif GL_INT == data_type:
return np.int32
elif GL_UNSIGNED_INT == data_type:
return np.uint32
elif GL_UNSIGNED_INT64 == data_type:
return np.uint64
elif GL_FLOAT == data_type:
return np.float32
elif GL_DOUBLE == data_type:
return np.float64
logger.error('Cannot convert to numpy dtype. UNKOWN DATA TYPE(%s)', data_type)
return np.uint8
def get_internal_format(str_image_mode):
if str_image_mode == "RGBA":
return GL_RGBA8
elif str_image_mode == "RGB":
return GL_RGB8
elif str_image_mode == "L" or str_image_mode == "P" or str_image_mode == "R":
return GL_R8
else:
logger.error("get_internal_format::unknown image mode ( %s )" % str_image_mode)
return GL_RGBA8
def get_texture_format(str_image_mode):
if str_image_mode == "RGBA":
# R,G,B,A order. GL_BGRA is faster than GL_RGBA
return GL_RGBA # GL_BGRA
elif str_image_mode == "RGB":
return GL_RGB
elif str_image_mode == "L" or str_image_mode == "P" or str_image_mode == "R":
return GL_RED
else:
logger.error("get_texture_format::unknown image mode ( %s )" % str_image_mode)
return GL_RGBA
def get_image_mode(texture_internal_format):
if texture_internal_format in (GL_RGBA, GL_BGRA):
return "RGBA"
elif texture_internal_format in (GL_RGB, GL_BGR):
return "RGB"
elif texture_internal_format == GL_RG:
return "RG"
elif texture_internal_format in (GL_R8, GL_R16F, GL_RED, GL_DEPTH_STENCIL, GL_DEPTH_COMPONENT):
return "R"
elif texture_internal_format == GL_LUMINANCE:
return "L"
else:
logger.error("get_image_mode::unknown image format ( %s )" % texture_internal_format)
return "RGBA"
def CreateTexture(**texture_datas):
texture_class = texture_datas.get('texture_type', Texture2D)
if texture_class is not None:
if type(texture_class) is str:
texture_class = eval(texture_class)
return texture_class(**texture_datas)
return None
class Texture:
target = GL_TEXTURE_2D
default_wrap = GL_REPEAT
use_glTexStorage = False
def __init__(self, **texture_data):
self.name = texture_data.get('name')
self.attachment = False
self.image_mode = "RGBA"
self.internal_format = GL_RGBA8
self.texture_format = GL_RGBA
self.sRGB = False
self.clear_color = None
self.multisample_count = 0
self.width = 0
self.height = 0
self.depth = 1
self.data_type = GL_UNSIGNED_BYTE
self.min_filter = GL_LINEAR_MIPMAP_LINEAR
self.mag_filter = GL_LINEAR
self.enable_mipmap = False
self.wrap = self.default_wrap
self.wrap_s = self.default_wrap
self.wrap_t = self.default_wrap
self.wrap_r = self.default_wrap
self.buffer = -1
self.sampler_handle = -1
self.attribute = Attributes()
self.create_texture(**texture_data)
def create_texture(self, **texture_data):
if self.buffer != -1:
self.delete()
self.attachment = False
self.image_mode = texture_data.get('image_mode')
self.internal_format = texture_data.get('internal_format')
self.texture_format = texture_data.get('texture_format')
self.sRGB = texture_data.get('sRGB', False)
self.clear_color = texture_data.get('clear_color')
self.multisample_count = 0
if self.internal_format is None and self.image_mode:
self.internal_format = get_internal_format(self.image_mode)
if self.texture_format is None and self.image_mode:
self.texture_format = get_texture_format(self.image_mode)
if self.image_mode is None and self.texture_format:
self.image_mode = get_image_mode(self.texture_format)
# Convert to sRGB
if self.sRGB:
if self.internal_format == GL_RGB:
self.internal_format = GL_SRGB8
elif self.internal_format == GL_RGBA:
self.internal_format = GL_SRGB8_ALPHA8
if GL_RGBA == self.internal_format:
self.internal_format = GL_RGBA8
if GL_RGB == self.internal_format:
self.internal_format = GL_RGB8
self.width = int(texture_data.get('width', 0))
self.height = int(texture_data.get('height', 0))
self.depth = int(max(1, texture_data.get('depth', 1)))
self.data_type = texture_data.get('data_type', GL_UNSIGNED_BYTE)
self.min_filter = texture_data.get('min_filter', GL_LINEAR_MIPMAP_LINEAR)
self.mag_filter = texture_data.get('mag_filter', GL_LINEAR) # GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_NEAREST
mipmap_filters = (GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR_MIPMAP_NEAREST,
GL_NEAREST_MIPMAP_LINEAR, GL_NEAREST_MIPMAP_NEAREST)
self.enable_mipmap = self.min_filter in mipmap_filters
if self.target == GL_TEXTURE_2D_MULTISAMPLE:
self.enable_mipmap = False
self.wrap = texture_data.get('wrap', self.default_wrap) # GL_REPEAT, GL_CLAMP
self.wrap_s = texture_data.get('wrap_s')
self.wrap_t = texture_data.get('wrap_t')
self.wrap_r = texture_data.get('wrap_r')
self.buffer = -1
self.sampler_handle = -1
# texture parameter overwrite
# self.sampler_handle = glGenSamplers(1)
# glSamplerParameteri(self.sampler_handle, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE)
# glBindSampler(0, self.sampler_handle)
logger.info("Create %s : %s %dx%dx%d %s mipmap(%s)." % (
GetClassName(self), self.name, self.width, self.height, self.depth, str(self.internal_format),
'Enable' if self.enable_mipmap else 'Disable'))
self.attribute = Attributes()
def __del__(self):
pass
def delete(self):
logger.info("Delete %s : %s" % (GetClassName(self), self.name))
glDeleteTextures([self.buffer, ])
self.buffer = -1
def get_texture_info(self):
return dict(
texture_type=self.__class__.__name__,
width=self.width,
height=self.height,
depth=self.depth,
image_mode=self.image_mode,
internal_format=self.internal_format,
texture_format=self.texture_format,
data_type=self.data_type,
min_filter=self.min_filter,
mag_filter=self.mag_filter,
wrap=self.wrap,
wrap_s=self.wrap_s,
wrap_t=self.wrap_t,
wrap_r=self.wrap_r,
)
def get_save_data(self):
save_data = self.get_texture_info()
data = self.get_image_data()
if data is not None:
save_data['data'] = data
return save_data
def get_mipmap_size(self, level=0):
if 0 < level:
divider = 2.0 ** level
width = max(1, int(self.width / divider))
height = max(1, int(self.height / divider))
return width, height
return self.width, self.height
def get_image_data(self, level=0):
if self.target not in (GL_TEXTURE_2D, GL_TEXTURE_2D_ARRAY, GL_TEXTURE_3D):
return None
level = min(level, self.get_mipmap_count())
dtype = get_numpy_dtype(self.data_type)
try:
glBindTexture(self.target, self.buffer)
data = OpenGLContext.glGetTexImage(self.target, level, self.texture_format, self.data_type)
# convert to numpy array
if type(data) is bytes:
data = np.fromstring(data, dtype=dtype)
else:
data = np.array(data, dtype=dtype)
glBindTexture(self.target, 0)
return data
except:
logger.error(traceback.format_exc())
logger.error('%s failed to get image data.' % self.name)
logger.info('Try to glReadPixels.')
glBindTexture(self.target, self.buffer)
fb = glGenFramebuffers(1)
glBindFramebuffer(GL_FRAMEBUFFER, fb)
data = []
for layer in range(self.depth):
if GL_TEXTURE_2D == self.target:
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self.buffer, level)
elif GL_TEXTURE_3D == self.target:
glFramebufferTexture3D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_3D, self.buffer, level, layer)
elif GL_TEXTURE_2D_ARRAY == self.target:
glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, self.buffer, level, layer)
glReadBuffer(GL_COLOR_ATTACHMENT0)
width, height = self.get_mipmap_size(level)
pixels = glReadPixels(0, 0, width, height, self.texture_format, self.data_type)
# convert to numpy array
if type(pixels) is bytes:
pixels = np.fromstring(pixels, dtype=dtype)
data.append(pixels)
data = np.array(data, dtype=dtype)
glBindTexture(self.target, 0)
glBindFramebuffer(GL_FRAMEBUFFER, 0)
glDeleteFramebuffers(1, [fb, ])
return data
def get_mipmap_count(self):
factor = max(max(self.width, self.height), self.depth)
return math.floor(math.log2(factor)) + 1
def generate_mipmap(self):
if self.enable_mipmap:
glBindTexture(self.target, self.buffer)
glGenerateMipmap(self.target)
else:
logger.warn('%s disable to generate mipmap.' % self.name)
def texure_wrap(self, wrap):
glTexParameteri(self.target, GL_TEXTURE_WRAP_S, wrap)
glTexParameteri(self.target, GL_TEXTURE_WRAP_T, wrap)
glTexParameteri(self.target, GL_TEXTURE_WRAP_R, wrap)
def bind_texture(self, wrap=None):
if self.buffer == -1:
logger.warn("%s texture is invalid." % self.name)
return
glBindTexture(self.target, self.buffer)
if wrap is not None:
self.texure_wrap(wrap)
def bind_image(self, image_unit, level=0, access=GL_READ_WRITE):
if self.buffer == -1:
logger.warn("%s texture is invalid." % self.name)
return
# flag : GL_READ_WRITE, GL_WRITE_ONLY, GL_READ_ONLY
glBindImageTexture(image_unit, self.buffer, level, GL_FALSE, 0, access, self.internal_format)
def is_attached(self):
return self.attachment
def set_attachment(self, attachment):
self.attachment = attachment
def get_attribute(self):
self.attribute.set_attribute("name", self.name)
self.attribute.set_attribute("target", self.target)
self.attribute.set_attribute("width", self.width)
self.attribute.set_attribute("height", self.height)
self.attribute.set_attribute("depth", self.depth)
self.attribute.set_attribute("image_mode", self.image_mode)
self.attribute.set_attribute("internal_format", self.internal_format)
self.attribute.set_attribute("texture_format", self.texture_format)
self.attribute.set_attribute("data_type", self.data_type)
self.attribute.set_attribute("min_filter", self.min_filter)
self.attribute.set_attribute("mag_filter", self.mag_filter)
self.attribute.set_attribute("multisample_count", self.multisample_count)
self.attribute.set_attribute("wrap", self.wrap)
self.attribute.set_attribute("wrap_s", self.wrap_s)
self.attribute.set_attribute("wrap_t", self.wrap_t)
self.attribute.set_attribute("wrap_r", self.wrap_r)
return self.attribute
def set_attribute(self, attribute_name, attribute_value, item_info_history, attribute_index):
if hasattr(self, attribute_name) and "" != attribute_value:
setattr(self, attribute_name, eval(attribute_value))
if 'wrap' in attribute_name:
glBindTexture(self.target, self.buffer)
glTexParameteri(self.target, GL_TEXTURE_WRAP_S, self.wrap_s or self.wrap)
glTexParameteri(self.target, GL_TEXTURE_WRAP_T, self.wrap_t or self.wrap)
glTexParameteri(self.target, GL_TEXTURE_WRAP_R, self.wrap_r or self.wrap)
glBindTexture(self.target, 0)
return self.attribute
class Texture2D(Texture):
target = GL_TEXTURE_2D
def create_texture(self, **texture_data):
Texture.create_texture(self, **texture_data)
data = texture_data.get('data')
self.buffer = glGenTextures(1)
glBindTexture(GL_TEXTURE_2D, self.buffer)
if self.use_glTexStorage:
glTexStorage2D(GL_TEXTURE_2D,
self.get_mipmap_count(),
self.internal_format,
self.width, self.height)
if data is not None:
glTexSubImage2D(GL_TEXTURE_2D,
0,
0, 0,
self.width, self.height,
self.texture_format,
self.data_type,
data)
else:
glTexImage2D(GL_TEXTURE_2D,
0,
self.internal_format,
self.width,
self.height,
0,
self.texture_format,
self.data_type,
data)
if self.enable_mipmap:
glGenerateMipmap(GL_TEXTURE_2D)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.wrap_s or self.wrap)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.wrap_t or self.wrap)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.min_filter)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.mag_filter)
if self.clear_color is not None:
glClearTexImage(self.buffer, 0, self.texture_format, self.data_type, self.clear_color)
glBindTexture(GL_TEXTURE_2D, 0)
class Texture2DArray(Texture):
target = GL_TEXTURE_2D_ARRAY
def create_texture(self, **texture_data):
Texture.create_texture(self, **texture_data)
data = texture_data.get('data')
self.buffer = glGenTextures(1)
glBindTexture(GL_TEXTURE_2D_ARRAY, self.buffer)
if self.use_glTexStorage:
glTexStorage3D(GL_TEXTURE_2D_ARRAY,
self.get_mipmap_count(),
self.internal_format,
self.width, self.height, self.depth)
if data is not None:
glTexSubImage3D(GL_TEXTURE_2D_ARRAY,
0,
0, 0, 0,
self.width, self.height, self.depth,
self.texture_format,
self.data_type,
data)
else:
glTexImage3D(GL_TEXTURE_2D_ARRAY,
0,
self.internal_format,
self.width,
self.height,
self.depth,
0,
self.texture_format,
self.data_type,
data)
if self.enable_mipmap:
glGenerateMipmap(GL_TEXTURE_2D_ARRAY)
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, self.wrap_s or self.wrap)
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, self.wrap_t or self.wrap)
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, self.min_filter)
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, self.mag_filter)
glBindTexture(GL_TEXTURE_2D_ARRAY, 0)
class Texture3D(Texture):
target = GL_TEXTURE_3D
def create_texture(self, **texture_data):
Texture.create_texture(self, **texture_data)
data = texture_data.get('data')
self.buffer = glGenTextures(1)
glBindTexture(GL_TEXTURE_3D, self.buffer)
if self.use_glTexStorage:
glTexStorage3D(GL_TEXTURE_3D,
self.get_mipmap_count(),
self.internal_format,
self.width, self.height, self.depth)
if data is not None:
glTexSubImage3D(GL_TEXTURE_3D,
0,
0, 0, 0,
self.width, self.height, self.depth,
self.texture_format,
self.data_type,
data)
else:
glTexImage3D(GL_TEXTURE_3D,
0,
self.internal_format,
self.width,
self.height,
self.depth,
0,
self.texture_format,
self.data_type,
data)
if self.enable_mipmap:
glGenerateMipmap(GL_TEXTURE_3D)
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, self.wrap_s or self.wrap)
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, self.wrap_t or self.wrap)
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, self.wrap_r or self.wrap)
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, self.min_filter)
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, self.mag_filter)
glBindTexture(GL_TEXTURE_3D, 0)
class Texture2DMultiSample(Texture):
target = GL_TEXTURE_2D_MULTISAMPLE
def create_texture(self, **texture_data):
Texture.create_texture(self, **texture_data)
multisample_count = texture_data.get('multisample_count', 4)
self.multisample_count = multisample_count - (multisample_count % 4)
self.buffer = glGenTextures(1)
glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, self.buffer)
if self.use_glTexStorage:
glTexStorage2DMultisample(GL_TEXTURE_2D_MULTISAMPLE,
self.multisample_count,
self.internal_format,
self.width,
self.height,
GL_TRUE)
else:
glTexImage2DMultisample(GL_TEXTURE_2D_MULTISAMPLE,
self.multisample_count,
self.internal_format,
self.width,
self.height,
GL_TRUE)
glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, 0)
class TextureCube(Texture):
target = GL_TEXTURE_CUBE_MAP
default_wrap = GL_REPEAT
def __init__(self, **texture_data):
self.texture_positive_x = None
self.texture_negative_x = None
self.texture_positive_y = None
self.texture_negative_y = None
self.texture_positive_z = None
self.texture_negative_z = None
Texture.__init__(self, **texture_data)
def create_texture(self, **texture_data):
Texture.create_texture(self, **texture_data)
# If texture2d is None then create render target.
face_texture_datas = copy.copy(texture_data)
face_texture_datas.pop('name')
face_texture_datas['texture_type'] = Texture2D
self.texture_positive_x = texture_data.get('texture_positive_x', CreateTexture(name=self.name + "_right", **face_texture_datas))
self.texture_negative_x = texture_data.get('texture_negative_x', CreateTexture(name=self.name + "_left", **face_texture_datas))
self.texture_positive_y = texture_data.get('texture_positive_y', CreateTexture(name=self.name + "_top", **face_texture_datas))
self.texture_negative_y = texture_data.get('texture_negative_y', CreateTexture(name=self.name + "_bottom", **face_texture_datas))
self.texture_positive_z = texture_data.get('texture_positive_z', CreateTexture(name=self.name + "_front", **face_texture_datas))
self.texture_negative_z = texture_data.get('texture_negative_z', CreateTexture(name=self.name + "_back", **face_texture_datas))
self.buffer = glGenTextures(1)
glBindTexture(GL_TEXTURE_CUBE_MAP, self.buffer)
if self.use_glTexStorage:
glTexStorage2D(GL_TEXTURE_CUBE_MAP, self.get_mipmap_count(), self.internal_format, self.width, self.height)
self.createTexSubImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, self.texture_positive_x) # Right
self.createTexSubImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, self.texture_negative_x) # Left
self.createTexSubImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, self.texture_positive_y) # Top
self.createTexSubImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, self.texture_negative_y) # Bottom
self.createTexSubImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Z, self.texture_positive_z) # Front
self.createTexSubImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, self.texture_negative_z) # Back
else:
self.createTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, self.texture_positive_x) # Right
self.createTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, self.texture_negative_x) # Left
self.createTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, self.texture_positive_y) # Top
self.createTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, self.texture_negative_y) # Bottom
self.createTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Z, self.texture_positive_z) # Front
self.createTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, self.texture_negative_z) # Back
if self.enable_mipmap:
glGenerateMipmap(GL_TEXTURE_CUBE_MAP)
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, self.wrap_s or self.wrap)
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, self.wrap_t or self.wrap)
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, self.wrap_r or self.wrap)
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, self.min_filter)
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, self.mag_filter)
glBindTexture(GL_TEXTURE_CUBE_MAP, 0)
@staticmethod
def createTexImage2D(target_face, texture):
glTexImage2D(target_face,
0,
texture.internal_format,
texture.width,
texture.height,
0,
texture.texture_format,
texture.data_type,
texture.get_image_data())
@staticmethod
def createTexSubImage2D(target_face, texture):
glTexSubImage2D(target_face,
0,
0, 0,
texture.width, texture.height,
texture.texture_format,
texture.data_type,
texture.get_image_data())
def delete(self):
super(TextureCube, self).delete()
self.texture_positive_x.delete()
self.texture_negative_x.delete()
self.texture_positive_y.delete()
self.texture_negative_y.delete()
self.texture_positive_z.delete()
self.texture_negative_z.delete()
def get_save_data(self, get_image_data=True):
save_data = Texture.get_save_data(self)
save_data['texture_positive_x'] = self.texture_positive_x.name
save_data['texture_negative_x'] = self.texture_negative_x.name
save_data['texture_positive_y'] = self.texture_positive_y.name
save_data['texture_negative_y'] = self.texture_negative_y.name
save_data['texture_positive_z'] = self.texture_positive_z.name
save_data['texture_negative_z'] = self.texture_negative_z.name
return save_data
def get_attribute(self):
Texture.get_attribute(self)
self.attribute.set_attribute("texture_positive_x", self.texture_positive_x.name)
self.attribute.set_attribute("texture_negative_x", self.texture_negative_x.name)
self.attribute.set_attribute("texture_positive_y", self.texture_positive_y.name)
self.attribute.set_attribute("texture_negative_y", self.texture_negative_y.name)
self.attribute.set_attribute("texture_positive_z", self.texture_positive_z.name)
self.attribute.set_attribute("texture_negative_z", self.texture_negative_z.name)
return self.attribute
| [
"PyEngine3D.Common.logger.error",
"traceback.format_exc",
"PyEngine3D.OpenGLContext.OpenGLContext.glGetTexImage",
"PyEngine3D.Utilities.GetClassName",
"PyEngine3D.Common.logger.warn",
"math.log2",
"PyEngine3D.Utilities.Attributes",
"numpy.array",
"copy.copy",
"numpy.fromstring",
"PyEngine3D.Comm... | [((964, 1042), 'PyEngine3D.Common.logger.error', 'logger.error', (['"""Cannot convert to numpy dtype. UNKOWN DATA TYPE(%s)"""', 'data_type'], {}), "('Cannot convert to numpy dtype. UNKOWN DATA TYPE(%s)', data_type)\n", (976, 1042), False, 'from PyEngine3D.Common import logger\n'), ((3639, 3651), 'PyEngine3D.Utilities.Attributes', 'Attributes', ([], {}), '()\n', (3649, 3651), False, 'from PyEngine3D.Utilities import Singleton, GetClassName, Attributes, Profiler\n'), ((6519, 6531), 'PyEngine3D.Utilities.Attributes', 'Attributes', ([], {}), '()\n', (6529, 6531), False, 'from PyEngine3D.Utilities import Singleton, GetClassName, Attributes, Profiler\n'), ((9711, 9738), 'numpy.array', 'np.array', (['data'], {'dtype': 'dtype'}), '(data, dtype=dtype)\n', (9719, 9738), True, 'import numpy as np\n'), ((20172, 20195), 'copy.copy', 'copy.copy', (['texture_data'], {}), '(texture_data)\n', (20181, 20195), False, 'import copy\n'), ((8132, 8221), 'PyEngine3D.OpenGLContext.OpenGLContext.glGetTexImage', 'OpenGLContext.glGetTexImage', (['self.target', 'level', 'self.texture_format', 'self.data_type'], {}), '(self.target, level, self.texture_format, self.\n data_type)\n', (8159, 8221), False, 'from PyEngine3D.OpenGLContext import OpenGLContext\n'), ((10210, 10267), 'PyEngine3D.Common.logger.warn', 'logger.warn', (["('%s disable to generate mipmap.' % self.name)"], {}), "('%s disable to generate mipmap.' % self.name)\n", (10221, 10267), False, 'from PyEngine3D.Common import logger\n'), ((10570, 10619), 'PyEngine3D.Common.logger.warn', 'logger.warn', (["('%s texture is invalid.' % self.name)"], {}), "('%s texture is invalid.' % self.name)\n", (10581, 10619), False, 'from PyEngine3D.Common import logger\n'), ((10865, 10914), 'PyEngine3D.Common.logger.warn', 'logger.warn', (["('%s texture is invalid.' % self.name)"], {}), "('%s texture is invalid.' % self.name)\n", (10876, 10914), False, 'from PyEngine3D.Common import logger\n'), ((1341, 1420), 'PyEngine3D.Common.logger.error', 'logger.error', (["('get_internal_format::unknown image mode ( %s )' % str_image_mode)"], {}), "('get_internal_format::unknown image mode ( %s )' % str_image_mode)\n", (1353, 1420), False, 'from PyEngine3D.Common import logger\n'), ((1784, 1862), 'PyEngine3D.Common.logger.error', 'logger.error', (["('get_texture_format::unknown image mode ( %s )' % str_image_mode)"], {}), "('get_texture_format::unknown image mode ( %s )' % str_image_mode)\n", (1796, 1862), False, 'from PyEngine3D.Common import logger\n'), ((8313, 8345), 'numpy.fromstring', 'np.fromstring', (['data'], {'dtype': 'dtype'}), '(data, dtype=dtype)\n', (8326, 8345), True, 'import numpy as np\n'), ((8387, 8414), 'numpy.array', 'np.array', (['data'], {'dtype': 'dtype'}), '(data, dtype=dtype)\n', (8395, 8414), True, 'import numpy as np\n'), ((8558, 8614), 'PyEngine3D.Common.logger.error', 'logger.error', (["('%s failed to get image data.' % self.name)"], {}), "('%s failed to get image data.' % self.name)\n", (8570, 8614), False, 'from PyEngine3D.Common import logger\n'), ((8627, 8662), 'PyEngine3D.Common.logger.info', 'logger.info', (['"""Try to glReadPixels."""'], {}), "('Try to glReadPixels.')\n", (8638, 8662), False, 'from PyEngine3D.Common import logger\n'), ((9629, 9663), 'numpy.fromstring', 'np.fromstring', (['pixels'], {'dtype': 'dtype'}), '(pixels, dtype=dtype)\n', (9642, 9663), True, 'import numpy as np\n'), ((10004, 10021), 'math.log2', 'math.log2', (['factor'], {}), '(factor)\n', (10013, 10021), False, 'import math\n'), ((6338, 6356), 'PyEngine3D.Utilities.GetClassName', 'GetClassName', (['self'], {}), '(self)\n', (6350, 6356), False, 'from PyEngine3D.Utilities import Singleton, GetClassName, Attributes, Profiler\n'), ((6632, 6650), 'PyEngine3D.Utilities.GetClassName', 'GetClassName', (['self'], {}), '(self)\n', (6644, 6650), False, 'from PyEngine3D.Utilities import Singleton, GetClassName, Attributes, Profiler\n'), ((8522, 8544), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (8542, 8544), False, 'import traceback\n'), ((2349, 2438), 'PyEngine3D.Common.logger.error', 'logger.error', (["('get_image_mode::unknown image format ( %s )' % texture_internal_format)"], {}), "('get_image_mode::unknown image format ( %s )' %\n texture_internal_format)\n", (2361, 2438), False, 'from PyEngine3D.Common import logger\n')] |
"""Find stars that are both in our sample and in Shull+21"""
import numpy as np
import get_data
from matplotlib import pyplot as plt
data = get_data.get_merged_table()
shull = get_data.get_shull2021()
matches = [name for name in data["Name"] if name in shull["Name"]]
print(len(matches), " matches found")
print(matches)
data_comp = data[np.isin(data["Name"], matches)]
refs = data_comp['hiref']
shull_comp = shull[np.isin(shull["Name"], matches)]
def compare_shull(param):
plt.figure()
x = shull_comp[param]
y = data_comp[param]
plt.plot(x, x, color="k")
plt.scatter(x, y, c=refs)
plt.colorbar()
plt.ylabel("ours")
plt.xlabel("shull")
plt.title(param)
# compare_shull("nhtot")
compare_shull("EBV")
compare_shull("fh2")
compare_shull("nhi")
compare_shull("nh2")
plt.show()
| [
"get_data.get_merged_table",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.colorbar",
"numpy.isin",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.title",
"get_data.get_shull2021",
"matplotlib.pyplot.show"
] | [((142, 169), 'get_data.get_merged_table', 'get_data.get_merged_table', ([], {}), '()\n', (167, 169), False, 'import get_data\n'), ((178, 202), 'get_data.get_shull2021', 'get_data.get_shull2021', ([], {}), '()\n', (200, 202), False, 'import get_data\n'), ((806, 816), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (814, 816), True, 'from matplotlib import pyplot as plt\n'), ((341, 371), 'numpy.isin', 'np.isin', (["data['Name']", 'matches'], {}), "(data['Name'], matches)\n", (348, 371), True, 'import numpy as np\n'), ((418, 449), 'numpy.isin', 'np.isin', (["shull['Name']", 'matches'], {}), "(shull['Name'], matches)\n", (425, 449), True, 'import numpy as np\n'), ((483, 495), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (493, 495), True, 'from matplotlib import pyplot as plt\n'), ((551, 576), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'x'], {'color': '"""k"""'}), "(x, x, color='k')\n", (559, 576), True, 'from matplotlib import pyplot as plt\n'), ((581, 606), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y'], {'c': 'refs'}), '(x, y, c=refs)\n', (592, 606), True, 'from matplotlib import pyplot as plt\n'), ((611, 625), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (623, 625), True, 'from matplotlib import pyplot as plt\n'), ((630, 648), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ours"""'], {}), "('ours')\n", (640, 648), True, 'from matplotlib import pyplot as plt\n'), ((653, 672), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""shull"""'], {}), "('shull')\n", (663, 672), True, 'from matplotlib import pyplot as plt\n'), ((677, 693), 'matplotlib.pyplot.title', 'plt.title', (['param'], {}), '(param)\n', (686, 693), True, 'from matplotlib import pyplot as plt\n')] |
import numpy as np
import matplotlib.pyplot as plt
from collections import Iterable
mrkr1 = 12
mrkr1_inner = 8
fs = 18
# FUNCTION TO TURN NESTED LIST INTO 1D LIST
def flatten(lis):
for item in lis:
if isinstance(item, Iterable) and not isinstance(item, str):
for x in flatten(item):
yield x
else:
yield item
# FUNCTION TO DRAW TREES
def tree (base, graph, cycle, bias, visits, print_states_hex, docolour):
# find parents
parents = graph[base][0]
for each in cycle:
if each in parents:
parents.remove(each)
# add parents to visits
for a in parents:
visits.append(a)
greycol = (0.4,0.4,0.4)
# add co-ordinates to graph array
l = len(parents)
count = 0
amp = graph[base][2][0]
min_ang = graph[base][2][1]
max_ang = graph[base][2][2]
for b in parents:
graph[b][2][0] = amp + 1
graph[b][2][1] = min_ang + count*(max_ang-min_ang)/l
graph[b][2][2] = min_ang + (count+1)*(max_ang-min_ang)/l
count = count + 1
# draw those on the branches
for c in parents:
mid = (graph[c][2][1] + graph[c][2][2])/2
xco = graph[c][2][0]*np.cos(np.radians(mid))
yco = graph[c][2][0]*np.sin(np.radians(mid)) + bias
graph[c][2][3] = xco
graph[c][2][4] = yco
colo = plt.cm.hsv(c/32)
if c%2==0:
colo2 = plt.cm.flag(c/32.0)
else:
colo2 = plt.cm.prism(c/32.0)
if docolour == False:
colo = 'k'
colo2 = 'k'
#print ('Printing marker for c={0}'.format(c))
plt.plot(xco, yco, 'o', markersize=mrkr1, color=colo)
text_labels=0
if c==21 or c==10 or c==16 or c==0:
text_labels=1
if text_labels:
if print_states_hex:
tt = plt.text(xco+0.25,yco+0.4, '{:02X}'.format(c), ha='center', fontsize=fs)
else:
tt = plt.text(xco+0.25,yco+0.4, '{:d}'.format(c), ha='center', fontsize=fs)
tt.set_bbox(dict(boxstyle='round,pad=0.0', edgecolor='none', facecolor='white', alpha=0.6))
if c==21 or c==10:
selmarker = 'v'
if docolour == False:
colo2 = 'w'
elif c==16 or c==0:
#print ('Printing star for c={0}'.format(c)) # Note in one case, star is red and BG circle is red.
selmarker = '*'
if docolour == False:
selmarker = 'o'
colo2 = 'w'
else:
if (c==0):
print ('printing selmarker for c={0} with BLUE star'.format(c))
colo2='b'
else:
selmarker = 'o'
plt.plot (xco, yco, marker=selmarker, markersize=mrkr1_inner, color=colo2)
plt.arrow(xco, yco, graph[base][2][3]-xco, graph[base][2][4]-yco, overhang=0, length_includes_head=True, head_width=0.15, head_length=0.5, fc=greycol, ec=greycol)
for z in parents:
tree (z, graph, parents, bias, visits, print_states_hex, docolour)
def plot_states (net, ax, print_states_hex=False, kequalsn=True, docolour=True):
# Find where each state leads
targets = []
for i in range(2**5):
state = np.binary_repr(i,5)
# k=n
if kequalsn:
effect = net[int(state,2)] + net[int(state[1:]+state[0:1],2) + 32] + net[int(state[2:]+state[0:2],2) + 64] + net[int(state[3:]+state[0:3],2)+96] + net[int(state[4:]+state[0:4],2)+128]
else:
# k=n-1
effect = net[int(state[1:],2)] + net[int(state[:1]+state[2:],2)+16] + net[int(state[:2]+state[3:],2) + 32] + net[int(state[:3]+state[4],2)+48] + net[int(state[:4],2)+64]
# in decimal form
targets.append(int(effect[4]) + 2*int(effect[3]) + 4*int(effect[2]) + 8*int(effect[1]) + 16*int(effect[0]))
# graph[n] gives the parent nodes, child nodes and co-ordinates for the nth node.
# graph[n][2][0] gives polar amplitude, [1] is min angle, [2] is max angle, [3] is x, [4] is y
graph = [[[],[],[0,0,0,0,0]] for x in range(1024)]
targets = [int(z) for z in targets]
for y in range(32):
graph[y][1] = targets[y] # add child
graph[targets[y]][0].append(y) # add parent
visits = []
greycol = (0.4,0.4,0.4)
plt.xticks([])
plt.yticks([])
bases = []
for x in range(len(targets)):
visits = []
while not x in visits:
visits.append(x)
x = targets[x]
base = visits[visits.index(x):]
# It's awkward to format the list of bases in hex, so it's not implemented
if not base[0] in list(flatten(bases)):
bases.append(base)
for base in bases:
# find co-ordinates of base nodes
tot = len(base)
count = 0
for x in base:
graph[x][2][0] = 1
graph[x][2][1] = count*180/tot
graph[x][2][2] = (count+1)*180/tot
count = count + 1
# find max y-co for bias for next tree
bias = graph[0][2][4]
for node in graph:
if node[2][4]>bias:
bias = node[2][4]
bias = bias + 2
# draw those on the LC
tt = plt.text(0+0.7,bias-2+0.5,base, ha='center', fontsize=fs)
tt.set_bbox(dict(boxstyle='round,pad=0.0', edgecolor='none', facecolor='white', alpha=0.6))
circle = plt.Circle ((0,bias), 1, color=greycol, fill=False)
ax.add_artist(circle)
for x in base:
mid = (graph[x][2][1] + graph[x][2][2])/2.
graph[x][2][3] = graph[x][2][0]*np.cos(np.radians(mid))
graph[x][2][4] = graph[x][2][0]*np.sin(np.radians(mid)) + bias
colo = plt.cm.hsv(x/32)
if x%2==0:
colo2 = plt.cm.flag(x/32.0)
else:
colo2 = plt.cm.prism(x/32.0)
#plt.plot(graph[x][2][3], graph[x][2][4], 'o', color=(0,0,0), markersize=mrkr1)
#print ('Printing marker for c={0}'.format(x))
if docolour == True:
plt.plot(graph[x][2][3], graph[x][2][4], 'o', color=colo, markersize=mrkr1)
else:
plt.plot(graph[x][2][3], graph[x][2][4], 'o', color='k', markersize=mrkr1)
if docolour == False:
colo2 = 'k'
if x==21 or x==10:
selmarker = 'v'
if docolour == False:
colo2 = 'w'
elif x==16 or x==0:
selmarker = '*'
if docolour == False:
selmarker = 'o'
colo2 = 'w'
else:
if x==0:
print ('printing selmarker for x={0} with BLUE star'.format(x))
colo2='b' # special case
else:
selmarker = 'o'
plt.plot(graph[x][2][3], graph[x][2][4], marker=selmarker, color=colo2, markersize=mrkr1_inner)
for x in base:
tree (x, graph, base, bias, visits, print_states_hex, docolour)
# do it again for the next set
# find max y and x to get axis right
max_x = graph[0][2][3]
max_y = graph[0][2][4]
min_x = max_x
for node in graph:
if node[2][4] > max_y:
max_y = node[2][4]
if node[2][3] > max_x:
max_x = node[2][3]
#plt.plot(graph[21][2][3], graph[21][2][4],'v',color='k', markersize=mrkr1-2) # final ant
#plt.plot(graph[10][2][3], graph[10][2][4],'v',color='w', markersize=mrkr1-2) # final post
#plt.plot(graph[16][2][3], graph[16][2][4],'*',color='k', markersize=mrkr1-2) # initial ant
#plt.plot(graph[0][2][3], graph[0][2][4],'*',color='w', markersize=mrkr1-2) # initial post
# Modify use of the area inside the graph
ymin,ymax = plt.ylim()
plt.ylim(ymin-4,ymax+1)
xmin,xmax = plt.xlim()
plt.xlim(xmin-0,xmax+0)
| [
"numpy.radians",
"matplotlib.pyplot.text",
"matplotlib.pyplot.Circle",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.cm.flag",
"numpy.binary_repr",
"matplotlib.pyplot.cm.hsv",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim",
"matplotlib... | [((4339, 4353), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (4349, 4353), True, 'import matplotlib.pyplot as plt\n'), ((4358, 4372), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (4368, 4372), True, 'import matplotlib.pyplot as plt\n'), ((7847, 7857), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (7855, 7857), True, 'import matplotlib.pyplot as plt\n'), ((7862, 7890), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(ymin - 4)', '(ymax + 1)'], {}), '(ymin - 4, ymax + 1)\n', (7870, 7890), True, 'import matplotlib.pyplot as plt\n'), ((7902, 7912), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (7910, 7912), True, 'import matplotlib.pyplot as plt\n'), ((7917, 7945), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(xmin - 0)', '(xmax + 0)'], {}), '(xmin - 0, xmax + 0)\n', (7925, 7945), True, 'import matplotlib.pyplot as plt\n'), ((1372, 1390), 'matplotlib.pyplot.cm.hsv', 'plt.cm.hsv', (['(c / 32)'], {}), '(c / 32)\n', (1382, 1390), True, 'import matplotlib.pyplot as plt\n'), ((1643, 1696), 'matplotlib.pyplot.plot', 'plt.plot', (['xco', 'yco', '"""o"""'], {'markersize': 'mrkr1', 'color': 'colo'}), "(xco, yco, 'o', markersize=mrkr1, color=colo)\n", (1651, 1696), True, 'import matplotlib.pyplot as plt\n'), ((2744, 2817), 'matplotlib.pyplot.plot', 'plt.plot', (['xco', 'yco'], {'marker': 'selmarker', 'markersize': 'mrkr1_inner', 'color': 'colo2'}), '(xco, yco, marker=selmarker, markersize=mrkr1_inner, color=colo2)\n', (2752, 2817), True, 'import matplotlib.pyplot as plt\n'), ((2828, 3002), 'matplotlib.pyplot.arrow', 'plt.arrow', (['xco', 'yco', '(graph[base][2][3] - xco)', '(graph[base][2][4] - yco)'], {'overhang': '(0)', 'length_includes_head': '(True)', 'head_width': '(0.15)', 'head_length': '(0.5)', 'fc': 'greycol', 'ec': 'greycol'}), '(xco, yco, graph[base][2][3] - xco, graph[base][2][4] - yco,\n overhang=0, length_includes_head=True, head_width=0.15, head_length=0.5,\n fc=greycol, ec=greycol)\n', (2837, 3002), True, 'import matplotlib.pyplot as plt\n'), ((3265, 3285), 'numpy.binary_repr', 'np.binary_repr', (['i', '(5)'], {}), '(i, 5)\n', (3279, 3285), True, 'import numpy as np\n'), ((5259, 5324), 'matplotlib.pyplot.text', 'plt.text', (['(0 + 0.7)', '(bias - 2 + 0.5)', 'base'], {'ha': '"""center"""', 'fontsize': 'fs'}), "(0 + 0.7, bias - 2 + 0.5, base, ha='center', fontsize=fs)\n", (5267, 5324), True, 'import matplotlib.pyplot as plt\n'), ((5434, 5485), 'matplotlib.pyplot.Circle', 'plt.Circle', (['(0, bias)', '(1)'], {'color': 'greycol', 'fill': '(False)'}), '((0, bias), 1, color=greycol, fill=False)\n', (5444, 5485), True, 'import matplotlib.pyplot as plt\n'), ((1428, 1449), 'matplotlib.pyplot.cm.flag', 'plt.cm.flag', (['(c / 32.0)'], {}), '(c / 32.0)\n', (1439, 1449), True, 'import matplotlib.pyplot as plt\n'), ((1482, 1504), 'matplotlib.pyplot.cm.prism', 'plt.cm.prism', (['(c / 32.0)'], {}), '(c / 32.0)\n', (1494, 1504), True, 'import matplotlib.pyplot as plt\n'), ((5756, 5774), 'matplotlib.pyplot.cm.hsv', 'plt.cm.hsv', (['(x / 32)'], {}), '(x / 32)\n', (5766, 5774), True, 'import matplotlib.pyplot as plt\n'), ((6905, 7004), 'matplotlib.pyplot.plot', 'plt.plot', (['graph[x][2][3]', 'graph[x][2][4]'], {'marker': 'selmarker', 'color': 'colo2', 'markersize': 'mrkr1_inner'}), '(graph[x][2][3], graph[x][2][4], marker=selmarker, color=colo2,\n markersize=mrkr1_inner)\n', (6913, 7004), True, 'import matplotlib.pyplot as plt\n'), ((1222, 1237), 'numpy.radians', 'np.radians', (['mid'], {}), '(mid)\n', (1232, 1237), True, 'import numpy as np\n'), ((5820, 5841), 'matplotlib.pyplot.cm.flag', 'plt.cm.flag', (['(x / 32.0)'], {}), '(x / 32.0)\n', (5831, 5841), True, 'import matplotlib.pyplot as plt\n'), ((5882, 5904), 'matplotlib.pyplot.cm.prism', 'plt.cm.prism', (['(x / 32.0)'], {}), '(x / 32.0)\n', (5894, 5904), True, 'import matplotlib.pyplot as plt\n'), ((6103, 6178), 'matplotlib.pyplot.plot', 'plt.plot', (['graph[x][2][3]', 'graph[x][2][4]', '"""o"""'], {'color': 'colo', 'markersize': 'mrkr1'}), "(graph[x][2][3], graph[x][2][4], 'o', color=colo, markersize=mrkr1)\n", (6111, 6178), True, 'import matplotlib.pyplot as plt\n'), ((6213, 6287), 'matplotlib.pyplot.plot', 'plt.plot', (['graph[x][2][3]', 'graph[x][2][4]', '"""o"""'], {'color': '"""k"""', 'markersize': 'mrkr1'}), "(graph[x][2][3], graph[x][2][4], 'o', color='k', markersize=mrkr1)\n", (6221, 6287), True, 'import matplotlib.pyplot as plt\n'), ((1275, 1290), 'numpy.radians', 'np.radians', (['mid'], {}), '(mid)\n', (1285, 1290), True, 'import numpy as np\n'), ((5645, 5660), 'numpy.radians', 'np.radians', (['mid'], {}), '(mid)\n', (5655, 5660), True, 'import numpy as np\n'), ((5713, 5728), 'numpy.radians', 'np.radians', (['mid'], {}), '(mid)\n', (5723, 5728), True, 'import numpy as np\n')] |
import pytest
import wtforms
from dmutils.forms.fields import DMBooleanField
from dmutils.forms.widgets import DMSelectionButtonBase
class BooleanForm(wtforms.Form):
field = DMBooleanField()
@pytest.fixture
def form():
return BooleanForm()
def test_value_is_a_list(form):
assert isinstance(form.field.value, list)
def test_value_is_empty_list_if_there_is_no_selection(form):
assert form.field.value == []
def test_can_be_used_with_a_different_kind_of_selection_button():
class BooleanForm(wtforms.Form):
field = DMBooleanField(widget=DMSelectionButtonBase(type="boolean"))
form = BooleanForm()
assert form.field.widget.type == "boolean"
| [
"dmutils.forms.fields.DMBooleanField",
"dmutils.forms.widgets.DMSelectionButtonBase"
] | [((182, 198), 'dmutils.forms.fields.DMBooleanField', 'DMBooleanField', ([], {}), '()\n', (196, 198), False, 'from dmutils.forms.fields import DMBooleanField\n'), ((574, 611), 'dmutils.forms.widgets.DMSelectionButtonBase', 'DMSelectionButtonBase', ([], {'type': '"""boolean"""'}), "(type='boolean')\n", (595, 611), False, 'from dmutils.forms.widgets import DMSelectionButtonBase\n')] |
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup, SoupStrainer
import bs4
import requests
import csv
import pandas as pd
import os
import re
"""
Module 3 : retrieve text from each article & basic preprocess
"""
ignore_sents = ['Les associations Vikidia', 'Répondre au sondage', 'Aller à :',
'Récupérée de « https', 'Accédez aux articles', 'Catégorie :',
'Le contenu est disponible sous licence', 'Sur d’autres projets',
'Imprimer / exporter', 'Créer un livre', 'Vikidia, l’encyclopédie',
'Attention, à ne pas confondre', 'Cet article est à compléter', '(Aide)',
'Pages liées', 'À propos de Vikidia', 'Pages liées', 'Espaces de noms',
'PageDiscussion', 'LireModifierModifier', 'AccueilPages par thèmes',
'Article au hasardBanque d’imagesLe SavantDemander un article',
'Modifications récentesCommunautéBavardagesAide', 'Vikidia a besoin de toi',
'Pour rester indépendant, nous refusons' ,'Tu peux soutenir Vikidia',
'Tu peux également financer gratuitement', 'Dernière modification de cette page',
'Non connectéDiscussion', 'Notes et références', '↑ Table', 'Voir aussi[',
'Portail de la littérature —', 'Pour aller plus loin[', 'Portail des sciences —',
'Vikiliens[', 'Lien interne[', 'Lien externe[', '• modifier',
'Soit vous avez mal écrit le titre', "L'article a peut-être été supprimé",
"Il peut aussi avoir été renommé sans création", 'Si vous avez récemment créé cet article',
'Créer le wikicode', 'dans les autres articles (aide)',
'Consultez la liste des articles dont le titre', "Cherchez d'autres pages de Wikipédia",
"Wikipédia ne possède pas d'article", 'Cet article est une ébauche',
'Vous pouvez partager vos connaissances en ']
ignore_single_items = ['Confidentialité', 'Avertissements', 'Version mobile', 'Plus', 'Chercher',
'Navigation', 'Contribuer', 'Espaces de noms', 'PageDiscussion',
'Variantes', 'Affichages', 'Menu de navigation', 'Outils personnels',
'Vikidia', 'Wikipédia', 'Outils', 'Notes pour les rédacteurs :',
'Commons (images et médias)', 'Wikivoyage (guides de voyage)',
'Wikidata (base de données)']
ignore_single_items_wiki = ['Aller au contenu', 'Rechercher', 'Imprimer / exporter', 'Modifier les liens',
'Outils personnels', 'Menu de navigation', 'Navigation', 'Contribuer', 'Outils', 'Espaces de noms', 'Variantes',
'Affichages', 'Liens externes', 'Politique de confidentialité', "À propos de Wikipédia",
"Contact", "Développeurs", 'Statistiques', 'Déclaration sur les témoins (cookies)',
'Précédé par', 'Suivi par', 'Références', 'modifier', 'Lien externe', 'Voir aussi']
ignore_sents_wiki = ['Aragonés | ', 'Un article de Wikipédia', 'AccueilPortails', 'Débuter sur Wikipédia', 'Dans d’autres projets',
'Pour les articles homonymes, voir', 'Wikimedia Commons', 'Afficher / masquer', 'EsperantoEspañol',
'EnglishEspañol', 'EnglishEsperanto', 'Vous lisez un « bon article »', 'Bahasa Indonesia', 'La dernière modification de cette page', 'Dans d’autres projets', 'Wikimedia CommonsWikiquote', 'ArticleDiscussion',
'— Wikipédia', 'Non connectéDiscussion', 'Pages liéesSuivi des pages', 'Créer un livre',
'LireModifier', 'Ce document provient de «', 'Catégories cachées : Pages', "Droit d'auteur : les",
"Voyez les conditions d’utilisation pour plus", 'marque déposée de la Wikimedia Foundation',
'organisation de bienfaisance régie par le paragraphe', 'Cette section est vide, insuffisamment',
'(voir la liste des auteurs)', '(comparer avec la version actuelle)', 'Pour toute information complémentaire,',
'/Articles liés']
def content(f, outname):
"""retrieve text from each article
Parameters :
------------
f : str
csv file containing article urls
outname : str
output name
"""
if not os.path.exists('corpus'):
os.makedirs('corpus')
else:
pass
df_content = pd.read_csv(f, sep='\t', encoding='utf-8', quoting=csv.QUOTE_NONE)
print(f'Columns content in input file : TITLE | URL | URL_WIKIPEDIA | URL_VIKIDIA\n')
print("**This can take a while")
print("Extracting article text content from Vikidia ...")
df_content['vikidia_text'] = df_content['URL_VIKIDIA'].apply(lambda x: BeautifulSoup(requests.get(x).text, features="lxml").text.strip())
print("Extracting article text content from Wikipedia ...")
df_content['wikipedia_text'] = df_content['URL_WIKIPEDIA'].apply(lambda x: BeautifulSoup(requests.get(x).text, features="lxml").text.strip())
def clean(col):
'''basic preprocess specific to wiki data'''
df_content[col] = df_content[col].apply(lambda x: re.sub(r'\n+', '__sent__', x).strip()) # remove succeeding line breaks
df_content[col] = df_content[col].apply(lambda x: re.sub('\[.+?\]', '', x)) # remove items inside brackets
df_content[col] = df_content[col].apply(lambda x: [sent for sent in x.split("__sent__") if len(sent) > 3]) # Ignore sent in article text is len < 3
df_content[col] = df_content[col].apply(lambda x: [s for s in x if not any(item in s for item in ignore_sents)])
df_content[col] = df_content[col].apply(lambda x: [item for item in x if item.strip() not in ignore_single_items])
df_content[col] = df_content[col].apply(lambda x: [s for s in x if not any(item in s for item in ignore_sents_wiki)])
df_content[col] = df_content[col].apply(lambda x: [item for item in x if item.strip() not in ignore_single_items_wiki])
df_content[col] = df_content[col].apply(lambda x: x[1:] if 'langues' in x[0] else x[0:]) # ignore first item in list (12 langues, 34 langues...)
if 'vikidia' in col:
df_content[col] = df_content[col].apply(lambda x: x[1:]) # skip article title in position 0
if 'wikipedia' in col:
df_content[col] = df_content[col].apply(lambda x: x[1:] if x[0] == x[1] else x) # remove titles at the beginning ([Acacia, Acacia, Article text...])
df_content[col] = df_content[col].apply(lambda x: [y.strip() for y in x]) # remove spaces at the begnning of sent
df_content[col] = df_content[col].apply(lambda x: [y for y in x if not y.startswith('Portail d')]) # ignore items in list if starts with
df_content[col] = df_content[col].apply(lambda x: [re.sub(r"(\w+[a-z])+([A-ZÂÊÎÔÛÄËÏÖÜÀÆÇÉÈŒÙ]\w+|[0-9])", r"\1 \2", y) for y in x]) # split overlapping words (wordWord)
df_content[col] = df_content[col].apply(lambda x: [y for y in x if not y.startswith("↑ ")])
df_content[col] = df_content[col].apply(lambda x: [y.replace("\xa0"," ") for y in x])
df_content[col] = df_content[col].apply(lambda x: [y for y in x if len(y.split()) > 3]) # ignore items in list that only contain 3 words e.g.: ['Ceinture de Kuiper', 'Cubewano', 'Plutino', 'Objet épars', ...]
df_content[col] = df_content[col].apply(lambda x: [y for y in x if not y.startswith("v · m")])
clean('vikidia_text')
clean('wikipedia_text')
output_name = "corpus/" + outname + ".tsv"
df_content.to_csv(output_name, sep='\t', encoding='utf-8', quoting=csv.QUOTE_NONE)
print("File(s) saved in /corpus")
| [
"os.path.exists",
"os.makedirs",
"pandas.read_csv",
"requests.get",
"re.sub"
] | [((3919, 3985), 'pandas.read_csv', 'pd.read_csv', (['f'], {'sep': '"""\t"""', 'encoding': '"""utf-8"""', 'quoting': 'csv.QUOTE_NONE'}), "(f, sep='\\t', encoding='utf-8', quoting=csv.QUOTE_NONE)\n", (3930, 3985), True, 'import pandas as pd\n'), ((3840, 3864), 'os.path.exists', 'os.path.exists', (['"""corpus"""'], {}), "('corpus')\n", (3854, 3864), False, 'import os\n'), ((3868, 3889), 'os.makedirs', 'os.makedirs', (['"""corpus"""'], {}), "('corpus')\n", (3879, 3889), False, 'import os\n'), ((4755, 4781), 're.sub', 're.sub', (['"""\\\\[.+?\\\\]"""', '""""""', 'x'], {}), "('\\\\[.+?\\\\]', '', x)\n", (4761, 4781), False, 'import re\n'), ((6196, 6266), 're.sub', 're.sub', (['"""(\\\\w+[a-z])+([A-ZÂÊÎÔÛÄËÏÖÜÀÆÇÉÈŒÙ]\\\\w+|[0-9])"""', '"""\\\\1 \\\\2"""', 'y'], {}), "('(\\\\w+[a-z])+([A-ZÂÊÎÔÛÄËÏÖÜÀÆÇÉÈŒÙ]\\\\w+|[0-9])', '\\\\1 \\\\2', y)\n", (6202, 6266), False, 'import re\n'), ((4632, 4661), 're.sub', 're.sub', (['"""\\\\n+"""', '"""__sent__"""', 'x'], {}), "('\\\\n+', '__sent__', x)\n", (4638, 4661), False, 'import re\n'), ((4254, 4269), 'requests.get', 'requests.get', (['x'], {}), '(x)\n', (4266, 4269), False, 'import requests\n'), ((4460, 4475), 'requests.get', 'requests.get', (['x'], {}), '(x)\n', (4472, 4475), False, 'import requests\n')] |
import io
import os
from svgutils import transform as svg_utils
import qrcode.image.svg
from cwa_qr import generate_qr_code, CwaEventDescription
class CwaPoster(object):
POSTER_PORTRAIT = 'portrait'
POSTER_LANDSCAPE = 'landscape'
TRANSLATIONS = {
POSTER_PORTRAIT: {
'file': 'poster/portrait.svg',
'x': 80,
'y': 60,
'scale': 6
},
POSTER_LANDSCAPE: {
'file': 'poster/landscape.svg',
'x': 42,
'y': 120,
'scale': 4.8
}
}
def generate_poster(event_description: CwaEventDescription, template: CwaPoster) -> svg_utils.SVGFigure:
qr = generate_qr_code(event_description)
svg = qr.make_image(image_factory=qrcode.image.svg.SvgPathImage)
svg_bytes = io.BytesIO()
svg.save(svg_bytes)
poster = svg_utils.fromfile('{}/{}'.format(
os.path.dirname(os.path.abspath(__file__)),
CwaPoster.TRANSLATIONS[template]['file']
))
overlay = svg_utils.fromstring(svg_bytes.getvalue().decode('UTF-8')).getroot()
overlay.moveto(
CwaPoster.TRANSLATIONS[template]['x'],
CwaPoster.TRANSLATIONS[template]['y'],
CwaPoster.TRANSLATIONS[template]['scale']
)
poster.append([overlay])
return poster
| [
"os.path.abspath",
"cwa_qr.generate_qr_code",
"io.BytesIO"
] | [((682, 717), 'cwa_qr.generate_qr_code', 'generate_qr_code', (['event_description'], {}), '(event_description)\n', (698, 717), False, 'from cwa_qr import generate_qr_code, CwaEventDescription\n'), ((803, 815), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (813, 815), False, 'import io\n'), ((913, 938), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (928, 938), False, 'import os\n')] |
# pdaggerq - A code for bringing strings of creation / annihilation operators to normal order.
# Copyright (C) 2020 <NAME>
#
# This file is part of the pdaggerq package.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
spin-orbital CCSD amplitude equations
"""
import numpy as np
from numpy import einsum
def ccsd_energy(t1, t2, f, g, o, v):
# < 0 | e(-T) H e(T) | 0> :
# 1.0000 f(i,i)
energy = 1.000000000000000 * einsum('ii', f[o, o])
# 1.0000 f(i,a)*t1(a,i)
energy += 1.000000000000000 * einsum('ia,ai', f[o, v], t1)
# -0.5000 <j,i||j,i>
energy += -0.500000000000000 * einsum('jiji', g[o, o, o, o])
# 0.2500 <j,i||a,b>*t2(a,b,j,i)
energy += 0.250000000000000 * einsum('jiab,abji', g[o, o, v, v], t2)
# -0.5000 <j,i||a,b>*t1(a,i)*t1(b,j)
energy += -0.500000000000000 * einsum('jiab,ai,bj', g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
return energy
def singles_residual(t1, t2, f, g, o, v):
# < 0 | m* e e(-T) H e(T) | 0> :
# 1.0000 f(e,m)
singles_res = 1.000000000000000 * einsum('em->em', f[v, o])
# -1.0000 f(i,m)*t1(e,i)
singles_res += -1.000000000000000 * einsum('im,ei->em', f[o, o], t1)
# 1.0000 f(e,a)*t1(a,m)
singles_res += 1.000000000000000 * einsum('ea,am->em', f[v, v], t1)
# -1.0000 f(i,a)*t2(a,e,m,i)
singles_res += -1.000000000000000 * einsum('ia,aemi->em', f[o, v], t2)
# -1.0000 f(i,a)*t1(a,m)*t1(e,i)
singles_res += -1.000000000000000 * einsum('ia,am,ei->em', f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 <i,e||a,m>*t1(a,i)
singles_res += 1.000000000000000 * einsum('ieam,ai->em', g[o, v, v, o], t1)
# -0.5000 <j,i||a,m>*t2(a,e,j,i)
singles_res += -0.500000000000000 * einsum('jiam,aeji->em', g[o, o, v, o], t2)
# -0.5000 <i,e||a,b>*t2(a,b,m,i)
singles_res += -0.500000000000000 * einsum('ieab,abmi->em', g[o, v, v, v], t2)
# 1.0000 <j,i||a,b>*t1(a,i)*t2(b,e,m,j)
singles_res += 1.000000000000000 * einsum('jiab,ai,bemj->em', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
# 0.5000 <j,i||a,b>*t1(a,m)*t2(b,e,j,i)
singles_res += 0.500000000000000 * einsum('jiab,am,beji->em', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 2), (0, 1)])
# 0.5000 <j,i||a,b>*t1(e,i)*t2(a,b,m,j)
singles_res += 0.500000000000000 * einsum('jiab,ei,abmj->em', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 2), (0, 1)])
# 1.0000 <j,i||a,m>*t1(a,i)*t1(e,j)
singles_res += 1.000000000000000 * einsum('jiam,ai,ej->em', g[o, o, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 <i,e||a,b>*t1(a,i)*t1(b,m)
singles_res += 1.000000000000000 * einsum('ieab,ai,bm->em', g[o, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 <j,i||a,b>*t1(a,i)*t1(b,m)*t1(e,j)
singles_res += 1.000000000000000 * einsum('jiab,ai,bm,ej->em', g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
return singles_res
def doubles_residual(t1, t2, f, g, o, v):
# < 0 | m* n* f e e(-T) H e(T) | 0> :
# -1.0000 P(m,n)f(i,n)*t2(e,f,m,i)
contracted_intermediate = -1.000000000000000 * einsum('in,efmi->efmn', f[o, o], t2)
doubles_res = 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 1.0000 P(e,f)f(e,a)*t2(a,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('ea,afmn->efmn', f[v, v], t2)
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -1.0000 P(m,n)f(i,a)*t1(a,n)*t2(e,f,m,i)
contracted_intermediate = -1.000000000000000 * einsum('ia,an,efmi->efmn', f[o, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -1.0000 P(e,f)f(i,a)*t1(e,i)*t2(a,f,m,n)
contracted_intermediate = -1.000000000000000 * einsum('ia,ei,afmn->efmn', f[o, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# 1.0000 <e,f||m,n>
doubles_res += 1.000000000000000 * einsum('efmn->efmn', g[v, v, o, o])
# 1.0000 P(e,f)<i,e||m,n>*t1(f,i)
contracted_intermediate = 1.000000000000000 * einsum('iemn,fi->efmn', g[o, v, o, o], t1)
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# 1.0000 P(m,n)<e,f||a,n>*t1(a,m)
contracted_intermediate = 1.000000000000000 * einsum('efan,am->efmn', g[v, v, v, o], t1)
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 0.5000 <j,i||m,n>*t2(e,f,j,i)
doubles_res += 0.500000000000000 * einsum('jimn,efji->efmn', g[o, o, o, o], t2)
# 1.0000 P(m,n)*P(e,f)<i,e||a,n>*t2(a,f,m,i)
contracted_intermediate = 1.000000000000000 * einsum('iean,afmi->efmn', g[o, v, v, o], t2)
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# 0.5000 <e,f||a,b>*t2(a,b,m,n)
doubles_res += 0.500000000000000 * einsum('efab,abmn->efmn', g[v, v, v, v], t2)
# 1.0000 P(m,n)<j,i||a,n>*t1(a,i)*t2(e,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jian,ai,efmj->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 0.5000 P(m,n)<j,i||a,n>*t1(a,m)*t2(e,f,j,i)
contracted_intermediate = 0.500000000000000 * einsum('jian,am,efji->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -1.0000 P(m,n)*P(e,f)<j,i||a,n>*t1(e,i)*t2(a,f,m,j)
contracted_intermediate = -1.000000000000000 * einsum('jian,ei,afmj->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# 1.0000 P(e,f)<i,e||a,b>*t1(a,i)*t2(b,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('ieab,ai,bfmn->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -1.0000 P(m,n)*P(e,f)<i,e||a,b>*t1(a,n)*t2(b,f,m,i)
contracted_intermediate = -1.000000000000000 * einsum('ieab,an,bfmi->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# 0.5000 P(e,f)<i,e||a,b>*t1(f,i)*t2(a,b,m,n)
contracted_intermediate = 0.500000000000000 * einsum('ieab,fi,abmn->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -1.0000 <j,i||m,n>*t1(e,i)*t1(f,j)
doubles_res += -1.000000000000000 * einsum('jimn,ei,fj->efmn', g[o, o, o, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 P(m,n)*P(e,f)<i,e||a,n>*t1(a,m)*t1(f,i)
contracted_intermediate = 1.000000000000000 * einsum('iean,am,fi->efmn', g[o, v, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# -1.0000 <e,f||a,b>*t1(a,n)*t1(b,m)
doubles_res += -1.000000000000000 * einsum('efab,an,bm->efmn', g[v, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# -0.5000 P(m,n)<j,i||a,b>*t2(a,b,n,i)*t2(e,f,m,j)
contracted_intermediate = -0.500000000000000 * einsum('jiab,abni,efmj->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 0.2500 <j,i||a,b>*t2(a,b,m,n)*t2(e,f,j,i)
doubles_res += 0.250000000000000 * einsum('jiab,abmn,efji->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
# -0.5000 <j,i||a,b>*t2(a,e,j,i)*t2(b,f,m,n)
doubles_res += -0.500000000000000 * einsum('jiab,aeji,bfmn->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 P(m,n)<j,i||a,b>*t2(a,e,n,i)*t2(b,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jiab,aeni,bfmj->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -0.5000 <j,i||a,b>*t2(a,e,m,n)*t2(b,f,j,i)
doubles_res += -0.500000000000000 * einsum('jiab,aemn,bfji->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 2), (0, 1)])
# 1.0000 P(m,n)<j,i||a,b>*t1(a,i)*t1(b,n)*t2(e,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jiab,ai,bn,efmj->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 1.0000 P(e,f)<j,i||a,b>*t1(a,i)*t1(e,j)*t2(b,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('jiab,ai,ej,bfmn->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -0.5000 <j,i||a,b>*t1(a,n)*t1(b,m)*t2(e,f,j,i)
doubles_res += -0.500000000000000 * einsum('jiab,an,bm,efji->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
# 1.0000 P(m,n)*P(e,f)<j,i||a,b>*t1(a,n)*t1(e,i)*t2(b,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jiab,an,ei,bfmj->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# -0.5000 <j,i||a,b>*t1(e,i)*t1(f,j)*t2(a,b,m,n)
doubles_res += -0.500000000000000 * einsum('jiab,ei,fj,abmn->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
# -1.0000 P(m,n)<j,i||a,n>*t1(a,m)*t1(e,i)*t1(f,j)
contracted_intermediate = -1.000000000000000 * einsum('jian,am,ei,fj->efmn', g[o, o, v, o], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -1.0000 P(e,f)<i,e||a,b>*t1(a,n)*t1(b,m)*t1(f,i)
contracted_intermediate = -1.000000000000000 * einsum('ieab,an,bm,fi->efmn', g[o, v, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
doubles_res += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# 1.0000 <j,i||a,b>*t1(a,n)*t1(b,m)*t1(e,i)*t1(f,j)
doubles_res += 1.000000000000000 * einsum('jiab,an,bm,ei,fj->efmn', g[o, o, v, v], t1, t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 3), (0, 2), (0, 1)])
return doubles_res
def ccsd_iterations(t1, t2, fock, g, o, v, e_ai, e_abij, hf_energy, max_iter=100,
e_convergence=1e-8,r_convergence=1e-8,diis_size=None, diis_start_cycle=4):
# initialize diis if diis_size is not None
# else normal scf iterate
if diis_size is not None:
from diis import DIIS
diis_update = DIIS(diis_size, start_iter=diis_start_cycle)
t1_dim = t1.size
old_vec = np.hstack((t1.flatten(), t2.flatten()))
fock_e_ai = np.reciprocal(e_ai)
fock_e_abij = np.reciprocal(e_abij)
old_energy = ccsd_energy(t1, t2, fock, g, o, v)
print("")
print(" ==> CCSD amplitude equations <==")
print("")
print(" Iter Energy |dE| |dT|")
for idx in range(max_iter):
residual_singles = singles_residual(t1, t2, fock, g, o, v)
residual_doubles = doubles_residual(t1, t2, fock, g, o, v)
res_norm = np.linalg.norm(residual_singles) + np.linalg.norm(residual_doubles)
singles_res = residual_singles + fock_e_ai * t1
doubles_res = residual_doubles + fock_e_abij * t2
new_singles = singles_res * e_ai
new_doubles = doubles_res * e_abij
# diis update
if diis_size is not None:
vectorized_iterate = np.hstack(
(new_singles.flatten(), new_doubles.flatten()))
error_vec = old_vec - vectorized_iterate
new_vectorized_iterate = diis_update.compute_new_vec(vectorized_iterate,
error_vec)
new_singles = new_vectorized_iterate[:t1_dim].reshape(t1.shape)
new_doubles = new_vectorized_iterate[t1_dim:].reshape(t2.shape)
old_vec = new_vectorized_iterate
current_energy = ccsd_energy(new_singles, new_doubles, fock, g, o, v)
delta_e = np.abs(old_energy - current_energy)
print(" {: 5d} {: 20.12f} {: 20.12f} {: 20.12f}".format(idx, current_energy - hf_energy, delta_e, res_norm))
if delta_e < e_convergence and res_norm < r_convergence:
# assign t1 and t2 variables for future use before breaking
t1 = new_singles
t2 = new_doubles
break
else:
# assign t1 and t2 and old_energy for next iteration
t1 = new_singles
t2 = new_doubles
old_energy = current_energy
else:
raise ValueError("CCSD iterations did not converge")
return t1, t2
| [
"numpy.abs",
"numpy.reciprocal",
"numpy.einsum",
"numpy.linalg.norm",
"diis.DIIS"
] | [((13236, 13255), 'numpy.reciprocal', 'np.reciprocal', (['e_ai'], {}), '(e_ai)\n', (13249, 13255), True, 'import numpy as np\n'), ((13274, 13295), 'numpy.reciprocal', 'np.reciprocal', (['e_abij'], {}), '(e_abij)\n', (13287, 13295), True, 'import numpy as np\n'), ((959, 980), 'numpy.einsum', 'einsum', (['"""ii"""', 'f[o, o]'], {}), "('ii', f[o, o])\n", (965, 980), False, 'from numpy import einsum\n'), ((1051, 1079), 'numpy.einsum', 'einsum', (['"""ia,ai"""', 'f[o, v]', 't1'], {}), "('ia,ai', f[o, v], t1)\n", (1057, 1079), False, 'from numpy import einsum\n'), ((1146, 1175), 'numpy.einsum', 'einsum', (['"""jiji"""', 'g[o, o, o, o]'], {}), "('jiji', g[o, o, o, o])\n", (1152, 1175), False, 'from numpy import einsum\n'), ((1254, 1292), 'numpy.einsum', 'einsum', (['"""jiab,abji"""', 'g[o, o, v, v]', 't2'], {}), "('jiab,abji', g[o, o, v, v], t2)\n", (1260, 1292), False, 'from numpy import einsum\n'), ((1375, 1464), 'numpy.einsum', 'einsum', (['"""jiab,ai,bj"""', 'g[o, o, v, v]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiab,ai,bj', g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1),\n (0, 1)])\n", (1381, 1464), False, 'from numpy import einsum\n'), ((1638, 1663), 'numpy.einsum', 'einsum', (['"""em->em"""', 'f[v, o]'], {}), "('em->em', f[v, o])\n", (1644, 1663), False, 'from numpy import einsum\n'), ((1739, 1771), 'numpy.einsum', 'einsum', (['"""im,ei->em"""', 'f[o, o]', 't1'], {}), "('im,ei->em', f[o, o], t1)\n", (1745, 1771), False, 'from numpy import einsum\n'), ((1847, 1879), 'numpy.einsum', 'einsum', (['"""ea,am->em"""', 'f[v, v]', 't1'], {}), "('ea,am->em', f[v, v], t1)\n", (1853, 1879), False, 'from numpy import einsum\n'), ((1959, 1993), 'numpy.einsum', 'einsum', (['"""ia,aemi->em"""', 'f[o, v]', 't2'], {}), "('ia,aemi->em', f[o, v], t2)\n", (1965, 1993), False, 'from numpy import einsum\n'), ((2077, 2162), 'numpy.einsum', 'einsum', (['"""ia,am,ei->em"""', 'f[o, v]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ia,am,ei->em', f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0,\n 1)])\n", (2083, 2162), False, 'from numpy import einsum\n'), ((2238, 2278), 'numpy.einsum', 'einsum', (['"""ieam,ai->em"""', 'g[o, v, v, o]', 't1'], {}), "('ieam,ai->em', g[o, v, v, o], t1)\n", (2244, 2278), False, 'from numpy import einsum\n'), ((2362, 2404), 'numpy.einsum', 'einsum', (['"""jiam,aeji->em"""', 'g[o, o, v, o]', 't2'], {}), "('jiam,aeji->em', g[o, o, v, o], t2)\n", (2368, 2404), False, 'from numpy import einsum\n'), ((2488, 2530), 'numpy.einsum', 'einsum', (['"""ieab,abmi->em"""', 'g[o, v, v, v]', 't2'], {}), "('ieab,abmi->em', g[o, v, v, v], t2)\n", (2494, 2530), False, 'from numpy import einsum\n'), ((2622, 2717), 'numpy.einsum', 'einsum', (['"""jiab,ai,bemj->em"""', 'g[o, o, v, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiab,ai,bemj->em', g[o, o, v, v], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (2628, 2717), False, 'from numpy import einsum\n'), ((2805, 2900), 'numpy.einsum', 'einsum', (['"""jiab,am,beji->em"""', 'g[o, o, v, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 2), (0, 1)]"}), "('jiab,am,beji->em', g[o, o, v, v], t1, t2, optimize=['einsum_path',\n (0, 2), (0, 1)])\n", (2811, 2900), False, 'from numpy import einsum\n'), ((2988, 3083), 'numpy.einsum', 'einsum', (['"""jiab,ei,abmj->em"""', 'g[o, o, v, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 2), (0, 1)]"}), "('jiab,ei,abmj->em', g[o, o, v, v], t1, t2, optimize=['einsum_path',\n (0, 2), (0, 1)])\n", (2994, 3083), False, 'from numpy import einsum\n'), ((3167, 3260), 'numpy.einsum', 'einsum', (['"""jiam,ai,ej->em"""', 'g[o, o, v, o]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiam,ai,ej->em', g[o, o, v, o], t1, t1, optimize=['einsum_path', (0,\n 1), (0, 1)])\n", (3173, 3260), False, 'from numpy import einsum\n'), ((3344, 3437), 'numpy.einsum', 'einsum', (['"""ieab,ai,bm->em"""', 'g[o, v, v, v]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ieab,ai,bm->em', g[o, v, v, v], t1, t1, optimize=['einsum_path', (0,\n 1), (0, 1)])\n", (3350, 3437), False, 'from numpy import einsum\n'), ((3529, 3638), 'numpy.einsum', 'einsum', (['"""jiab,ai,bm,ej->em"""', 'g[o, o, v, v]', 't1', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jiab,ai,bm,ej->em', g[o, o, v, v], t1, t1, t1, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (3535, 3638), False, 'from numpy import einsum\n'), ((3851, 3887), 'numpy.einsum', 'einsum', (['"""in,efmi->efmn"""', 'f[o, o]', 't2'], {}), "('in,efmi->efmn', f[o, o], t2)\n", (3857, 3887), False, 'from numpy import einsum\n'), ((4097, 4133), 'numpy.einsum', 'einsum', (['"""ea,afmn->efmn"""', 'f[v, v]', 't2'], {}), "('ea,afmn->efmn', f[v, v], t2)\n", (4103, 4133), False, 'from numpy import einsum\n'), ((4352, 4441), 'numpy.einsum', 'einsum', (['"""ia,an,efmi->efmn"""', 'f[o, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ia,an,efmi->efmn', f[o, v], t1, t2, optimize=['einsum_path', (0, 1),\n (0, 1)])\n", (4358, 4441), False, 'from numpy import einsum\n'), ((4656, 4745), 'numpy.einsum', 'einsum', (['"""ia,ei,afmn->efmn"""', 'f[o, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ia,ei,afmn->efmn', f[o, v], t1, t2, optimize=['einsum_path', (0, 1),\n (0, 1)])\n", (4662, 4745), False, 'from numpy import einsum\n'), ((4927, 4962), 'numpy.einsum', 'einsum', (['"""efmn->efmn"""', 'g[v, v, o, o]'], {}), "('efmn->efmn', g[v, v, o, o])\n", (4933, 4962), False, 'from numpy import einsum\n'), ((5059, 5101), 'numpy.einsum', 'einsum', (['"""iemn,fi->efmn"""', 'g[o, v, o, o]', 't1'], {}), "('iemn,fi->efmn', g[o, v, o, o], t1)\n", (5065, 5101), False, 'from numpy import einsum\n'), ((5312, 5354), 'numpy.einsum', 'einsum', (['"""efan,am->efmn"""', 'g[v, v, v, o]', 't1'], {}), "('efan,am->efmn', g[v, v, v, o], t1)\n", (5318, 5354), False, 'from numpy import einsum\n'), ((5552, 5596), 'numpy.einsum', 'einsum', (['"""jimn,efji->efmn"""', 'g[o, o, o, o]', 't2'], {}), "('jimn,efji->efmn', g[o, o, o, o], t2)\n", (5558, 5596), False, 'from numpy import einsum\n'), ((5704, 5748), 'numpy.einsum', 'einsum', (['"""iean,afmi->efmn"""', 'g[o, v, v, o]', 't2'], {}), "('iean,afmi->efmn', g[o, v, v, o], t2)\n", (5710, 5748), False, 'from numpy import einsum\n'), ((6066, 6110), 'numpy.einsum', 'einsum', (['"""efab,abmn->efmn"""', 'g[v, v, v, v]', 't2'], {}), "('efab,abmn->efmn', g[v, v, v, v], t2)\n", (6072, 6110), False, 'from numpy import einsum\n'), ((6219, 6316), 'numpy.einsum', 'einsum', (['"""jian,ai,efmj->efmn"""', 'g[o, o, v, o]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jian,ai,efmj->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (6225, 6316), False, 'from numpy import einsum\n'), ((6535, 6632), 'numpy.einsum', 'einsum', (['"""jian,am,efji->efmn"""', 'g[o, o, v, o]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jian,am,efji->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (6541, 6632), False, 'from numpy import einsum\n'), ((6858, 6955), 'numpy.einsum', 'einsum', (['"""jian,ei,afmj->efmn"""', 'g[o, o, v, o]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jian,ei,afmj->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (6864, 6955), False, 'from numpy import einsum\n'), ((7294, 7391), 'numpy.einsum', 'einsum', (['"""ieab,ai,bfmn->efmn"""', 'g[o, v, v, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ieab,ai,bfmn->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (7300, 7391), False, 'from numpy import einsum\n'), ((7617, 7714), 'numpy.einsum', 'einsum', (['"""ieab,an,bfmi->efmn"""', 'g[o, v, v, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ieab,an,bfmi->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (7623, 7714), False, 'from numpy import einsum\n'), ((8053, 8150), 'numpy.einsum', 'einsum', (['"""ieab,fi,abmn->efmn"""', 'g[o, v, v, v]', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('ieab,fi,abmn->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (8059, 8150), False, 'from numpy import einsum\n'), ((8348, 8443), 'numpy.einsum', 'einsum', (['"""jimn,ei,fj->efmn"""', 'g[o, o, o, o]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jimn,ei,fj->efmn', g[o, o, o, o], t1, t1, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (8354, 8443), False, 'from numpy import einsum\n'), ((8551, 8646), 'numpy.einsum', 'einsum', (['"""iean,am,fi->efmn"""', 'g[o, v, v, o]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('iean,am,fi->efmn', g[o, v, v, o], t1, t1, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (8557, 8646), False, 'from numpy import einsum\n'), ((8964, 9059), 'numpy.einsum', 'einsum', (['"""efab,an,bm->efmn"""', 'g[v, v, v, v]', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('efab,an,bm->efmn', g[v, v, v, v], t1, t1, optimize=['einsum_path',\n (0, 1), (0, 1)])\n", (8970, 9059), False, 'from numpy import einsum\n'), ((9168, 9268), 'numpy.einsum', 'einsum', (['"""jiab,abni,efmj->efmn"""', 'g[o, o, v, v]', 't2', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiab,abni,efmj->efmn', g[o, o, v, v], t2, t2, optimize=[\n 'einsum_path', (0, 1), (0, 1)])\n", (9174, 9268), False, 'from numpy import einsum\n'), ((9473, 9573), 'numpy.einsum', 'einsum', (['"""jiab,abmn,efji->efmn"""', 'g[o, o, v, v]', 't2', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiab,abmn,efji->efmn', g[o, o, v, v], t2, t2, optimize=[\n 'einsum_path', (0, 1), (0, 1)])\n", (9479, 9573), False, 'from numpy import einsum\n'), ((9664, 9764), 'numpy.einsum', 'einsum', (['"""jiab,aeji,bfmn->efmn"""', 'g[o, o, v, v]', 't2', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiab,aeji,bfmn->efmn', g[o, o, v, v], t2, t2, optimize=[\n 'einsum_path', (0, 1), (0, 1)])\n", (9670, 9764), False, 'from numpy import einsum\n'), ((9872, 9972), 'numpy.einsum', 'einsum', (['"""jiab,aeni,bfmj->efmn"""', 'g[o, o, v, v]', 't2', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 1)]"}), "('jiab,aeni,bfmj->efmn', g[o, o, v, v], t2, t2, optimize=[\n 'einsum_path', (0, 1), (0, 1)])\n", (9878, 9972), False, 'from numpy import einsum\n'), ((10177, 10277), 'numpy.einsum', 'einsum', (['"""jiab,aemn,bfji->efmn"""', 'g[o, o, v, v]', 't2', 't2'], {'optimize': "['einsum_path', (0, 2), (0, 1)]"}), "('jiab,aemn,bfji->efmn', g[o, o, v, v], t2, t2, optimize=[\n 'einsum_path', (0, 2), (0, 1)])\n", (10183, 10277), False, 'from numpy import einsum\n'), ((10389, 10502), 'numpy.einsum', 'einsum', (['"""jiab,ai,bn,efmj->efmn"""', 'g[o, o, v, v]', 't1', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jiab,ai,bn,efmj->efmn', g[o, o, v, v], t1, t1, t2, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (10395, 10502), False, 'from numpy import einsum\n'), ((10728, 10841), 'numpy.einsum', 'einsum', (['"""jiab,ai,ej,bfmn->efmn"""', 'g[o, o, v, v]', 't1', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jiab,ai,ej,bfmn->efmn', g[o, o, v, v], t1, t1, t2, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (10734, 10841), False, 'from numpy import einsum\n'), ((11050, 11163), 'numpy.einsum', 'einsum', (['"""jiab,an,bm,efji->efmn"""', 'g[o, o, v, v]', 't1', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jiab,an,bm,efji->efmn', g[o, o, v, v], t1, t1, t2, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (11056, 11163), False, 'from numpy import einsum\n'), ((11282, 11395), 'numpy.einsum', 'einsum', (['"""jiab,an,ei,bfmj->efmn"""', 'g[o, o, v, v]', 't1', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jiab,an,ei,bfmj->efmn', g[o, o, v, v], t1, t1, t2, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (11288, 11395), False, 'from numpy import einsum\n'), ((11724, 11837), 'numpy.einsum', 'einsum', (['"""jiab,ei,fj,abmn->efmn"""', 'g[o, o, v, v]', 't1', 't1', 't2'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jiab,ei,fj,abmn->efmn', g[o, o, v, v], t1, t1, t2, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (11730, 11837), False, 'from numpy import einsum\n'), ((11945, 12056), 'numpy.einsum', 'einsum', (['"""jian,am,ei,fj->efmn"""', 'g[o, o, v, o]', 't1', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('jian,am,ei,fj->efmn', g[o, o, v, o], t1, t1, t1, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (11951, 12056), False, 'from numpy import einsum\n'), ((12278, 12389), 'numpy.einsum', 'einsum', (['"""ieab,an,bm,fi->efmn"""', 'g[o, v, v, v]', 't1', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 2), (0, 1)]"}), "('ieab,an,bm,fi->efmn', g[o, v, v, v], t1, t1, t1, optimize=[\n 'einsum_path', (0, 1), (0, 2), (0, 1)])\n", (12284, 12389), False, 'from numpy import einsum\n'), ((12602, 12728), 'numpy.einsum', 'einsum', (['"""jiab,an,bm,ei,fj->efmn"""', 'g[o, o, v, v]', 't1', 't1', 't1', 't1'], {'optimize': "['einsum_path', (0, 1), (0, 3), (0, 2), (0, 1)]"}), "('jiab,an,bm,ei,fj->efmn', g[o, o, v, v], t1, t1, t1, t1, optimize=[\n 'einsum_path', (0, 1), (0, 3), (0, 2), (0, 1)])\n", (12608, 12728), False, 'from numpy import einsum\n'), ((13091, 13135), 'diis.DIIS', 'DIIS', (['diis_size'], {'start_iter': 'diis_start_cycle'}), '(diis_size, start_iter=diis_start_cycle)\n', (13095, 13135), False, 'from diis import DIIS\n'), ((14641, 14676), 'numpy.abs', 'np.abs', (['(old_energy - current_energy)'], {}), '(old_energy - current_energy)\n', (14647, 14676), True, 'import numpy as np\n'), ((3954, 3999), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (3960, 3999), False, 'from numpy import einsum\n'), ((4201, 4246), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (4207, 4246), False, 'from numpy import einsum\n'), ((4505, 4550), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (4511, 4550), False, 'from numpy import einsum\n'), ((4809, 4854), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (4815, 4854), False, 'from numpy import einsum\n'), ((5169, 5214), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (5175, 5214), False, 'from numpy import einsum\n'), ((5422, 5467), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (5428, 5467), False, 'from numpy import einsum\n'), ((5936, 5981), 'numpy.einsum', 'einsum', (['"""efmn->fenm"""', 'contracted_intermediate'], {}), "('efmn->fenm', contracted_intermediate)\n", (5942, 5981), False, 'from numpy import einsum\n'), ((6380, 6425), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (6386, 6425), False, 'from numpy import einsum\n'), ((6696, 6741), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (6702, 6741), False, 'from numpy import einsum\n'), ((7139, 7184), 'numpy.einsum', 'einsum', (['"""efmn->fenm"""', 'contracted_intermediate'], {}), "('efmn->fenm', contracted_intermediate)\n", (7145, 7184), False, 'from numpy import einsum\n'), ((7455, 7500), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (7461, 7500), False, 'from numpy import einsum\n'), ((7898, 7943), 'numpy.einsum', 'einsum', (['"""efmn->fenm"""', 'contracted_intermediate'], {}), "('efmn->fenm', contracted_intermediate)\n", (7904, 7943), False, 'from numpy import einsum\n'), ((8214, 8259), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (8220, 8259), False, 'from numpy import einsum\n'), ((8830, 8875), 'numpy.einsum', 'einsum', (['"""efmn->fenm"""', 'contracted_intermediate'], {}), "('efmn->fenm', contracted_intermediate)\n", (8836, 8875), False, 'from numpy import einsum\n'), ((9331, 9376), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (9337, 9376), False, 'from numpy import einsum\n'), ((10035, 10080), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (10041, 10080), False, 'from numpy import einsum\n'), ((10565, 10610), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (10571, 10610), False, 'from numpy import einsum\n'), ((10904, 10949), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (10910, 10949), False, 'from numpy import einsum\n'), ((11578, 11623), 'numpy.einsum', 'einsum', (['"""efmn->fenm"""', 'contracted_intermediate'], {}), "('efmn->fenm', contracted_intermediate)\n", (11584, 11623), False, 'from numpy import einsum\n'), ((12119, 12164), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (12125, 12164), False, 'from numpy import einsum\n'), ((12452, 12497), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (12458, 12497), False, 'from numpy import einsum\n'), ((13700, 13732), 'numpy.linalg.norm', 'np.linalg.norm', (['residual_singles'], {}), '(residual_singles)\n', (13714, 13732), True, 'import numpy as np\n'), ((13735, 13767), 'numpy.linalg.norm', 'np.linalg.norm', (['residual_doubles'], {}), '(residual_doubles)\n', (13749, 13767), True, 'import numpy as np\n'), ((5876, 5921), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (5882, 5921), False, 'from numpy import einsum\n'), ((7079, 7124), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (7085, 7124), False, 'from numpy import einsum\n'), ((7838, 7883), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (7844, 7883), False, 'from numpy import einsum\n'), ((8770, 8815), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (8776, 8815), False, 'from numpy import einsum\n'), ((11518, 11563), 'numpy.einsum', 'einsum', (['"""efmn->femn"""', 'contracted_intermediate'], {}), "('efmn->femn', contracted_intermediate)\n", (11524, 11563), False, 'from numpy import einsum\n'), ((5816, 5861), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (5822, 5861), False, 'from numpy import einsum\n'), ((7019, 7064), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (7025, 7064), False, 'from numpy import einsum\n'), ((7778, 7823), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (7784, 7823), False, 'from numpy import einsum\n'), ((8710, 8755), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (8716, 8755), False, 'from numpy import einsum\n'), ((11458, 11503), 'numpy.einsum', 'einsum', (['"""efmn->efnm"""', 'contracted_intermediate'], {}), "('efmn->efnm', contracted_intermediate)\n", (11464, 11503), False, 'from numpy import einsum\n')] |
"""DNS Authenticator for deSEC."""
import json
import logging
import time
import requests
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot.plugins import dns_common
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@zope.interface.implementer(interfaces.IAuthenticator) # needed for compatibility with older certbots, see #13
@zope.interface.provider(interfaces.IPluginFactory) # needed for compatibility with older certbots, see #13
class Authenticator(dns_common.DNSAuthenticator):
"""DNS Authenticator for deSEC
This Authenticator uses the deSEC REST API to fulfill a dns-01 challenge.
"""
description = "Obtain certificates using a DNS TXT record (if you are using deSEC.io for DNS)."
DEFAULT_ENDPOINT = "https://desec.io/api/v1"
def __init__(self, *args, **kwargs):
super(Authenticator, self).__init__(*args, **kwargs)
self.credentials = None
@classmethod
def add_parser_arguments(cls, add): # pylint: disable=arguments-differ
super(Authenticator, cls).add_parser_arguments(
add, default_propagation_seconds=80 # TODO decrease after deSEC fixed their NOTIFY problem
)
add("credentials", help="deSEC credentials INI file.")
def more_info(self): # pylint: disable=missing-docstring,no-self-use
return (
"This plugin configures a DNS TXT record to respond to a dns-01 challenge using "
"the deSEC Remote REST API."
)
def _setup_credentials(self):
self.credentials = self._configure_credentials(
key="credentials",
label="deSEC credentials INI file",
required_variables={
"token": "Access token for deSEC API.",
},
)
def _desec_work(self, domain, validation_name, validation, set_operator):
client = self._get_desec_client()
zone = client.get_authoritative_zone(validation_name)
subname = validation_name.rsplit(zone['name'], 1)[0].rstrip('.')
records = client.get_txt_rrset(zone, subname)
logger.debug(f"Current TXT records: {records}")
records = set_operator(records, {f'"{validation}"'})
logger.debug(f"Setting TXT records: {records}")
client.set_txt_rrset(zone, subname, records)
def _perform(self, domain, validation_name, validation):
logger.debug(f"Authenticator._perform: {domain}, {validation_name}, {validation}")
self._desec_work(domain, validation_name, validation, set.union)
def _cleanup(self, domain, validation_name, validation):
logger.debug(f"Authenticator._cleanup: {domain}, {validation_name}, {validation}")
self._desec_work(domain, validation_name, validation, set.difference)
def _get_desec_client(self):
return _DesecConfigClient(
self.credentials.conf("endpoint") or self.DEFAULT_ENDPOINT,
self.credentials.conf("token"),
)
class _DesecConfigClient(object):
"""
Encapsulates all communication with the deSEC REST API.
"""
def __init__(self, endpoint, token):
logger.debug("creating _DesecConfigClient")
self.endpoint = endpoint.rstrip('/')
self.token = token
self.session = requests.Session()
self.session.headers["Authorization"] = f"Token {token}"
self.session.headers["Content-Type"] = "application/json"
@staticmethod
def desec_request(method, **kwargs):
for _ in range(3):
response: requests.Response = method(**kwargs)
if response.status_code == 429 and 'Retry-After' in response.headers:
try:
cooldown = int(response.headers['Retry-After'])
except ValueError:
return response
logger.debug(f"deSEC API limit reached. Retrying request after {cooldown}s.")
time.sleep(cooldown)
else:
return response
return response
def desec_get(self, **kwargs):
return self.desec_request(self.session.get, **kwargs)
def desec_put(self, **kwargs):
return self.desec_request(self.session.put, **kwargs)
def get_authoritative_zone(self, qname):
response = self.desec_get(url=f"{self.endpoint}/domains/?owns_qname={qname}")
self._check_response_status(response)
data = self._response_json(response)
try:
return data[0]
except IndexError:
raise errors.PluginError(f"Could not find suitable domain in your account (did you create it?): {qname}")
def get_txt_rrset(self, zone, subname):
domain = zone['name']
response = self.desec_get(
url=f"{self.endpoint}/domains/{domain}/rrsets/{subname}/TXT/",
)
if response.status_code == 404:
return set()
self._check_response_status(response, domain=domain)
return set(self._response_json(response).get('records', set()))
def set_txt_rrset(self, zone, subname, records: set):
domain = zone['name']
response = self.desec_put(
url=f"{self.endpoint}/domains/{domain}/rrsets/",
data=json.dumps([
{"subname": subname, "type": "TXT", "ttl": zone['minimum_ttl'], "records": list(records)},
]),
)
return self._check_response_status(response, domain=domain)
def _check_response_status(self, response, **kwargs):
if 200 <= response.status_code <= 299:
return
elif response.status_code in [401, 403]:
raise errors.PluginError(f"Could not authenticate against deSEC API: {response.content}")
elif response.status_code == 404:
raise errors.PluginError(f"Not found ({kwargs}): {response.content}")
elif response.status_code == 429:
raise errors.PluginError(f"deSEC throttled your request even after we waited the prescribed cool-down "
f"time. Did you use the API in parallel? {response.content}")
elif response.status_code >= 500:
raise errors.PluginError(f"deSEC API server error (status {response.status_code}): {response.content}")
else:
raise errors.PluginError(f"Unknown error when talking to deSEC (status {response.status_code}: "
f"Request was on '{response.request.url}' with payload {response.request.body}. "
f"Response was '{response.content}'.")
def _response_json(self, response):
try:
return response.json()
except json.JSONDecodeError:
raise errors.PluginError(f"deSEC API sent non-JSON response (status {response.status_code}): "
f"{response.content}")
| [
"logging.getLogger",
"certbot.errors.PluginError",
"requests.Session",
"time.sleep"
] | [((220, 247), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (237, 247), False, 'import logging\n'), ((3296, 3314), 'requests.Session', 'requests.Session', ([], {}), '()\n', (3312, 3314), False, 'import requests\n'), ((3944, 3964), 'time.sleep', 'time.sleep', (['cooldown'], {}), '(cooldown)\n', (3954, 3964), False, 'import time\n'), ((4543, 4652), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""Could not find suitable domain in your account (did you create it?): {qname}"""'], {}), "(\n f'Could not find suitable domain in your account (did you create it?): {qname}'\n )\n", (4561, 4652), False, 'from certbot import errors\n'), ((5646, 5734), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""Could not authenticate against deSEC API: {response.content}"""'], {}), "(\n f'Could not authenticate against deSEC API: {response.content}')\n", (5664, 5734), False, 'from certbot import errors\n'), ((6731, 6848), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""deSEC API sent non-JSON response (status {response.status_code}): {response.content}"""'], {}), "(\n f'deSEC API sent non-JSON response (status {response.status_code}): {response.content}'\n )\n", (6749, 6848), False, 'from certbot import errors\n'), ((5790, 5853), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""Not found ({kwargs}): {response.content}"""'], {}), "(f'Not found ({kwargs}): {response.content}')\n", (5808, 5853), False, 'from certbot import errors\n'), ((5914, 6079), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""deSEC throttled your request even after we waited the prescribed cool-down time. Did you use the API in parallel? {response.content}"""'], {}), "(\n f'deSEC throttled your request even after we waited the prescribed cool-down time. Did you use the API in parallel? {response.content}'\n )\n", (5932, 6079), False, 'from certbot import errors\n'), ((6171, 6278), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""deSEC API server error (status {response.status_code}): {response.content}"""'], {}), "(\n f'deSEC API server error (status {response.status_code}): {response.content}'\n )\n", (6189, 6278), False, 'from certbot import errors\n'), ((6301, 6514), 'certbot.errors.PluginError', 'errors.PluginError', (['f"""Unknown error when talking to deSEC (status {response.status_code}: Request was on \'{response.request.url}\' with payload {response.request.body}. Response was \'{response.content}\'."""'], {}), '(\n f"Unknown error when talking to deSEC (status {response.status_code}: Request was on \'{response.request.url}\' with payload {response.request.body}. Response was \'{response.content}\'."\n )\n', (6319, 6514), False, 'from certbot import errors\n')] |
# -*- coding: utf-8 -*-
#
# fumi deployment tool
# https://github.com/rmed/fumi
#
# The MIT License (MIT)
#
# Copyright (c) 2016 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Code for the ``Deployer`` class, which acts as proxy for configurations."""
import gettext
import types
from fumi import messages as m
from fumi import deployments
from fumi.util import cprint
class Deployer(object):
"""Configuration parsed from the ``fumi.yml`` file.
Attributes:
source_type (str): Source type (e.g. 'local' or 'git'). Required.
source_path (str): Path to the source files in local machine. Required.
host (str): Host to perform the deployment in. Required.
user (str): User to use for the deployment. Required.
use_password (bool): Whether or not to use password. If set to ``False``
(default), will rely on public key authentication. Otherwise, it
will be asked for during deployment or may be configured through the
``password`` attribute.
password (str): If ``use_password`` is set to True, this attribute can
be used to specify the password used for the connection. Otherwise
it will be asked for during deployment.
deploy_path (str): Remote host path in which to deploy files. Required.
predep (list[str]): List of commands to execute before deploying.
postdep (list[str]): List of commands to execute after deploying.
host_tmp (str): In ``local`` deployments, the remote directory to use
for uploading the compressed files (defaults to ``'/tmp'``).
keep_max (int): Maximum revisions to keep in the remote server.
local_ignore (list[str]): List of files (or directories) to ignore in
``local`` deployments.
buffer_size (int): Buffer size (in bytes) for file copying in ``local``
deployments. Defaults to 1 MB.
shared_paths (list[str]): List of file and directory paths that
should be shared accross deployments. These are relative to the
root of the project and are linked to the current revision.
"""
def __init__(self, **kwargs):
# Source information
self.source_type = kwargs['source-type']
self.source_path = kwargs['source-path']
# Destination host information
self.host = kwargs['host']
self.user = kwargs['user']
self.use_password = kwargs.get('use-password', False)
self.password = kwargs.get('password')
self.deploy_path = kwargs['deploy-path']
# Pre-deployment commands
predep = kwargs.get('predep', [])
self.predep = []
for p in predep:
# Single key dicts
for k, v in p.items():
self.predep.append((k, v))
# Post-deployment commands
postdep = kwargs.get('postdep', [])
self.postdep = []
for p in postdep:
# Single key dicts
for k, v in p.items():
self.postdep.append((k, v))
# Optional information
self.host_tmp = kwargs.get('host-tmp', '/tmp')
self.keep_max = kwargs.get('keep-max')
self.local_ignore = kwargs.get('local-ignore')
self.buffer_size = int(kwargs.get('buffer-size', 1024 * 1024))
self.shared_paths = kwargs.get('shared-paths', [])
def build_deployer(config):
"""Build a Deployer object.
Arguments:
config (dict): Parsed section of the YAML configuration file.
Returns:
Boolean indicating result and ``Deployer`` instance or ``None``.
"""
try:
deployer = Deployer(**config)
except KeyError as e:
# Missing required parameter
key = e.args[0]
cprint(m.DEP_MISSING_PARAM + '\n' % key, 'red')
return False, None
# Determine deployment function to use
if deployer.source_type == 'local':
cprint(m.DEP_LOCAL)
deployer.deploy = types.MethodType(deployments.deploy_local, deployer)
elif deployer.source_type == 'git':
cprint(m.DEP_GIT)
deployer.deploy = types.MethodType(deployments.deploy_git, deployer)
else:
# Unknown deployment type
cprint(m.DEP_UNKNOWN % deployer.source_type, 'red')
return False, None
# Additional method for preparing/testing the deployment
deployer.prepare = types.MethodType(deployments.prepare, deployer)
return True, deployer
| [
"types.MethodType",
"fumi.util.cprint"
] | [((5424, 5471), 'types.MethodType', 'types.MethodType', (['deployments.prepare', 'deployer'], {}), '(deployments.prepare, deployer)\n', (5440, 5471), False, 'import types\n'), ((4964, 4983), 'fumi.util.cprint', 'cprint', (['m.DEP_LOCAL'], {}), '(m.DEP_LOCAL)\n', (4970, 4983), False, 'from fumi.util import cprint\n'), ((5010, 5062), 'types.MethodType', 'types.MethodType', (['deployments.deploy_local', 'deployer'], {}), '(deployments.deploy_local, deployer)\n', (5026, 5062), False, 'import types\n'), ((4797, 4844), 'fumi.util.cprint', 'cprint', (["(m.DEP_MISSING_PARAM + '\\n' % key)", '"""red"""'], {}), "(m.DEP_MISSING_PARAM + '\\n' % key, 'red')\n", (4803, 4844), False, 'from fumi.util import cprint\n'), ((5112, 5129), 'fumi.util.cprint', 'cprint', (['m.DEP_GIT'], {}), '(m.DEP_GIT)\n', (5118, 5129), False, 'from fumi.util import cprint\n'), ((5156, 5206), 'types.MethodType', 'types.MethodType', (['deployments.deploy_git', 'deployer'], {}), '(deployments.deploy_git, deployer)\n', (5172, 5206), False, 'import types\n'), ((5260, 5311), 'fumi.util.cprint', 'cprint', (['(m.DEP_UNKNOWN % deployer.source_type)', '"""red"""'], {}), "(m.DEP_UNKNOWN % deployer.source_type, 'red')\n", (5266, 5311), False, 'from fumi.util import cprint\n')] |
import math
import unittest
from scipy import integrate
from ..problem import Problem
from ..algorithm_genetic import NSGAII
from ..algorithm_sweep import SweepAlgorithm
from ..benchmark_functions import Booth
from ..results import Results
from ..operators import LHSGenerator
from ..surrogate_scikit import SurrogateModelScikit
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import ExtraTreesRegressor
class MyProblemCoil(Problem):
def set(self):
self.parameters = [{'name': 'x1', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x2', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x3', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x4', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x5', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x6', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x7', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x8', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x9', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]},
{'name': 'x10', 'initial_value': 0.01, 'bounds': [5e-3, 50e-3]}]
def intl22(self, R2, R, dZ, phi):
return math.sqrt(R2 ** 2 + R ** 2 - 2.0 * R2 * R * math.cos(phi) + dZ ** 2)
def intg(self, R2, R, dZ):
# div J = 0 - nonconstant current density
f = lambda phi: math.log(R2 - R * math.cos(phi) + self.intl22(R2, R, dZ, phi)) * math.cos(phi)
return integrate.quad(f, 0, 2.0 * math.pi, epsabs=1e-3, epsrel=1e-3)[0]
def inth(self, R2, R, dZ):
# div J = 0 - nonconstant current density
f = lambda phi: - math.log(dZ + self.intl22(R2, R, dZ, phi))
return integrate.quad(f, 0, 2.0 * math.pi, epsabs=1e-3, epsrel=1e-3)[0]
def integral(self, rc, zc, R, Z):
w = 0.001
h = 0.0015
R1 = rc
R2 = rc + w
Z1 = zc
Z2 = zc + h
mu0 = 4.0 * math.pi * 1e-7
Jext = 2e6
# div J = 0 - nonconstant current density
C = mu0 * Jext * w * h / (4 * math.pi * (Z2 - Z1) * math.log(R2 / R1))
# upper coil
Bru = C * (self.intg(R2, R, Z2 - Z) - self.intg(R2, R, Z1 - Z) - self.intg(R1, R, Z2 - Z) + self.intg(R1, R, Z1 - Z))
Bzu = C * (self.inth(R2, R, Z2 - Z) - self.inth(R2, R, Z1 - Z) - self.inth(R1, R, Z2 - Z) + self.inth(R1, R, Z1 - Z))
# lower coil
Brl = C * (self.intg(R2, R, -Z1 - Z) - self.intg(R2, R, -Z2 - Z) - self.intg(R1, R, -Z1 - Z) + self.intg(R1, R, -Z2 - Z))
Bzl = C * (self.inth(R2, R, -Z1 - Z) - self.inth(R2, R, -Z2 - Z) - self.inth(R1, R, -Z1 - Z) + self.inth(R1, R, -Z2 - Z))
return [Bru + Brl, Bzu + Bzl]
def integral_all(self, R, Z, x):
Br = 0.0
Bz = 0.0
for k in range(0, 9):
rc = x[k]
zc = k * 1.5e-3
B = self.integral(rc, zc, R, Z)
Br = Br + B[0]
Bz = Bz + B[1]
return [Br, Bz]
def evaluate(self, x):
pass
class MyProblemCoilOne(MyProblemCoil):
def evaluate(self, individual):
x = individual.vector
B0 = 2e-3
dxy = 0.5e-3
nx = 8
ny = 8
dx = (5e-3 - dxy) / (nx - 1)
dy = (5e-3 - dxy) / (ny - 1)
f1 = 0.0
for i in range(0, nx):
xx = dxy + i * dx
for j in range(0, ny):
yy = dxy + j * dy
[Br, Bz] = self.integral_all(xx, yy, x)
Bp1s = math.sqrt((Br - 0.0)**2 + (Bz - B0)**2)
f1 = max(f1, Bp1s)
print("value = {}, \tparams = {}".format([f1], x))
return [f1]
class MyProblemCoilMultiTwo1(MyProblemCoil):
def evaluate(self, x):
B0 = 2e-3
dxy = 0.5e-3
nx = 8
ny = 8
dx = (5e-3 - dxy) / (nx - 1)
dy = (5e-3 - dxy) / (ny - 1)
f1 = 0.0
f2 = sum(x) * 1e3
for i in range(0, nx):
xx = dxy + i * dx
for j in range(0, ny):
yy = dxy + j * dy
[Br, Bz] = self.integral_all(xx, yy, x)
Bp1s = math.sqrt((Br - 0.0)**2 + (Bz - B0)**2)
f1 = max(f1, Bp1s)
return [1e3 * f1, 1e3 * f2]
class MyProblemCoilMultiTwo2(MyProblemCoil):
def __init__(self, name):
super().__init__(name, costs=['F1', 'F2'])
def evaluate(self, individual):
x = individual.vector
B0 = 2e-3
dxy = 0.5e-3
nx = 8
ny = 8
dx = (5e-3 - dxy) / (nx - 1)
dy = (5e-3 - dxy) / (ny - 1)
f1 = 0.0
f3 = 0.0
for i in range(0, nx):
xx = dxy + i * dx
for j in range(0, ny):
yy = dxy + j * dy
[Br, Bz] = self.integral_all(xx, yy, x)
Bp1s = math.sqrt((Br - 0.0)**2 + (Bz - B0)**2)
f1 = max(f1, Bp1s)
dxsi = 0.5e-3
[Brp, Bzp] = self.integral_all(xx + dxsi, yy, x)
[Brm, Bzm] = self.integral_all(xx - dxsi, yy, x)
Bp3 = math.sqrt((Brp - Br) ** 2 + (Bzp - Bz) ** 2) + math.sqrt((Brm - Br) ** 2 + (Bzm - Bz) ** 2)
f3 = max(f3, Bp3)
return [1e3 * f1, 1e3 * f3]
class MyProblemCoilMultiThree(MyProblemCoil):
def __init__(self, name):
super().__init__(name, costs=['F1', 'F2', 'F3'])
def evaluate(self, individual):
x = individual.vector
B0 = 2e-3
dxy = 0.5e-3
nx = 8
ny = 8
dx = (5e-3 - dxy) / (nx - 1)
dy = (5e-3 - dxy) / (ny - 1)
f1 = 0.0
f2 = sum(x)*1e3
f3 = 0.0
for i in range(0, nx):
xx = dxy + i * dx
for j in range(0, ny):
yy = dxy + j * dy
[Br, Bz] = self.integral_all(xx, yy, x)
Bp1s = math.sqrt((Br - 0.0)**2 + (Bz - B0)**2)
f1 = max(f1, Bp1s)
dxsi = 0.5e-3
[Brp, Bzp] = self.integral_all(xx + dxsi, yy, x)
[Brm, Bzm] = self.integral_all(xx - dxsi, yy, x)
Bp3 = math.sqrt((Brp - Br) ** 2 + (Bzp - Bz) ** 2) + math.sqrt((Brm - Br) ** 2 + (Bzm - Bz) ** 2)
f3 = max(f3, Bp3)
return [f1, f2, f3]
class MyProblemBooth(Problem):
""" Describe simple one objective optimization problem. """
def set(self):
self.parameters = {'x_1': {'initial_value': 2.5, 'bounds': [-10, 10]},
'x_2': {'initial_value': 1.5, 'bounds': [-10, 10]}}
self.costs = [{'name': 'F'}]
def evaluate(self, x):
return [Booth.eval(x)]
class TestSimpleOptimization(unittest.TestCase):
""" Tests optimization problem."""
def xtest_local_problem_booth(self):
problem = MyProblemBooth("MyProblemBooth")
problem.surrogate = SurrogateModelScikit(problem)
#kernel = 1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1)
#problem.surrogate.regressor = GaussianProcessRegressor(kernel=kernel)
#problem.surrogate.has_epsilon = True
problem.surrogate.regressor = ExtraTreesRegressor(n_estimators=10)
# problem.surrogate.regressor = DecisionTreeRegressor()
problem.surrogate.train_step = 50
problem.surrogate.score_threshold = 0.0
# sweep analysis (for training)
gen = LHSGenerator(problem.parameters)
gen.init(problem.surrogate.train_step)
algorithm_sweep = SweepAlgorithm(problem, generator=gen)
algorithm_sweep.run()
# optimization
algorithm = NLopt(problem)
algorithm.options['algorithm'] = LN_BOBYQA
algorithm.options['n_iterations'] = 200
algorithm.run()
problem.logger.info("surrogate.predict_counter: {}".format(problem.surrogate.predict_counter))
problem.logger.info("surrogate.eval_counter: {}".format(problem.surrogate.eval_counter))
# print(problem.surrogate.x_data)
# print(problem.surrogate.y_data)
results = Results(problem)
optimum = results.find_optimum('F')
self.assertAlmostEqual(optimum, 1e-6, 3)
"""
kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),
1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),
1.0 * ExpSineSquared(length_scale=1.0, periodicity=3.0,
length_scale_bounds=(0.1, 10.0),
periodicity_bounds=(1.0, 10.0)),
ConstantKernel(0.1, (0.01, 10.0))
* (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2),
1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-5, 1e5), nu=1.5)]
for kernel in kernels:
print(kernel)
problem.surrogate = SurrogateModelScikit(problem)
# problem.surrogate.regressor = GaussianProcessRegressor(kernel=kernel)
# set threshold
problem.surrogate.sigma_threshold = 0.1
problem.surrogate.train_step = 10
algorithm = NLopt(problem)
algorithm.options['algorithm'] = LN_BOBYQA
algorithm.options['n_iterations'] = 200
algorithm.run()
problem.logger.info("surrogate.predict_counter: {}".format(problem.surrogate.predict_counter))
problem.logger.info("surrogate.eval_counter: {}".format(problem.surrogate.eval_counter))
"""
def xtest_local_problem_coil_one(self):
problem = MyProblemCoilOne("MyProblemCoilOne")
# enable surrogate
problem.surrogate = SurrogateModelScikit(problem)
problem.surrogate.regressor = DecisionTreeRegressor()
problem.surrogate.train_step = 30
problem.surrogate.score_threshold = 0.0
# sweep analysis (for training)
gen = LHSGenerator(problem.parameters)
gen.init(problem.surrogate.train_step)
algorithm_sweep = SweepAlgorithm(problem, generator=gen)
algorithm_sweep.run()
# optimization
algorithm = NLopt(problem)
algorithm.options['algorithm'] = LN_BOBYQA
algorithm.options['n_iterations'] = 50
algorithm.run()
problem.logger.info("surrogate.predict_counter: {}".format(problem.surrogate.predict_counter))
problem.logger.info("surrogate.eval_counter: {}".format(problem.surrogate.eval_counter))
results = Results(problem)
optimum = results.find_optimum('F1')
self.assertAlmostEqual(optimum, 5e-5, 4)
def xtest_local_problem_coil_one_bobyqa_optimum(self):
problem = MyProblemCoilOne("MyProblemCoilOne")
# optimization
algorithm = NLopt(problem)
algorithm.options['algorithm'] = LN_BOBYQA
algorithm.options['n_iterations'] = 500
algorithm.run()
problem.logger.info("surrogate.predict_counter: {}".format(problem.surrogate.predict_counter))
problem.logger.info("surrogate.eval_counter: {}".format(problem.surrogate.eval_counter))
results = Results(problem)
optimum = results.find_optimum('F1')
print("BOBYQA = {}".format(optimum))
# Bayes = 3.846087978861188e-05
# self.assertAlmostEqual(optimum, 1e-6, 4)
def xtest_local_problem_coil_one_bayesopt_optimum(self):
problem = MyProblemCoilOne("MyProblemCoilOne")
# optimization
algorithm = BayesOptSerial(problem)
algorithm.options['n_iterations'] = 500
algorithm.run()
problem.logger.info("surrogate.predict_counter: {}".format(problem.surrogate.predict_counter))
problem.logger.info("surrogate.eval_counter: {}".format(problem.surrogate.eval_counter))
results = Results(problem)
optimum = results.find_optimum('F1')
print("Bayes = {}".format(optimum))
# Bayes = 4.347142168223674e-05
# self.assertAlmostEqual(optimum, 1e-6, 4)
def xtest_local_problem_coil_one_nsgaii_optimum(self):
problem = MyProblemCoilOne("MyProblemCoilOne")
# optimization
algorithm = NSGAII(problem)
algorithm.options['max_population_number'] = 100
algorithm.options['max_population_size'] = 50
algorithm.run()
problem.logger.info("surrogate.predict_counter: {}".format(problem.surrogate.predict_counter))
problem.logger.info("surrogate.eval_counter: {}".format(problem.surrogate.eval_counter))
results = Results(problem)
optimum = results.find_optimum('F1')
print("NSGAII = {}".format(optimum))
# NSGAII = 8.099681801799041e-06
# self.assertAlmostEqual(optimum, 1e-6, 4)
"""
class TestSimpleOptimization(unittest.TestCase):
def test_local_problem_booth(self):
problem = MyProblemCoilOne("LocalPythonProblem")
#problem = MyProblemMultiTwo2("LocalPythonProblem")
#algorithm = BayesOptSerial(problem)
#algorithm.options['verbose_level'] = 0
#algorithm.options['n_iterations'] = 100
algorithm = NLopt(problem)
algorithm.options['algorithm'] = LN_BOBYQA
algorithm.options['n_iterations'] = 200
#algorithm = NSGA_II(problem)
#algorithm.options['max_population_number'] = 80
#algorithm.options['max_population_size'] = 20
t_s = time.time()
algorithm.run()
t = time.time() - t_s
print('Elapsed time:', t)
print("surrogate_predict_counter: ", problem.surrogate_predict_counter)
print("surrogate_eval_counter: ", problem.surrogate_eval_counter)
results = Results(problem)
optimum = results.find_minimum('F1')
print(optimum)
self.assertAlmostEqual(optimum, 1e-6, 3)
"""
"""
def figures(name):
import matplotlib
matplotlib.use('Agg')
import pylab as pl
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib import rc
data_store = SqliteDataStore(database_file=name + ".sqlite")
problem = ProblemSqliteDataStore(data_store)
data_x = []
data_y = []
pareto_front_x = []
pareto_front_y = []
for population in problem.populations:
if len(population.individuals) > 1:
for individual in population.individuals:
data_x.append(individual.costs[0])
data_y.append(individual.costs[1])
results = GraphicalResults(problem)
pareto_front_x, pareto_front_y = results.find_pareto({'F1': Results.MINIMIZE, 'F2': Results.MINIMIZE})
pl.rcParams['figure.figsize'] = 10, 4
pl.rcParams['legend.fontsize'] = 17
pl.rcParams['text.usetex'] = True
pl.rcParams['font.size'] = 20
pl.rcParams['font.serif'] = "Times"
pl.figure()
pl.plot(data_x, data_y, 'o', color='#d0d0d0', markersize=3)
pl.plot(pareto_front_x, pareto_front_y, 'o', markersize=4, label="Pareto Front")
pl.xlim(1e-4, 8e-4)
pl.ylim(0, 1e-3)
pl.grid(True)
pl.tight_layout()
pl.legend(loc="upper right")
pl.xlabel("$F_1$")
pl.ylabel("$F_2$")
pl.savefig(name + ".pdf", dpi=200)
"""
if __name__ == '__main__':
unittest.main()
| [
"sklearn.tree.DecisionTreeRegressor",
"scipy.integrate.quad",
"sklearn.ensemble.ExtraTreesRegressor",
"math.sqrt",
"math.log",
"math.cos",
"unittest.main"
] | [((15380, 15395), 'unittest.main', 'unittest.main', ([], {}), '()\n', (15393, 15395), False, 'import unittest\n'), ((7312, 7348), 'sklearn.ensemble.ExtraTreesRegressor', 'ExtraTreesRegressor', ([], {'n_estimators': '(10)'}), '(n_estimators=10)\n', (7331, 7348), False, 'from sklearn.ensemble import ExtraTreesRegressor\n'), ((9904, 9927), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {}), '()\n', (9925, 9927), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((1668, 1731), 'scipy.integrate.quad', 'integrate.quad', (['f', '(0)', '(2.0 * math.pi)'], {'epsabs': '(0.001)', 'epsrel': '(0.001)'}), '(f, 0, 2.0 * math.pi, epsabs=0.001, epsrel=0.001)\n', (1682, 1731), False, 'from scipy import integrate\n'), ((1899, 1962), 'scipy.integrate.quad', 'integrate.quad', (['f', '(0)', '(2.0 * math.pi)'], {'epsabs': '(0.001)', 'epsrel': '(0.001)'}), '(f, 0, 2.0 * math.pi, epsabs=0.001, epsrel=0.001)\n', (1913, 1962), False, 'from scipy import integrate\n'), ((1639, 1652), 'math.cos', 'math.cos', (['phi'], {}), '(phi)\n', (1647, 1652), False, 'import math\n'), ((2278, 2295), 'math.log', 'math.log', (['(R2 / R1)'], {}), '(R2 / R1)\n', (2286, 2295), False, 'import math\n'), ((3691, 3734), 'math.sqrt', 'math.sqrt', (['((Br - 0.0) ** 2 + (Bz - B0) ** 2)'], {}), '((Br - 0.0) ** 2 + (Bz - B0) ** 2)\n', (3700, 3734), False, 'import math\n'), ((4319, 4362), 'math.sqrt', 'math.sqrt', (['((Br - 0.0) ** 2 + (Bz - B0) ** 2)'], {}), '((Br - 0.0) ** 2 + (Bz - B0) ** 2)\n', (4328, 4362), False, 'import math\n'), ((5015, 5058), 'math.sqrt', 'math.sqrt', (['((Br - 0.0) ** 2 + (Bz - B0) ** 2)'], {}), '((Br - 0.0) ** 2 + (Bz - B0) ** 2)\n', (5024, 5058), False, 'import math\n'), ((6051, 6094), 'math.sqrt', 'math.sqrt', (['((Br - 0.0) ** 2 + (Bz - B0) ** 2)'], {}), '((Br - 0.0) ** 2 + (Bz - B0) ** 2)\n', (6060, 6094), False, 'import math\n'), ((5273, 5317), 'math.sqrt', 'math.sqrt', (['((Brp - Br) ** 2 + (Bzp - Bz) ** 2)'], {}), '((Brp - Br) ** 2 + (Bzp - Bz) ** 2)\n', (5282, 5317), False, 'import math\n'), ((5320, 5364), 'math.sqrt', 'math.sqrt', (['((Brm - Br) ** 2 + (Bzm - Bz) ** 2)'], {}), '((Brm - Br) ** 2 + (Bzm - Bz) ** 2)\n', (5329, 5364), False, 'import math\n'), ((6309, 6353), 'math.sqrt', 'math.sqrt', (['((Brp - Br) ** 2 + (Bzp - Bz) ** 2)'], {}), '((Brp - Br) ** 2 + (Bzp - Bz) ** 2)\n', (6318, 6353), False, 'import math\n'), ((6356, 6400), 'math.sqrt', 'math.sqrt', (['((Brm - Br) ** 2 + (Bzm - Bz) ** 2)'], {}), '((Brm - Br) ** 2 + (Bzm - Bz) ** 2)\n', (6365, 6400), False, 'import math\n'), ((1443, 1456), 'math.cos', 'math.cos', (['phi'], {}), '(phi)\n', (1451, 1456), False, 'import math\n'), ((1592, 1605), 'math.cos', 'math.cos', (['phi'], {}), '(phi)\n', (1600, 1605), False, 'import math\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'RouteDestinationArgs',
'RouteHeaderArgs',
'RouteSourceArgs',
'UpstreamHealthchecksArgs',
'UpstreamHealthchecksActiveArgs',
'UpstreamHealthchecksActiveHealthyArgs',
'UpstreamHealthchecksActiveUnhealthyArgs',
'UpstreamHealthchecksPassiveArgs',
'UpstreamHealthchecksPassiveHealthyArgs',
'UpstreamHealthchecksPassiveUnhealthyArgs',
]
@pulumi.input_type
class RouteDestinationArgs:
def __init__(__self__, *,
ip: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None):
if ip is not None:
pulumi.set(__self__, "ip", ip)
if port is not None:
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "ip")
@ip.setter
def ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@pulumi.input_type
class RouteHeaderArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
values: pulumi.Input[Sequence[pulumi.Input[str]]]):
"""
:param pulumi.Input[str] name: The name of the route
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the route
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def values(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
return pulumi.get(self, "values")
@values.setter
def values(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "values", value)
@pulumi.input_type
class RouteSourceArgs:
def __init__(__self__, *,
ip: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None):
if ip is not None:
pulumi.set(__self__, "ip", ip)
if port is not None:
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "ip")
@ip.setter
def ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@pulumi.input_type
class UpstreamHealthchecksArgs:
def __init__(__self__, *,
active: Optional[pulumi.Input['UpstreamHealthchecksActiveArgs']] = None,
passive: Optional[pulumi.Input['UpstreamHealthchecksPassiveArgs']] = None):
if active is not None:
pulumi.set(__self__, "active", active)
if passive is not None:
pulumi.set(__self__, "passive", passive)
@property
@pulumi.getter
def active(self) -> Optional[pulumi.Input['UpstreamHealthchecksActiveArgs']]:
return pulumi.get(self, "active")
@active.setter
def active(self, value: Optional[pulumi.Input['UpstreamHealthchecksActiveArgs']]):
pulumi.set(self, "active", value)
@property
@pulumi.getter
def passive(self) -> Optional[pulumi.Input['UpstreamHealthchecksPassiveArgs']]:
return pulumi.get(self, "passive")
@passive.setter
def passive(self, value: Optional[pulumi.Input['UpstreamHealthchecksPassiveArgs']]):
pulumi.set(self, "passive", value)
@pulumi.input_type
class UpstreamHealthchecksActiveArgs:
def __init__(__self__, *,
concurrency: Optional[pulumi.Input[int]] = None,
healthy: Optional[pulumi.Input['UpstreamHealthchecksActiveHealthyArgs']] = None,
http_path: Optional[pulumi.Input[str]] = None,
https_sni: Optional[pulumi.Input[str]] = None,
https_verify_certificate: Optional[pulumi.Input[bool]] = None,
timeout: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None,
unhealthy: Optional[pulumi.Input['UpstreamHealthchecksActiveUnhealthyArgs']] = None):
if concurrency is not None:
pulumi.set(__self__, "concurrency", concurrency)
if healthy is not None:
pulumi.set(__self__, "healthy", healthy)
if http_path is not None:
pulumi.set(__self__, "http_path", http_path)
if https_sni is not None:
pulumi.set(__self__, "https_sni", https_sni)
if https_verify_certificate is not None:
pulumi.set(__self__, "https_verify_certificate", https_verify_certificate)
if timeout is not None:
pulumi.set(__self__, "timeout", timeout)
if type is not None:
pulumi.set(__self__, "type", type)
if unhealthy is not None:
pulumi.set(__self__, "unhealthy", unhealthy)
@property
@pulumi.getter
def concurrency(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "concurrency")
@concurrency.setter
def concurrency(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "concurrency", value)
@property
@pulumi.getter
def healthy(self) -> Optional[pulumi.Input['UpstreamHealthchecksActiveHealthyArgs']]:
return pulumi.get(self, "healthy")
@healthy.setter
def healthy(self, value: Optional[pulumi.Input['UpstreamHealthchecksActiveHealthyArgs']]):
pulumi.set(self, "healthy", value)
@property
@pulumi.getter(name="httpPath")
def http_path(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "http_path")
@http_path.setter
def http_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "http_path", value)
@property
@pulumi.getter(name="httpsSni")
def https_sni(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "https_sni")
@https_sni.setter
def https_sni(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "https_sni", value)
@property
@pulumi.getter(name="httpsVerifyCertificate")
def https_verify_certificate(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "https_verify_certificate")
@https_verify_certificate.setter
def https_verify_certificate(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "https_verify_certificate", value)
@property
@pulumi.getter
def timeout(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "timeout")
@timeout.setter
def timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeout", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def unhealthy(self) -> Optional[pulumi.Input['UpstreamHealthchecksActiveUnhealthyArgs']]:
return pulumi.get(self, "unhealthy")
@unhealthy.setter
def unhealthy(self, value: Optional[pulumi.Input['UpstreamHealthchecksActiveUnhealthyArgs']]):
pulumi.set(self, "unhealthy", value)
@pulumi.input_type
class UpstreamHealthchecksActiveHealthyArgs:
def __init__(__self__, *,
http_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,
interval: Optional[pulumi.Input[int]] = None,
successes: Optional[pulumi.Input[int]] = None):
if http_statuses is not None:
pulumi.set(__self__, "http_statuses", http_statuses)
if interval is not None:
pulumi.set(__self__, "interval", interval)
if successes is not None:
pulumi.set(__self__, "successes", successes)
@property
@pulumi.getter(name="httpStatuses")
def http_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
return pulumi.get(self, "http_statuses")
@http_statuses.setter
def http_statuses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
pulumi.set(self, "http_statuses", value)
@property
@pulumi.getter
def interval(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "interval")
@interval.setter
def interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "interval", value)
@property
@pulumi.getter
def successes(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "successes")
@successes.setter
def successes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "successes", value)
@pulumi.input_type
class UpstreamHealthchecksActiveUnhealthyArgs:
def __init__(__self__, *,
http_failures: Optional[pulumi.Input[int]] = None,
http_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,
interval: Optional[pulumi.Input[int]] = None,
tcp_failures: Optional[pulumi.Input[int]] = None,
timeouts: Optional[pulumi.Input[int]] = None):
if http_failures is not None:
pulumi.set(__self__, "http_failures", http_failures)
if http_statuses is not None:
pulumi.set(__self__, "http_statuses", http_statuses)
if interval is not None:
pulumi.set(__self__, "interval", interval)
if tcp_failures is not None:
pulumi.set(__self__, "tcp_failures", tcp_failures)
if timeouts is not None:
pulumi.set(__self__, "timeouts", timeouts)
@property
@pulumi.getter(name="httpFailures")
def http_failures(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "http_failures")
@http_failures.setter
def http_failures(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "http_failures", value)
@property
@pulumi.getter(name="httpStatuses")
def http_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
return pulumi.get(self, "http_statuses")
@http_statuses.setter
def http_statuses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
pulumi.set(self, "http_statuses", value)
@property
@pulumi.getter
def interval(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "interval")
@interval.setter
def interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "interval", value)
@property
@pulumi.getter(name="tcpFailures")
def tcp_failures(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "tcp_failures")
@tcp_failures.setter
def tcp_failures(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "tcp_failures", value)
@property
@pulumi.getter
def timeouts(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "timeouts")
@timeouts.setter
def timeouts(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeouts", value)
@pulumi.input_type
class UpstreamHealthchecksPassiveArgs:
def __init__(__self__, *,
healthy: Optional[pulumi.Input['UpstreamHealthchecksPassiveHealthyArgs']] = None,
type: Optional[pulumi.Input[str]] = None,
unhealthy: Optional[pulumi.Input['UpstreamHealthchecksPassiveUnhealthyArgs']] = None):
if healthy is not None:
pulumi.set(__self__, "healthy", healthy)
if type is not None:
pulumi.set(__self__, "type", type)
if unhealthy is not None:
pulumi.set(__self__, "unhealthy", unhealthy)
@property
@pulumi.getter
def healthy(self) -> Optional[pulumi.Input['UpstreamHealthchecksPassiveHealthyArgs']]:
return pulumi.get(self, "healthy")
@healthy.setter
def healthy(self, value: Optional[pulumi.Input['UpstreamHealthchecksPassiveHealthyArgs']]):
pulumi.set(self, "healthy", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def unhealthy(self) -> Optional[pulumi.Input['UpstreamHealthchecksPassiveUnhealthyArgs']]:
return pulumi.get(self, "unhealthy")
@unhealthy.setter
def unhealthy(self, value: Optional[pulumi.Input['UpstreamHealthchecksPassiveUnhealthyArgs']]):
pulumi.set(self, "unhealthy", value)
@pulumi.input_type
class UpstreamHealthchecksPassiveHealthyArgs:
def __init__(__self__, *,
http_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,
successes: Optional[pulumi.Input[int]] = None):
if http_statuses is not None:
pulumi.set(__self__, "http_statuses", http_statuses)
if successes is not None:
pulumi.set(__self__, "successes", successes)
@property
@pulumi.getter(name="httpStatuses")
def http_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
return pulumi.get(self, "http_statuses")
@http_statuses.setter
def http_statuses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
pulumi.set(self, "http_statuses", value)
@property
@pulumi.getter
def successes(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "successes")
@successes.setter
def successes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "successes", value)
@pulumi.input_type
class UpstreamHealthchecksPassiveUnhealthyArgs:
def __init__(__self__, *,
http_failures: Optional[pulumi.Input[int]] = None,
http_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,
tcp_failures: Optional[pulumi.Input[int]] = None,
timeouts: Optional[pulumi.Input[int]] = None):
if http_failures is not None:
pulumi.set(__self__, "http_failures", http_failures)
if http_statuses is not None:
pulumi.set(__self__, "http_statuses", http_statuses)
if tcp_failures is not None:
pulumi.set(__self__, "tcp_failures", tcp_failures)
if timeouts is not None:
pulumi.set(__self__, "timeouts", timeouts)
@property
@pulumi.getter(name="httpFailures")
def http_failures(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "http_failures")
@http_failures.setter
def http_failures(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "http_failures", value)
@property
@pulumi.getter(name="httpStatuses")
def http_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
return pulumi.get(self, "http_statuses")
@http_statuses.setter
def http_statuses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
pulumi.set(self, "http_statuses", value)
@property
@pulumi.getter(name="tcpFailures")
def tcp_failures(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "tcp_failures")
@tcp_failures.setter
def tcp_failures(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "tcp_failures", value)
@property
@pulumi.getter
def timeouts(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "timeouts")
@timeouts.setter
def timeouts(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeouts", value)
| [
"pulumi.getter",
"pulumi.set",
"pulumi.get"
] | [((6350, 6380), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpPath"""'}), "(name='httpPath')\n", (6363, 6380), False, 'import pulumi\n'), ((6631, 6661), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpsSni"""'}), "(name='httpsSni')\n", (6644, 6661), False, 'import pulumi\n'), ((6912, 6956), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpsVerifyCertificate"""'}), "(name='httpsVerifyCertificate')\n", (6925, 6956), False, 'import pulumi\n'), ((8715, 8749), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpStatuses"""'}), "(name='httpStatuses')\n", (8728, 8749), False, 'import pulumi\n'), ((10525, 10559), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpFailures"""'}), "(name='httpFailures')\n", (10538, 10559), False, 'import pulumi\n'), ((10830, 10864), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpStatuses"""'}), "(name='httpStatuses')\n", (10843, 10864), False, 'import pulumi\n'), ((11442, 11475), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""tcpFailures"""'}), "(name='tcpFailures')\n", (11455, 11475), False, 'import pulumi\n'), ((13961, 13995), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpStatuses"""'}), "(name='httpStatuses')\n", (13974, 13995), False, 'import pulumi\n'), ((15362, 15396), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpFailures"""'}), "(name='httpFailures')\n", (15375, 15396), False, 'import pulumi\n'), ((15667, 15701), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""httpStatuses"""'}), "(name='httpStatuses')\n", (15680, 15701), False, 'import pulumi\n'), ((16020, 16053), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""tcpFailures"""'}), "(name='tcpFailures')\n", (16033, 16053), False, 'import pulumi\n'), ((1154, 1176), 'pulumi.get', 'pulumi.get', (['self', '"""ip"""'], {}), "(self, 'ip')\n", (1164, 1176), False, 'import pulumi\n'), ((1255, 1284), 'pulumi.set', 'pulumi.set', (['self', '"""ip"""', 'value'], {}), "(self, 'ip', value)\n", (1265, 1284), False, 'import pulumi\n'), ((1385, 1409), 'pulumi.get', 'pulumi.get', (['self', '"""port"""'], {}), "(self, 'port')\n", (1395, 1409), False, 'import pulumi\n'), ((1492, 1523), 'pulumi.set', 'pulumi.set', (['self', '"""port"""', 'value'], {}), "(self, 'port', value)\n", (1502, 1523), False, 'import pulumi\n'), ((1802, 1836), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (1812, 1836), False, 'import pulumi\n'), ((1845, 1883), 'pulumi.set', 'pulumi.set', (['__self__', '"""values"""', 'values'], {}), "(__self__, 'values', values)\n", (1855, 1883), False, 'import pulumi\n'), ((2028, 2052), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (2038, 2052), False, 'import pulumi\n'), ((2125, 2156), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (2135, 2156), False, 'import pulumi\n'), ((2273, 2299), 'pulumi.get', 'pulumi.get', (['self', '"""values"""'], {}), "(self, 'values')\n", (2283, 2299), False, 'import pulumi\n'), ((2400, 2433), 'pulumi.set', 'pulumi.set', (['self', '"""values"""', 'value'], {}), "(self, 'values', value)\n", (2410, 2433), False, 'import pulumi\n'), ((2869, 2891), 'pulumi.get', 'pulumi.get', (['self', '"""ip"""'], {}), "(self, 'ip')\n", (2879, 2891), False, 'import pulumi\n'), ((2970, 2999), 'pulumi.set', 'pulumi.set', (['self', '"""ip"""', 'value'], {}), "(self, 'ip', value)\n", (2980, 2999), False, 'import pulumi\n'), ((3100, 3124), 'pulumi.get', 'pulumi.get', (['self', '"""port"""'], {}), "(self, 'port')\n", (3110, 3124), False, 'import pulumi\n'), ((3207, 3238), 'pulumi.set', 'pulumi.set', (['self', '"""port"""', 'value'], {}), "(self, 'port', value)\n", (3217, 3238), False, 'import pulumi\n'), ((3803, 3829), 'pulumi.get', 'pulumi.get', (['self', '"""active"""'], {}), "(self, 'active')\n", (3813, 3829), False, 'import pulumi\n'), ((3945, 3978), 'pulumi.set', 'pulumi.set', (['self', '"""active"""', 'value'], {}), "(self, 'active', value)\n", (3955, 3978), False, 'import pulumi\n'), ((4112, 4139), 'pulumi.get', 'pulumi.get', (['self', '"""passive"""'], {}), "(self, 'passive')\n", (4122, 4139), False, 'import pulumi\n'), ((4258, 4292), 'pulumi.set', 'pulumi.set', (['self', '"""passive"""', 'value'], {}), "(self, 'passive', value)\n", (4268, 4292), False, 'import pulumi\n'), ((5837, 5868), 'pulumi.get', 'pulumi.get', (['self', '"""concurrency"""'], {}), "(self, 'concurrency')\n", (5847, 5868), False, 'import pulumi\n'), ((5965, 6003), 'pulumi.set', 'pulumi.set', (['self', '"""concurrency"""', 'value'], {}), "(self, 'concurrency', value)\n", (5975, 6003), False, 'import pulumi\n'), ((6143, 6170), 'pulumi.get', 'pulumi.get', (['self', '"""healthy"""'], {}), "(self, 'healthy')\n", (6153, 6170), False, 'import pulumi\n'), ((6295, 6329), 'pulumi.set', 'pulumi.set', (['self', '"""healthy"""', 'value'], {}), "(self, 'healthy', value)\n", (6305, 6329), False, 'import pulumi\n'), ((6452, 6481), 'pulumi.get', 'pulumi.get', (['self', '"""http_path"""'], {}), "(self, 'http_path')\n", (6462, 6481), False, 'import pulumi\n'), ((6574, 6610), 'pulumi.set', 'pulumi.set', (['self', '"""http_path"""', 'value'], {}), "(self, 'http_path', value)\n", (6584, 6610), False, 'import pulumi\n'), ((6733, 6762), 'pulumi.get', 'pulumi.get', (['self', '"""https_sni"""'], {}), "(self, 'https_sni')\n", (6743, 6762), False, 'import pulumi\n'), ((6855, 6891), 'pulumi.set', 'pulumi.set', (['self', '"""https_sni"""', 'value'], {}), "(self, 'https_sni', value)\n", (6865, 6891), False, 'import pulumi\n'), ((7044, 7088), 'pulumi.get', 'pulumi.get', (['self', '"""https_verify_certificate"""'], {}), "(self, 'https_verify_certificate')\n", (7054, 7088), False, 'import pulumi\n'), ((7212, 7263), 'pulumi.set', 'pulumi.set', (['self', '"""https_verify_certificate"""', 'value'], {}), "(self, 'https_verify_certificate', value)\n", (7222, 7263), False, 'import pulumi\n'), ((7367, 7394), 'pulumi.get', 'pulumi.get', (['self', '"""timeout"""'], {}), "(self, 'timeout')\n", (7377, 7394), False, 'import pulumi\n'), ((7483, 7517), 'pulumi.set', 'pulumi.set', (['self', '"""timeout"""', 'value'], {}), "(self, 'timeout', value)\n", (7493, 7517), False, 'import pulumi\n'), ((7618, 7642), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (7628, 7642), False, 'import pulumi\n'), ((7725, 7756), 'pulumi.set', 'pulumi.set', (['self', '"""type"""', 'value'], {}), "(self, 'type', value)\n", (7735, 7756), False, 'import pulumi\n'), ((7900, 7929), 'pulumi.get', 'pulumi.get', (['self', '"""unhealthy"""'], {}), "(self, 'unhealthy')\n", (7910, 7929), False, 'import pulumi\n'), ((8060, 8096), 'pulumi.set', 'pulumi.set', (['self', '"""unhealthy"""', 'value'], {}), "(self, 'unhealthy', value)\n", (8070, 8096), False, 'import pulumi\n'), ((8849, 8882), 'pulumi.get', 'pulumi.get', (['self', '"""http_statuses"""'], {}), "(self, 'http_statuses')\n", (8859, 8882), False, 'import pulumi\n'), ((9007, 9047), 'pulumi.set', 'pulumi.set', (['self', '"""http_statuses"""', 'value'], {}), "(self, 'http_statuses', value)\n", (9017, 9047), False, 'import pulumi\n'), ((9152, 9180), 'pulumi.get', 'pulumi.get', (['self', '"""interval"""'], {}), "(self, 'interval')\n", (9162, 9180), False, 'import pulumi\n'), ((9271, 9306), 'pulumi.set', 'pulumi.set', (['self', '"""interval"""', 'value'], {}), "(self, 'interval', value)\n", (9281, 9306), False, 'import pulumi\n'), ((9412, 9441), 'pulumi.get', 'pulumi.get', (['self', '"""successes"""'], {}), "(self, 'successes')\n", (9422, 9441), False, 'import pulumi\n'), ((9534, 9570), 'pulumi.set', 'pulumi.set', (['self', '"""successes"""', 'value'], {}), "(self, 'successes', value)\n", (9544, 9570), False, 'import pulumi\n'), ((10635, 10668), 'pulumi.get', 'pulumi.get', (['self', '"""http_failures"""'], {}), "(self, 'http_failures')\n", (10645, 10668), False, 'import pulumi\n'), ((10769, 10809), 'pulumi.set', 'pulumi.set', (['self', '"""http_failures"""', 'value'], {}), "(self, 'http_failures', value)\n", (10779, 10809), False, 'import pulumi\n'), ((10964, 10997), 'pulumi.get', 'pulumi.get', (['self', '"""http_statuses"""'], {}), "(self, 'http_statuses')\n", (10974, 10997), False, 'import pulumi\n'), ((11122, 11162), 'pulumi.set', 'pulumi.set', (['self', '"""http_statuses"""', 'value'], {}), "(self, 'http_statuses', value)\n", (11132, 11162), False, 'import pulumi\n'), ((11267, 11295), 'pulumi.get', 'pulumi.get', (['self', '"""interval"""'], {}), "(self, 'interval')\n", (11277, 11295), False, 'import pulumi\n'), ((11386, 11421), 'pulumi.set', 'pulumi.set', (['self', '"""interval"""', 'value'], {}), "(self, 'interval', value)\n", (11396, 11421), False, 'import pulumi\n'), ((11550, 11582), 'pulumi.get', 'pulumi.get', (['self', '"""tcp_failures"""'], {}), "(self, 'tcp_failures')\n", (11560, 11582), False, 'import pulumi\n'), ((11681, 11720), 'pulumi.set', 'pulumi.set', (['self', '"""tcp_failures"""', 'value'], {}), "(self, 'tcp_failures', value)\n", (11691, 11720), False, 'import pulumi\n'), ((11825, 11853), 'pulumi.get', 'pulumi.get', (['self', '"""timeouts"""'], {}), "(self, 'timeouts')\n", (11835, 11853), False, 'import pulumi\n'), ((11944, 11979), 'pulumi.set', 'pulumi.set', (['self', '"""timeouts"""', 'value'], {}), "(self, 'timeouts', value)\n", (11954, 11979), False, 'import pulumi\n'), ((12724, 12751), 'pulumi.get', 'pulumi.get', (['self', '"""healthy"""'], {}), "(self, 'healthy')\n", (12734, 12751), False, 'import pulumi\n'), ((12877, 12911), 'pulumi.set', 'pulumi.set', (['self', '"""healthy"""', 'value'], {}), "(self, 'healthy', value)\n", (12887, 12911), False, 'import pulumi\n'), ((13012, 13036), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (13022, 13036), False, 'import pulumi\n'), ((13119, 13150), 'pulumi.set', 'pulumi.set', (['self', '"""type"""', 'value'], {}), "(self, 'type', value)\n", (13129, 13150), False, 'import pulumi\n'), ((13295, 13324), 'pulumi.get', 'pulumi.get', (['self', '"""unhealthy"""'], {}), "(self, 'unhealthy')\n", (13305, 13324), False, 'import pulumi\n'), ((13456, 13492), 'pulumi.set', 'pulumi.set', (['self', '"""unhealthy"""', 'value'], {}), "(self, 'unhealthy', value)\n", (13466, 13492), False, 'import pulumi\n'), ((14095, 14128), 'pulumi.get', 'pulumi.get', (['self', '"""http_statuses"""'], {}), "(self, 'http_statuses')\n", (14105, 14128), False, 'import pulumi\n'), ((14253, 14293), 'pulumi.set', 'pulumi.set', (['self', '"""http_statuses"""', 'value'], {}), "(self, 'http_statuses', value)\n", (14263, 14293), False, 'import pulumi\n'), ((14399, 14428), 'pulumi.get', 'pulumi.get', (['self', '"""successes"""'], {}), "(self, 'successes')\n", (14409, 14428), False, 'import pulumi\n'), ((14521, 14557), 'pulumi.set', 'pulumi.set', (['self', '"""successes"""', 'value'], {}), "(self, 'successes', value)\n", (14531, 14557), False, 'import pulumi\n'), ((15472, 15505), 'pulumi.get', 'pulumi.get', (['self', '"""http_failures"""'], {}), "(self, 'http_failures')\n", (15482, 15505), False, 'import pulumi\n'), ((15606, 15646), 'pulumi.set', 'pulumi.set', (['self', '"""http_failures"""', 'value'], {}), "(self, 'http_failures', value)\n", (15616, 15646), False, 'import pulumi\n'), ((15801, 15834), 'pulumi.get', 'pulumi.get', (['self', '"""http_statuses"""'], {}), "(self, 'http_statuses')\n", (15811, 15834), False, 'import pulumi\n'), ((15959, 15999), 'pulumi.set', 'pulumi.set', (['self', '"""http_statuses"""', 'value'], {}), "(self, 'http_statuses', value)\n", (15969, 15999), False, 'import pulumi\n'), ((16128, 16160), 'pulumi.get', 'pulumi.get', (['self', '"""tcp_failures"""'], {}), "(self, 'tcp_failures')\n", (16138, 16160), False, 'import pulumi\n'), ((16259, 16298), 'pulumi.set', 'pulumi.set', (['self', '"""tcp_failures"""', 'value'], {}), "(self, 'tcp_failures', value)\n", (16269, 16298), False, 'import pulumi\n'), ((16403, 16431), 'pulumi.get', 'pulumi.get', (['self', '"""timeouts"""'], {}), "(self, 'timeouts')\n", (16413, 16431), False, 'import pulumi\n'), ((16522, 16557), 'pulumi.set', 'pulumi.set', (['self', '"""timeouts"""', 'value'], {}), "(self, 'timeouts', value)\n", (16532, 16557), False, 'import pulumi\n'), ((949, 979), 'pulumi.set', 'pulumi.set', (['__self__', '"""ip"""', 'ip'], {}), "(__self__, 'ip', ip)\n", (959, 979), False, 'import pulumi\n'), ((1021, 1055), 'pulumi.set', 'pulumi.set', (['__self__', '"""port"""', 'port'], {}), "(__self__, 'port', port)\n", (1031, 1055), False, 'import pulumi\n'), ((2664, 2694), 'pulumi.set', 'pulumi.set', (['__self__', '"""ip"""', 'ip'], {}), "(__self__, 'ip', ip)\n", (2674, 2694), False, 'import pulumi\n'), ((2736, 2770), 'pulumi.set', 'pulumi.set', (['__self__', '"""port"""', 'port'], {}), "(__self__, 'port', port)\n", (2746, 2770), False, 'import pulumi\n'), ((3548, 3586), 'pulumi.set', 'pulumi.set', (['__self__', '"""active"""', 'active'], {}), "(__self__, 'active', active)\n", (3558, 3586), False, 'import pulumi\n'), ((3631, 3671), 'pulumi.set', 'pulumi.set', (['__self__', '"""passive"""', 'passive'], {}), "(__self__, 'passive', passive)\n", (3641, 3671), False, 'import pulumi\n'), ((5026, 5074), 'pulumi.set', 'pulumi.set', (['__self__', '"""concurrency"""', 'concurrency'], {}), "(__self__, 'concurrency', concurrency)\n", (5036, 5074), False, 'import pulumi\n'), ((5119, 5159), 'pulumi.set', 'pulumi.set', (['__self__', '"""healthy"""', 'healthy'], {}), "(__self__, 'healthy', healthy)\n", (5129, 5159), False, 'import pulumi\n'), ((5206, 5250), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_path"""', 'http_path'], {}), "(__self__, 'http_path', http_path)\n", (5216, 5250), False, 'import pulumi\n'), ((5297, 5341), 'pulumi.set', 'pulumi.set', (['__self__', '"""https_sni"""', 'https_sni'], {}), "(__self__, 'https_sni', https_sni)\n", (5307, 5341), False, 'import pulumi\n'), ((5403, 5477), 'pulumi.set', 'pulumi.set', (['__self__', '"""https_verify_certificate"""', 'https_verify_certificate'], {}), "(__self__, 'https_verify_certificate', https_verify_certificate)\n", (5413, 5477), False, 'import pulumi\n'), ((5522, 5562), 'pulumi.set', 'pulumi.set', (['__self__', '"""timeout"""', 'timeout'], {}), "(__self__, 'timeout', timeout)\n", (5532, 5562), False, 'import pulumi\n'), ((5604, 5638), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (5614, 5638), False, 'import pulumi\n'), ((5685, 5729), 'pulumi.set', 'pulumi.set', (['__self__', '"""unhealthy"""', 'unhealthy'], {}), "(__self__, 'unhealthy', unhealthy)\n", (5695, 5729), False, 'import pulumi\n'), ((8463, 8515), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_statuses"""', 'http_statuses'], {}), "(__self__, 'http_statuses', http_statuses)\n", (8473, 8515), False, 'import pulumi\n'), ((8561, 8603), 'pulumi.set', 'pulumi.set', (['__self__', '"""interval"""', 'interval'], {}), "(__self__, 'interval', interval)\n", (8571, 8603), False, 'import pulumi\n'), ((8650, 8694), 'pulumi.set', 'pulumi.set', (['__self__', '"""successes"""', 'successes'], {}), "(__self__, 'successes', successes)\n", (8660, 8694), False, 'import pulumi\n'), ((10073, 10125), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_failures"""', 'http_failures'], {}), "(__self__, 'http_failures', http_failures)\n", (10083, 10125), False, 'import pulumi\n'), ((10176, 10228), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_statuses"""', 'http_statuses'], {}), "(__self__, 'http_statuses', http_statuses)\n", (10186, 10228), False, 'import pulumi\n'), ((10274, 10316), 'pulumi.set', 'pulumi.set', (['__self__', '"""interval"""', 'interval'], {}), "(__self__, 'interval', interval)\n", (10284, 10316), False, 'import pulumi\n'), ((10366, 10416), 'pulumi.set', 'pulumi.set', (['__self__', '"""tcp_failures"""', 'tcp_failures'], {}), "(__self__, 'tcp_failures', tcp_failures)\n", (10376, 10416), False, 'import pulumi\n'), ((10462, 10504), 'pulumi.set', 'pulumi.set', (['__self__', '"""timeouts"""', 'timeouts'], {}), "(__self__, 'timeouts', timeouts)\n", (10472, 10504), False, 'import pulumi\n'), ((12376, 12416), 'pulumi.set', 'pulumi.set', (['__self__', '"""healthy"""', 'healthy'], {}), "(__self__, 'healthy', healthy)\n", (12386, 12416), False, 'import pulumi\n'), ((12458, 12492), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (12468, 12492), False, 'import pulumi\n'), ((12539, 12583), 'pulumi.set', 'pulumi.set', (['__self__', '"""unhealthy"""', 'unhealthy'], {}), "(__self__, 'unhealthy', unhealthy)\n", (12549, 12583), False, 'import pulumi\n'), ((13797, 13849), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_statuses"""', 'http_statuses'], {}), "(__self__, 'http_statuses', http_statuses)\n", (13807, 13849), False, 'import pulumi\n'), ((13896, 13940), 'pulumi.set', 'pulumi.set', (['__self__', '"""successes"""', 'successes'], {}), "(__self__, 'successes', successes)\n", (13906, 13940), False, 'import pulumi\n'), ((14998, 15050), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_failures"""', 'http_failures'], {}), "(__self__, 'http_failures', http_failures)\n", (15008, 15050), False, 'import pulumi\n'), ((15101, 15153), 'pulumi.set', 'pulumi.set', (['__self__', '"""http_statuses"""', 'http_statuses'], {}), "(__self__, 'http_statuses', http_statuses)\n", (15111, 15153), False, 'import pulumi\n'), ((15203, 15253), 'pulumi.set', 'pulumi.set', (['__self__', '"""tcp_failures"""', 'tcp_failures'], {}), "(__self__, 'tcp_failures', tcp_failures)\n", (15213, 15253), False, 'import pulumi\n'), ((15299, 15341), 'pulumi.set', 'pulumi.set', (['__self__', '"""timeouts"""', 'timeouts'], {}), "(__self__, 'timeouts', timeouts)\n", (15309, 15341), False, 'import pulumi\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from logging import config
config.dictConfig({
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s| %(name)s/%(process)d: %(message)s @%(funcName)s:%(lineno)d #%(levelname)s',
}
},
'handlers': {
'console': {
'formatter': 'standard',
'class': 'logging.StreamHandler',
}
},
'root': {
'handlers': ['console'],
'level': 'INFO',
},
'loggers': {
'ripozo': {
'level': 'INFO',
}
}
})
| [
"logging.config.dictConfig"
] | [((178, 576), 'logging.config.dictConfig', 'config.dictConfig', (["{'version': 1, 'disable_existing_loggers': True, 'formatters': {'standard':\n {'format':\n '%(asctime)s| %(name)s/%(process)d: %(message)s @%(funcName)s:%(lineno)d #%(levelname)s'\n }}, 'handlers': {'console': {'formatter': 'standard', 'class':\n 'logging.StreamHandler'}}, 'root': {'handlers': ['console'], 'level':\n 'INFO'}, 'loggers': {'ripozo': {'level': 'INFO'}}}"], {}), "({'version': 1, 'disable_existing_loggers': True,\n 'formatters': {'standard': {'format':\n '%(asctime)s| %(name)s/%(process)d: %(message)s @%(funcName)s:%(lineno)d #%(levelname)s'\n }}, 'handlers': {'console': {'formatter': 'standard', 'class':\n 'logging.StreamHandler'}}, 'root': {'handlers': ['console'], 'level':\n 'INFO'}, 'loggers': {'ripozo': {'level': 'INFO'}}})\n", (195, 576), False, 'from logging import config\n')] |
# coding: utf-8
import fnmatch
import pathlib
import os.path
import re
import logging
logging.basicConfig(level=logging.INFO)
INCLUDED_SOURCES = ("*.py", )
EXCLUDED_SOURCES = ("__*__.py", )
INCLUDED_SOURCES_REGEX = tuple(re.compile(fnmatch.translate(pattern))
for pattern in INCLUDED_SOURCES)
EXCLUDED_SOURCES_REGEX = tuple(re.compile(fnmatch.translate(pattern))
for pattern in EXCLUDED_SOURCES)
def include_file(filename):
return (any(regex.match(filename) for regex in INCLUDED_SOURCES_REGEX) and
not any(regex.match(filename) for regex in EXCLUDED_SOURCES_REGEX))
def list_examples(src_dir):
examples = []
for dirname, _, filenames in os.walk(src_dir):
for filename in filenames:
if include_file(filename):
examples.append((pathlib.Path(dirname), filename))
index_contents = []
return sorted(examples)
def generate_examples_rst(src_dir="examples/"):
examples = list_examples(src_dir)
# Generate the index
logging.info("Creating index file")
with open(os.path.join(src_dir, "index.rst"), "w") as index:
index.write(
"List of code examples\n"
"---------------------\n"
"\n"
".. toctree::\n"
"\n"
)
for example_dirname, example_filename in examples:
example_pathname = os.path.join(
example_dirname.relative_to(src_dir),
example_filename)
rst_filename = os.path.join(src_dir, f"{example_pathname}.rst")
index.write(f" {example_pathname}\n")
logging.info("generating file for %s", example_pathname)
with open(rst_filename, "w") as example_rst:
example_rst.write(
f"``{example_pathname}``\n"
f"{'-' * (len(example_pathname) + 4)}\n\n"
f".. literalinclude:: {example_filename}\n"
)
logging.info("index and source file generated")
if __name__ == "__main__":
generate_examples_rst()
| [
"logging.basicConfig",
"logging.info",
"fnmatch.translate",
"pathlib.Path"
] | [((87, 126), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (106, 126), False, 'import logging\n'), ((1062, 1097), 'logging.info', 'logging.info', (['"""Creating index file"""'], {}), "('Creating index file')\n", (1074, 1097), False, 'import logging\n'), ((2015, 2062), 'logging.info', 'logging.info', (['"""index and source file generated"""'], {}), "('index and source file generated')\n", (2027, 2062), False, 'import logging\n'), ((235, 261), 'fnmatch.translate', 'fnmatch.translate', (['pattern'], {}), '(pattern)\n', (252, 261), False, 'import fnmatch\n'), ((370, 396), 'fnmatch.translate', 'fnmatch.translate', (['pattern'], {}), '(pattern)\n', (387, 396), False, 'import fnmatch\n'), ((1668, 1724), 'logging.info', 'logging.info', (['"""generating file for %s"""', 'example_pathname'], {}), "('generating file for %s', example_pathname)\n", (1680, 1724), False, 'import logging\n'), ((857, 878), 'pathlib.Path', 'pathlib.Path', (['dirname'], {}), '(dirname)\n', (869, 878), False, 'import pathlib\n')] |
# Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: Apache-2.0
import argparse
import os
from githubDataExtraction import GithubDataExtractor
def getRepos(access_token, organization, reaction):
"""
Method to extract data for all repositories in organization
"""
extractor = GithubDataExtractor(access_token) # Create object
repos = extractor.g_ses.get_organization(organization).get_repos()
for repo in repos:
print("Starting: {}".format(repo.name))
extractor.openRepo(organization, repo.name)
extractor.getAllPulls("", reaction)
def getRepo(access_token, organization, reponame, reaction):
"""
Method to extract data for an individual repository
"""
extractor = GithubDataExtractor(access_token) # Create object
extractor.openRepo(organization, reponame)
extractor.getAllPulls("", reaction)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Create csv for all pulls in each repo for the organzation')
parser.add_argument('organization', help='Organization the repo belongs to.')
parser.add_argument('repo', help='Repo name or all if all repos in organization')
parser.add_argument('-reactions', action='store_true', default=False, help='Flag to extract reactions')
args = parser.parse_args()
ACCESS_TOKEN = os.environ["GITACCESS"] # Access Github token from environment for security purposes
if args.repo == 'all':
getRepos(ACCESS_TOKEN, args.organization, args.reactions)
else:
getRepo(ACCESS_TOKEN, args.organization, args.repo, args.reactions)
| [
"githubDataExtraction.GithubDataExtractor",
"argparse.ArgumentParser"
] | [((299, 332), 'githubDataExtraction.GithubDataExtractor', 'GithubDataExtractor', (['access_token'], {}), '(access_token)\n', (318, 332), False, 'from githubDataExtraction import GithubDataExtractor\n'), ((739, 772), 'githubDataExtraction.GithubDataExtractor', 'GithubDataExtractor', (['access_token'], {}), '(access_token)\n', (758, 772), False, 'from githubDataExtraction import GithubDataExtractor\n'), ((920, 1021), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Create csv for all pulls in each repo for the organzation"""'}), "(description=\n 'Create csv for all pulls in each repo for the organzation')\n", (943, 1021), False, 'import argparse\n')] |
#!/usr/bin/python35
import calc
from calc import mult
print(calc.add(1, 2))
print(calc.dec(2, 3))
print(calc.div(1, 2))
print(mult(2, 3))
| [
"calc.div",
"calc.mult",
"calc.dec",
"calc.add"
] | [((62, 76), 'calc.add', 'calc.add', (['(1)', '(2)'], {}), '(1, 2)\n', (70, 76), False, 'import calc\n'), ((84, 98), 'calc.dec', 'calc.dec', (['(2)', '(3)'], {}), '(2, 3)\n', (92, 98), False, 'import calc\n'), ((106, 120), 'calc.div', 'calc.div', (['(1)', '(2)'], {}), '(1, 2)\n', (114, 120), False, 'import calc\n'), ((128, 138), 'calc.mult', 'mult', (['(2)', '(3)'], {}), '(2, 3)\n', (132, 138), False, 'from calc import mult\n')] |
"""
Genetic solution to the 0/1 Knapsack Problem.
usage: knapsack01.py [-h] [--data-file DATA_FILE]
[--population-size POPULATION_SIZE]
[--iterations MAX_ITERATIONS] [--mutation MUTATION_PROB]
[--crossover CROSSOVER_PROB] [--seed SEED]
[--stats-file STATS_FILE]
[--population-file POPULATION_FILE] [--verbose]
[--elitism ELITISM] [--uniform_cx] [--generate]
[--items NUM_ITEMS]
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
#from builtins import str
#from builtins import range
import math
import random
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
from levis import (configuration, crossover, mutation, FitnessLoggingGA,
ProportionateGA, ElitistGA)
class Knapsack01GA(ElitistGA, ProportionateGA, FitnessLoggingGA):
"""Genetic solution to the 0/1 Knapsack Problem."""
def __init__(self, config={}):
"""Initialize a 0/1 knapsack solver.
Raises:
AttributeError: If ``items`` is not in the configuration dict.
"""
super(self.__class__, self).__init__(config)
self.max_weight = self.config.setdefault("max_weight", 15)
self.items = self.config.setdefault("items", [])
self.chromosome_length = len(self.items)
self.uniform_cx = config.setdefault("uniform_cx", False)
for i, item in enumerate(self.items):
item["mask"] = 2 ** i
def assess(self, chromosome):
"""Return a 2-tuple of the total weight and value of a chromosome."""
weight = 0
value = 0
# pylint: disable=unused-variable
for locus, _ in enumerate(self.items):
if chromosome & 2 ** locus:
item = self.items[locus]
weight += item["weight"]
value += item["value"]
return (weight, value)
def score(self, chromosome):
weight, value = self.assess(chromosome)
if weight > self.max_weight:
return 0.0
return value
def create(self):
# The below generates chromosomes, but the majority are too heavy
# return random.randint(0, 2 ** self.chromosome_length - 1)
items = list(self.items)
random.shuffle(items)
weight = 0
chromosome = 0
for i in items:
if weight + i["weight"] <= self.max_weight:
weight += i["weight"]
chromosome |= i["mask"]
return chromosome
def crossover(self):
parent1 = self.select()
parent2 = self.select()
length = self.chromosome_length
if self.uniform_cx:
return crossover.uniform_bin(parent1, parent2, length)
else:
return crossover.single_point_bin(parent1, parent2, length)
def mutate(self, chromosome):
return mutation.toggle(chromosome, self.chromosome_length,
self.mutation_prob)
def chromosome_str(self, chromosome):
sack = []
for locus, _ in enumerate(self.items):
item = self.items[locus]["name"]
packed= 0
if chromosome & 2 ** locus:
packed = 1
sack.append("%s: %i" % (item, packed))
weight, value = self.assess(chromosome)
vals = (weight, value, ", ".join(sack))
return "{weight: %0.2f, value: %0.2f, contents: [%s]}" % vals
def chromosome_repr(self, chromosome):
return bin(chromosome)[2:].zfill(self.chromosome_length)
def create_data(config={}):
"""Create data and write to a JSON file."""
max_weight = config.setdefault("max_weight", 15)
items = []
if "num_items" in config:
num_items = config["num_items"]
del config["num_items"]
else:
num_items = 32
# Generate items
digits = int(math.ceil(math.log(num_items, 16)))
fmt = "%0" + str(digits) + "X"
for i in range(0, num_items):
name = fmt % (i + 1)
weight = random.triangular(1.0, max_weight // 3, max_weight)
value = random.random() * 100
items.append({"name": name, "weight": weight, "value": value})
config["items"] = items
configuration.write_file(config)
def main():
"""Main method to parse args and run."""
defaults = {
"population_size": 10,
"max_iterations": 10,
"elitism_pct": 0.01,
"population_file": "population.log",
"stats_file": "stats.csv"
}
description = "Genetic solution to the 0/1 Knapsack Problem"
parent = [Knapsack01GA.arg_parser()]
parser = configuration.get_parser(description, "knapsack01.json", parent)
parser.add_argument("--uniform_cx", action="store_true",
help="Use uniform crossover instead of single-point")
parser.add_argument("--generate", action="store_true",
help="Generate and store problem data")
group = parser.add_argument_group("data generation options")
group.add_argument("--items", type=int, dest="num_items", default=32,
help="Number of items to generate")
args = configuration.read_args(parser)
if args["generate"]:
del args["generate"]
create_data(args)
else:
config_file = configuration.read_file(args)
config = configuration.merge(defaults, config_file, args)
solver = Knapsack01GA(config)
solver.solve()
print(solver.chromosome_str(solver.best()))
if __name__ == "__main__":
main()
| [
"levis.crossover.single_point_bin",
"levis.configuration.merge",
"random.shuffle",
"levis.mutation.toggle",
"levis.configuration.write_file",
"random.triangular",
"levis.configuration.read_file",
"levis.configuration.get_parser",
"math.log",
"levis.crossover.uniform_bin",
"os.path.abspath",
"r... | [((794, 815), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (809, 815), False, 'import os\n'), ((4382, 4414), 'levis.configuration.write_file', 'configuration.write_file', (['config'], {}), '(config)\n', (4406, 4414), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((4786, 4850), 'levis.configuration.get_parser', 'configuration.get_parser', (['description', '"""knapsack01.json"""', 'parent'], {}), "(description, 'knapsack01.json', parent)\n", (4810, 4850), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((5325, 5356), 'levis.configuration.read_args', 'configuration.read_args', (['parser'], {}), '(parser)\n', (5348, 5356), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((2424, 2445), 'random.shuffle', 'random.shuffle', (['items'], {}), '(items)\n', (2438, 2445), False, 'import random\n'), ((3036, 3107), 'levis.mutation.toggle', 'mutation.toggle', (['chromosome', 'self.chromosome_length', 'self.mutation_prob'], {}), '(chromosome, self.chromosome_length, self.mutation_prob)\n', (3051, 3107), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((4186, 4237), 'random.triangular', 'random.triangular', (['(1.0)', '(max_weight // 3)', 'max_weight'], {}), '(1.0, max_weight // 3, max_weight)\n', (4203, 4237), False, 'import random\n'), ((5471, 5500), 'levis.configuration.read_file', 'configuration.read_file', (['args'], {}), '(args)\n', (5494, 5500), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((5518, 5566), 'levis.configuration.merge', 'configuration.merge', (['defaults', 'config_file', 'args'], {}), '(defaults, config_file, args)\n', (5537, 5566), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((2852, 2899), 'levis.crossover.uniform_bin', 'crossover.uniform_bin', (['parent1', 'parent2', 'length'], {}), '(parent1, parent2, length)\n', (2873, 2899), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((2933, 2985), 'levis.crossover.single_point_bin', 'crossover.single_point_bin', (['parent1', 'parent2', 'length'], {}), '(parent1, parent2, length)\n', (2959, 2985), False, 'from levis import configuration, crossover, mutation, FitnessLoggingGA, ProportionateGA, ElitistGA\n'), ((4044, 4067), 'math.log', 'math.log', (['num_items', '(16)'], {}), '(num_items, 16)\n', (4052, 4067), False, 'import math\n'), ((4254, 4269), 'random.random', 'random.random', ([], {}), '()\n', (4267, 4269), False, 'import random\n')] |
# -*- coding: utf-8 -*-
"""
@date Created on Tue Jan 12 13:54:56 2016
@copyright (C) 2015-2016 EOMYS ENGINEERING.
@author pierre_b
"""
from os.path import join
from unittest import TestCase
import matplotlib.pyplot as plt
from numpy import array, pi, zeros
from pyleecan.Classes.Frame import Frame
from pyleecan.Classes.LamSlotWind import LamSlotWind
from pyleecan.Classes.LamSquirrelCage import LamSquirrelCage
from pyleecan.Classes.Machine import Machine
from pyleecan.Classes.Shaft import Shaft
from pyleecan.Classes.VentilationCirc import VentilationCirc
from pyleecan.Classes.VentilationPolar import VentilationPolar
from pyleecan.Classes.VentilationTrap import VentilationTrap
from pyleecan.Classes.Winding import Winding
from pyleecan.Classes.WindingUD import WindingUD
from pyleecan.Classes.WindingCW2LT import WindingCW2LT
from pyleecan.Classes.WindingDW2L import WindingDW2L
from pyleecan.Classes.BHCurveMat import BHCurveMat
from pyleecan.Classes.BHCurveParam import BHCurveParam
from pyleecan.Classes.MatLamination import MatLamination
from pyleecan.Classes.SlotW60 import SlotW60
from pyleecan.Tests.Plot import save_path
from pyleecan.Tests.Plot.LamWind import wind_mat
class test_Lam_Wind_60_plot(TestCase):
"""unittest for Lamination with winding plot"""
def test_Lam_Wind_60(self):
"""Test machine plot with Slot 60
"""
print("\nTest plot Slot 60")
plt.close("all")
test_obj = Machine()
test_obj.rotor = LamSlotWind(
Rint=0, Rext=0.1325, is_internal=True, is_stator=False, L1=0.9
)
test_obj.rotor.slot = SlotW60(
Zs=12,
W1=25e-3,
W2=12.5e-3,
H1=20e-3,
H2=20e-3,
R1=0.1325,
H3=2e-3,
H4=1e-3,
W3=2e-3,
)
test_obj.rotor.winding = WindingCW2LT(qs=3, p=3, Lewout=60e-3)
plt.close("all")
test_obj.rotor.plot()
fig = plt.gcf()
fig.savefig(join(save_path, "test_Lam_Wind_s60_1-Rotor.png"))
# 1 for Lam, Zs*2 for wind
self.assertEqual(len(fig.axes[0].patches), 25)
test_obj.rotor.slot.W3 = 0
test_obj.rotor.slot.H3 = 0
test_obj.rotor.slot.H4 = 0
test_obj.rotor.plot()
fig = plt.gcf()
fig.savefig(join(save_path, "test_Lam_Wind_s60_2-Rotor Wind.png"))
# 1 for Lam, Zs*2 for wind
self.assertEqual(len(fig.axes[0].patches), 25)
| [
"matplotlib.pyplot.gcf",
"pyleecan.Classes.SlotW60.SlotW60",
"os.path.join",
"pyleecan.Classes.WindingCW2LT.WindingCW2LT",
"matplotlib.pyplot.close",
"pyleecan.Classes.LamSlotWind.LamSlotWind",
"pyleecan.Classes.Machine.Machine"
] | [((1407, 1423), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (1416, 1423), True, 'import matplotlib.pyplot as plt\n'), ((1443, 1452), 'pyleecan.Classes.Machine.Machine', 'Machine', ([], {}), '()\n', (1450, 1452), False, 'from pyleecan.Classes.Machine import Machine\n'), ((1478, 1553), 'pyleecan.Classes.LamSlotWind.LamSlotWind', 'LamSlotWind', ([], {'Rint': '(0)', 'Rext': '(0.1325)', 'is_internal': '(True)', 'is_stator': '(False)', 'L1': '(0.9)'}), '(Rint=0, Rext=0.1325, is_internal=True, is_stator=False, L1=0.9)\n', (1489, 1553), False, 'from pyleecan.Classes.LamSlotWind import LamSlotWind\n'), ((1606, 1704), 'pyleecan.Classes.SlotW60.SlotW60', 'SlotW60', ([], {'Zs': '(12)', 'W1': '(0.025)', 'W2': '(0.0125)', 'H1': '(0.02)', 'H2': '(0.02)', 'R1': '(0.1325)', 'H3': '(0.002)', 'H4': '(0.001)', 'W3': '(0.002)'}), '(Zs=12, W1=0.025, W2=0.0125, H1=0.02, H2=0.02, R1=0.1325, H3=0.002,\n H4=0.001, W3=0.002)\n', (1613, 1704), False, 'from pyleecan.Classes.SlotW60 import SlotW60\n'), ((1853, 1889), 'pyleecan.Classes.WindingCW2LT.WindingCW2LT', 'WindingCW2LT', ([], {'qs': '(3)', 'p': '(3)', 'Lewout': '(0.06)'}), '(qs=3, p=3, Lewout=0.06)\n', (1865, 1889), False, 'from pyleecan.Classes.WindingCW2LT import WindingCW2LT\n'), ((1899, 1915), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (1908, 1915), True, 'import matplotlib.pyplot as plt\n'), ((1961, 1970), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (1968, 1970), True, 'import matplotlib.pyplot as plt\n'), ((2281, 2290), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (2288, 2290), True, 'import matplotlib.pyplot as plt\n'), ((1991, 2039), 'os.path.join', 'join', (['save_path', '"""test_Lam_Wind_s60_1-Rotor.png"""'], {}), "(save_path, 'test_Lam_Wind_s60_1-Rotor.png')\n", (1995, 2039), False, 'from os.path import join\n'), ((2311, 2364), 'os.path.join', 'join', (['save_path', '"""test_Lam_Wind_s60_2-Rotor Wind.png"""'], {}), "(save_path, 'test_Lam_Wind_s60_2-Rotor Wind.png')\n", (2315, 2364), False, 'from os.path import join\n')] |
import json
import math
import re
from django.urls import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.template.defaultfilters import slugify
from django.db.models import Q, F
from haystack.query import SQ, SearchQuerySet
from django.http import QueryDict
# from haystack.views import SearchView
from mpcomp.views import (get_prev_after_pages_count, get_valid_locations_list,
get_valid_skills_list, get_meta_data, get_404_meta)
from peeldb.models import (City, FunctionalArea, Industry, JobPost,
Qualification, Skill, State)
from pjob.refine_search import refined_search
from pjob.views import get_page_number
from search.forms import job_searchForm
from dashboard.tasks import save_search_results
# class search_job(SearchView):
# template_name = 'search/search_results.html'
# queryset = SearchQuerySet()
# form_class = job_searchForm
# def get_queryset(self):
# queryset = super(search_job, self).get_queryset()
# return queryset
# def get_context_data(self):
# context = super(search_job, self).get_context_data()
# return context
# def get_results(self):
# results = self.form.search()
# return results
# def build_page(self):
# jobs_list = self.results
# no_of_jobs = len(jobs_list)
# items_per_page = 10
# no_pages = int(math.ceil(float(jobs_list.count()) / items_per_page))
# page = 1
# jobs_list = jobs_list[
# (page - 1) * items_per_page:page * items_per_page]
# prev_page, previous_page, aft_page, after_page = get_prev_after_pages_count(page, no_pages)
# return (aft_page, after_page, prev_page, previous_page, page, no_pages, no_of_jobs, jobs_list)
# def create_response(self):
# (aft_page, after_page, prev_page, previous_page,
# page, no_pages, no_of_jobs) = self.build_page()
# results = [r.object for r in self.results]
# param = ""
# param = ('&' + 'q=' + self.form.cleaned_data['q'] + '&location=' + self.form.cleaned_data['location'] +
# '&experience=' + str(self.form.cleaned_data['experience'] or "") + '&salary=' + str(self.form.cleaned_data['salary'] or "") +
# '&industry=' + str(self.form.cleaned_data['industry'] or "") + '&functional_area=' + str(self.form.cleaned_data['functional_area'] or ""))
# context = {
# 'query': self.query,
# 'query_form': self.form,
# 'page': page,
# 'results': results,
# 'suggestion': None,
# 'param': param,
# 'aft_page': aft_page,
# 'after_page': after_page,
# 'prev_page': prev_page,
# 'previous_page': previous_page,
# 'current_page': page,
# 'last_page': no_pages,
# 'no_of_jobs': no_of_jobs,
# 'skill': self.form.cleaned_data['q'],
# 'location': self.form.cleaned_data['location'],
# }
# return render_to_response(self.template, context, context_instance=self.context_class(self.request))
def custom_search(data, request):
form = job_searchForm(data)
searched_locations = searched_skills = searched_edu = searched_industry = searched_states = ''
if request.POST.get('refine_search') == 'True':
jobs_list, searched_skills, searched_locations, searched_industry, searched_edu, searched_states = refined_search(request.POST)
else:
jobs_list = form.search()
jobs_list = JobPost.objects.filter(pk__in=[r.pk for r in jobs_list])
searched_locations = City.objects.filter(name__in=data.get('location'))
searched_skills = Skill.objects.filter(name__in=data.get('q'))
jobs_list = jobs_list.filter(status="Live")
job_type = data.get('job_type') or request.POST.get('job_type') or request.GET.get('job_type')
if job_type:
jobs_list = jobs_list.filter(job_type=job_type)
if data.get('walk-in'):
jobs_list = jobs_list.filter(job_type="walk-in")
no_of_jobs = len(jobs_list)
items_per_page = 20
no_pages = int(math.ceil(float(jobs_list.count()) / items_per_page))
page = request.POST.get('page') or data.get('page')
if page and bool(re.search(r"[0-9]", page)) and int(page) > 0:
if int(page) > (no_pages + 2):
page = 1
else:
page = int(data.get('page'))
else:
page = 1
jobs_list = jobs_list.select_related('company', 'user').prefetch_related('location', 'skills', 'industry').distinct()
jobs_list = jobs_list[(page - 1) * items_per_page:page * items_per_page]
prev_page, previous_page, aft_page, after_page = get_prev_after_pages_count(page, no_pages)
if form.is_valid():
context = {
'results': form.search(),
'query': form.query(),
'searchform': form,
'aft_page': aft_page,
'after_page': after_page,
'prev_page': prev_page,
'previous_page': previous_page,
'current_page': page,
'last_page': no_pages,
'no_of_jobs': no_of_jobs,
'job_list': jobs_list,
'skill': form.cleaned_data['q'],
'location': form.cleaned_data['location'],
'searched_skills': searched_skills,
'searched_locations': searched_locations,
'searched_industry': searched_industry,
'searched_edu': searched_edu,
'searched_experience': request.POST.get('experience'),
'searched_job_type': request.POST.get('job_type'),
'searched_functional_area': request.POST.get('functional_area'),
}
return context
return {'job_list': []}
def custome_search(request, skill_name, city_name, **kwargs):
current_url = reverse('custome_search', kwargs={'skill_name': skill_name, 'city_name': city_name})
if kwargs.get('page_num') == '1' or request.GET.get('page') == '1':
return redirect(current_url, permanent=True)
if 'page' in request.GET:
url = current_url + request.GET.get('page') + '/'
return redirect(url, permanent=True)
final_skill = get_valid_skills_list(skill_name)
final_location = get_valid_locations_list(city_name)
if request.POST:
save_search_results.delay(request.META['REMOTE_ADDR'], request.POST, 0, request.user.id)
if not final_location or not final_skill:
template = 'mobile/404.html' if request.is_mobile else '404.html'
return render(request, template, {'message': 'Unfortunately, we are unable to locate the jobs you are looking for',
'data_empty': True, 'job_search': True,
'reason': "Only Valid Skills/Cities names are accepted in search",
'searched_skills': final_skill or [skill_name],
'searched_locations': final_location or [city_name]}, status=404)
job_type = request.POST.get('job_type') or request.GET.get('job_type')
if request.POST.get('refine_search') == 'True':
job_list, searched_skills, searched_locations, searched_industry, searched_edu, searched_states = refined_search(request.POST)
else:
search_dict = QueryDict('', mutable=True)
search_dict.setlist('refine_skill', final_skill)
search_dict.setlist('refine_location', final_location)
if job_type:
search_dict.update({'job_type': job_type})
if request.POST.get('experience'):
search_dict.update({'refine_experience_min': request.POST.get('experience')})
job_list, searched_skills, searched_locations, searched_industry, searched_edu, searched_states = refined_search(search_dict)
if job_list:
no_of_jobs = job_list.count()
items_per_page = 20
no_pages = int(math.ceil(float(no_of_jobs) / items_per_page))
page = get_page_number(request, kwargs, no_pages)
if not page:
return HttpResponseRedirect(current_url)
prev_page, previous_page, aft_page, after_page = get_prev_after_pages_count(page, no_pages)
job_list = job_list[(page - 1) * items_per_page:page * items_per_page]
meta_title, meta_description, h1_tag = get_meta_data('skill_location_jobs', {'skills': searched_skills,
'final_skill': final_skill,
'page': page,
'locations': searched_locations,
'final_location': final_location})
data = {'job_list': job_list,
'aft_page': aft_page,
'after_page': after_page,
'prev_page': prev_page,
'previous_page': previous_page,
'current_page': page,
'last_page': no_pages,
'no_of_jobs': no_of_jobs,
"is_job_list": False,
'current_url': current_url,
'searched_skills': searched_skills,
'searched_states': searched_states,
'searched_locations': searched_locations,
'searched_industry': searched_industry,
'searched_edu': searched_edu,
'searched_experience': request.POST.get('experience'),
'searched_job_type': job_type,
'meta_title': meta_title,
'meta_description': meta_description,
'h1_tag': h1_tag,
}
template = 'jobs/jobs_list.html'
if request.is_mobile:
data.update({'searched_industry': request.POST.get('industry'),
'searched_functional_area': request.POST.get('functional_area')})
template = 'mobile/jobs/list.html'
return render(request, template, data)
else:
template = 'mobile/404.html' if request.is_mobile else '404.html'
meta_title, meta_description = get_404_meta(
'skill_location_404', {'skill': final_skill, 'city': final_location})
return render(request, template, {'message': 'Unfortunately, we are unable to locate the jobs you are looking for',
'reason': "Only Valid Skills/Cities names are accepted in search",
'job_search': True,
'searched_skills': searched_skills, 'searched_locations': searched_locations,
'meta_title': meta_title, 'meta_description': meta_description})
def custom_walkins(request, skill_name, city_name, **kwargs):
current_url = reverse('custom_walkins', kwargs={'skill_name': skill_name, 'city_name': city_name})
if kwargs.get('page_num') == '1' or request.GET.get('page') == '1':
return redirect(current_url, permanent=True)
if 'page' in request.GET:
url = current_url + request.GET.get('page') + '/'
return redirect(url, permanent=True)
final_skill = get_valid_skills_list(skill_name)
final_location = get_valid_locations_list(city_name)
if not final_location or not final_skill:
if request.POST:
save_search_results.delay(request.META['REMOTE_ADDR'], request.POST, 0, request.user.id)
location = final_location or [city_name]
skills = final_skill or [skill_name]
template = 'mobile/404.html' if request.is_mobile else '404.html'
meta_title = meta_description = ''
return render(request, template, {'message': 'Unfortunately, we are unable to locate the jobs you are looking for',
'searched_job_type': 'walk-in',
'reason': "Only Valid Skills/Cities names are accepted in search",
'searched_skills': skills, 'searched_locations': location,
'meta_title': meta_title, 'meta_description': meta_description,
'data_empty': True,
'job_search': True}, status=404)
if request.POST.get('refine_search') == 'True':
job_list, searched_skills, searched_locations, searched_industry, searched_edu, searched_states = refined_search(request.POST)
else:
search_dict = QueryDict('', mutable=True)
search_dict.setlist('refine_skill', final_skill)
search_dict.setlist('refine_location', final_location)
search_dict.update({'job_type': 'walk-in'})
if request.POST.get('experience'):
search_dict.update({'refine_experience_min': request.POST.get('experience')})
job_list, searched_skills, searched_locations, searched_industry, searched_edu, searched_states = refined_search(search_dict)
if job_list:
no_of_jobs = job_list.count()
items_per_page = 20
no_pages = int(math.ceil(float(no_of_jobs) / items_per_page))
page = get_page_number(request, kwargs, no_pages)
if not page:
return HttpResponseRedirect(current_url)
prev_page, previous_page, aft_page, after_page = get_prev_after_pages_count(page, no_pages)
job_list = job_list[(page - 1) * items_per_page:page * items_per_page]
meta_title, meta_description, h1_tag = get_meta_data('skill_location_walkin_jobs', {'skills': searched_skills,
'final_skill': final_skill,
'page': page,
'locations': searched_locations,
'final_location': final_location})
data = {'job_list': job_list,
'aft_page': aft_page,
'after_page': after_page,
'prev_page': prev_page,
'previous_page': previous_page,
'current_page': page,
'last_page': no_pages,
'no_of_jobs': no_of_jobs,
"is_job_list": False,
'current_url': current_url,
'searched_skills': searched_skills,
'searched_states': searched_states,
'searched_locations': searched_locations,
'searched_industry': searched_industry,
'searched_edu': searched_edu,
'searched_experience': request.POST.get('experience'),
'searched_job_type': 'walk-in',
'meta_title': meta_title,
'meta_description': meta_description,
'h1_tag': h1_tag,
'walkin': True
}
template = 'jobs/jobs_list.html'
if request.is_mobile:
data.update({'searched_industry': request.POST.get('industry'),
'searched_functional_area': request.POST.get('functional_area')})
template = 'mobile/jobs/list.html'
return render(request, template, data)
else:
template = 'mobile/404.html' if request.is_mobile else '404.html'
meta_title, meta_description = get_404_meta(
'skill_location_404', {'skill': final_skill, 'city': final_location})
return render(request, template, {'message': 'Unfortunately, we are unable to locate the jobs you are looking for',
'reason': "Only Valid Skills/Cities names are accepted in search",
'job_search': True,
'searched_skills': searched_skills, 'searched_locations': searched_locations,
'meta_title': meta_title, 'meta_description': meta_description})
def skill_auto_search(request):
text = request.GET.get('text', '').split(', ')[:-1]
search = request.GET.get('q', '')
sqs = SearchQuerySet().models(Skill).filter_and(SQ(skill_name__contains=search) |
SQ(skill_slug__contains=search))
if text:
sqs = sqs.exclude(skill_name__in=text)
suggestions = [{
'name': result.skill_name,
'slug': result.skill_slug,
'jobs_count': result.no_of_jobposts,
'id': result.pk
} for result in sqs]
suggestions = sorted(suggestions, key=lambda k: len(k['name']), reverse=False)
if not request.GET.get('search') == 'filter':
deg = SearchQuerySet().models(Qualification).filter_and(SQ(edu_name__contains=search) |
SQ(edu_slug__contains=search))
if text:
deg = deg.exclude(edu_name__in=text)
degrees = [{'name': result.edu_name, 'id': result.pk, 'slug': result.edu_slug} for result in deg]
suggestions = suggestions + degrees
# suggestions = sorted(suggestions, key=int(itemgetter('jobs_count'), reverse=True)
the_data = json.dumps({
'results': suggestions[:10]
})
return HttpResponse(the_data, content_type='application/json')
def city_auto_search(request):
text = request.GET.get('text', '').split(', ')[:-1]
search = request.GET.get('location', '')
sqs = SearchQuerySet().models(City).filter(city_name__contains=search)
if text:
sqs = sqs.exclude(city_name__in=text)
suggestions = [{
'name': result.city_name,
'jobs_count': result.no_of_jobposts,
'id': result.pk
} for result in sqs]
suggestions = sorted(suggestions, key=lambda k: len(k['name']), reverse=False)
if not request.GET.get('search') == 'filter':
state = SearchQuerySet().models(State).filter_and(SQ(state_name__contains=search) |
SQ(state_slug__contains=search))
state = state.exclude(is_duplicate__in=[True])
if text:
state = state.exclude(state_name__in=text)
states = [{'name': result.state_name, 'id': result.pk, 'slug': result.state_slug} for result in state]
suggestions = suggestions + states
the_data = json.dumps({
'results': suggestions[:10]
})
return HttpResponse(the_data, content_type='application/json')
def industry_auto_search(request):
sqs = SearchQuerySet().models(Industry).filter(
industry_name__icontains=request.GET.get('industry', ''))
suggestions = [{
'name': result.industry_name.split('/')[0],
'jobs_count': result.no_of_jobposts,
'id': result.pk,
'slug': result.industry_slug,
} for result in sqs]
# suggestions = sorted(suggestions, key=lambda k: int(k['jobs_count']), reverse=True)
the_data = json.dumps({
'results': suggestions[:10]
})
return HttpResponse(the_data, content_type='application/json')
def functional_area_auto_search(request):
sqs = SearchQuerySet().models(FunctionalArea).filter(
functionalarea_name__contains=request.GET.get('functional_area', ''))[:10]
suggestions = [{
'name': result.functionalarea_name,
'jobs_count': result.no_of_jobposts
} for result in sqs]
suggestions = sorted(suggestions, key=lambda k: int(k['jobs_count']), reverse=True)
the_data = json.dumps({
'results': suggestions
})
return HttpResponse(the_data, content_type='application/json')
def education_auto_search(request):
degrees = SearchQuerySet().models(Qualification).filter_and(SQ(edu_name__contains=request.GET.get('education', '')) |
SQ(edu_slug__contains=request.GET.get('education', '')))
suggestions = [{'name': result.edu_name,
'id': result.pk,
'slug': result.edu_slug,
'jobs_count': result.no_of_jobposts or 0
} for result in degrees]
suggestions = sorted(suggestions, key=lambda k: int(k['jobs_count']), reverse=True)
the_data = json.dumps({'results': suggestions[:10]})
return HttpResponse(the_data, content_type='application/json')
def state_auto_search(request):
text = request.GET.get('text', '').split(', ')[:-1]
states = SearchQuerySet().models(State).filter_and(SQ(state_name__contains=request.GET.get('state', '')) |
SQ(state_slug__contains=request.GET.get('state', '')))
if text:
states = states.exclude(state_name__in=text)
suggestions = [{'name': result.state_name,
'id': result.pk,
'slug': result.state_slug,
'jobs_count': result.no_of_jobposts or 0
} for result in states]
suggestions = sorted(suggestions, key=lambda k: int(k['jobs_count']), reverse=True)
the_data = json.dumps({'results': suggestions[:10]})
return HttpResponse(the_data, content_type='application/json')
def search_slugs(request):
searched = request.GET.get('q_slug', '').replace('jobs', '').replace('job', '')
search_list = [i.strip() for i in searched.split(',') if i.strip()]
slug = ''
for search in search_list:
skills = Skill.objects.filter(Q(slug__iexact=search) | Q(name__iexact=search))
degrees = Qualification.objects.filter(Q(slug__iexact=search) | Q(name__iexact=search))
for skill in skills:
slug += ('-' + skill.slug) if slug else skill.slug
for degree in degrees:
slug += ('-' + degree.slug) if slug else degree.slug
if not skills and not degrees:
slug += ('-' + slugify(search)) if slug else slugify(search)
location = request.GET.get('location', '')
location_slug = ''
if location:
search_list = [i.strip() for i in location.split(',') if i.strip()]
for search in search_list:
locations = City.objects.filter(
Q(Q(slug__iexact=search) | Q(name__iexact=search)) & ~Q(state__name=F('name')))
states = State.objects.filter(Q(slug__iexact=search) | Q(name__iexact=search))
for loc in locations:
location_slug += ('-' + loc.slug) if location_slug else loc.slug
for state in states:
location_slug += ('-' + state.slug) if location_slug else state.slug
if not location_slug:
location_slug = slugify(location)
the_data = json.dumps({'skill_slug': slug, 'location_slug': location_slug})
return HttpResponse(the_data, content_type='application/json')
| [
"haystack.query.SQ",
"django.http.QueryDict",
"django.http.response.HttpResponse",
"django.http.response.HttpResponseRedirect",
"search.forms.job_searchForm",
"pjob.views.get_page_number",
"django.urls.reverse",
"re.search",
"django.shortcuts.render",
"peeldb.models.JobPost.objects.filter",
"mpc... | [((3293, 3313), 'search.forms.job_searchForm', 'job_searchForm', (['data'], {}), '(data)\n', (3307, 3313), False, 'from search.forms import job_searchForm\n'), ((4827, 4869), 'mpcomp.views.get_prev_after_pages_count', 'get_prev_after_pages_count', (['page', 'no_pages'], {}), '(page, no_pages)\n', (4853, 4869), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((5960, 6048), 'django.urls.reverse', 'reverse', (['"""custome_search"""'], {'kwargs': "{'skill_name': skill_name, 'city_name': city_name}"}), "('custome_search', kwargs={'skill_name': skill_name, 'city_name':\n city_name})\n", (5967, 6048), False, 'from django.urls import reverse\n'), ((6321, 6354), 'mpcomp.views.get_valid_skills_list', 'get_valid_skills_list', (['skill_name'], {}), '(skill_name)\n', (6342, 6354), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((6376, 6411), 'mpcomp.views.get_valid_locations_list', 'get_valid_locations_list', (['city_name'], {}), '(city_name)\n', (6400, 6411), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((11066, 11154), 'django.urls.reverse', 'reverse', (['"""custom_walkins"""'], {'kwargs': "{'skill_name': skill_name, 'city_name': city_name}"}), "('custom_walkins', kwargs={'skill_name': skill_name, 'city_name':\n city_name})\n", (11073, 11154), False, 'from django.urls import reverse\n'), ((11427, 11460), 'mpcomp.views.get_valid_skills_list', 'get_valid_skills_list', (['skill_name'], {}), '(skill_name)\n', (11448, 11460), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((11482, 11517), 'mpcomp.views.get_valid_locations_list', 'get_valid_locations_list', (['city_name'], {}), '(city_name)\n', (11506, 11517), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((17528, 17569), 'json.dumps', 'json.dumps', (["{'results': suggestions[:10]}"], {}), "({'results': suggestions[:10]})\n", (17538, 17569), False, 'import json\n'), ((17595, 17650), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (17607, 17650), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((18680, 18721), 'json.dumps', 'json.dumps', (["{'results': suggestions[:10]}"], {}), "({'results': suggestions[:10]})\n", (18690, 18721), False, 'import json\n'), ((18747, 18802), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (18759, 18802), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((19269, 19310), 'json.dumps', 'json.dumps', (["{'results': suggestions[:10]}"], {}), "({'results': suggestions[:10]})\n", (19279, 19310), False, 'import json\n'), ((19336, 19391), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (19348, 19391), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((19814, 19850), 'json.dumps', 'json.dumps', (["{'results': suggestions}"], {}), "({'results': suggestions})\n", (19824, 19850), False, 'import json\n'), ((19876, 19931), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (19888, 19931), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((20549, 20590), 'json.dumps', 'json.dumps', (["{'results': suggestions[:10]}"], {}), "({'results': suggestions[:10]})\n", (20559, 20590), False, 'import json\n'), ((20602, 20657), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (20614, 20657), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((21374, 21415), 'json.dumps', 'json.dumps', (["{'results': suggestions[:10]}"], {}), "({'results': suggestions[:10]})\n", (21384, 21415), False, 'import json\n'), ((21427, 21482), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (21439, 21482), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((22950, 23014), 'json.dumps', 'json.dumps', (["{'skill_slug': slug, 'location_slug': location_slug}"], {}), "({'skill_slug': slug, 'location_slug': location_slug})\n", (22960, 23014), False, 'import json\n'), ((23026, 23081), 'django.http.response.HttpResponse', 'HttpResponse', (['the_data'], {'content_type': '"""application/json"""'}), "(the_data, content_type='application/json')\n", (23038, 23081), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((3572, 3600), 'pjob.refine_search.refined_search', 'refined_search', (['request.POST'], {}), '(request.POST)\n', (3586, 3600), False, 'from pjob.refine_search import refined_search\n'), ((3665, 3721), 'peeldb.models.JobPost.objects.filter', 'JobPost.objects.filter', ([], {'pk__in': '[r.pk for r in jobs_list]'}), '(pk__in=[r.pk for r in jobs_list])\n', (3687, 3721), False, 'from peeldb.models import City, FunctionalArea, Industry, JobPost, Qualification, Skill, State\n'), ((6132, 6169), 'django.shortcuts.redirect', 'redirect', (['current_url'], {'permanent': '(True)'}), '(current_url, permanent=True)\n', (6140, 6169), False, 'from django.shortcuts import redirect, render\n'), ((6273, 6302), 'django.shortcuts.redirect', 'redirect', (['url'], {'permanent': '(True)'}), '(url, permanent=True)\n', (6281, 6302), False, 'from django.shortcuts import redirect, render\n'), ((6441, 6533), 'dashboard.tasks.save_search_results.delay', 'save_search_results.delay', (["request.META['REMOTE_ADDR']", 'request.POST', '(0)', 'request.user.id'], {}), "(request.META['REMOTE_ADDR'], request.POST, 0,\n request.user.id)\n", (6466, 6533), False, 'from dashboard.tasks import save_search_results\n'), ((6665, 7015), 'django.shortcuts.render', 'render', (['request', 'template', "{'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'data_empty': True, 'job_search': True, 'reason':\n 'Only Valid Skills/Cities names are accepted in search',\n 'searched_skills': final_skill or [skill_name], 'searched_locations': \n final_location or [city_name]}"], {'status': '(404)'}), "(request, template, {'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'data_empty': True, 'job_search': True, 'reason':\n 'Only Valid Skills/Cities names are accepted in search',\n 'searched_skills': final_skill or [skill_name], 'searched_locations': \n final_location or [city_name]}, status=404)\n", (6671, 7015), False, 'from django.shortcuts import redirect, render\n'), ((7396, 7424), 'pjob.refine_search.refined_search', 'refined_search', (['request.POST'], {}), '(request.POST)\n', (7410, 7424), False, 'from pjob.refine_search import refined_search\n'), ((7457, 7484), 'django.http.QueryDict', 'QueryDict', (['""""""'], {'mutable': '(True)'}), "('', mutable=True)\n", (7466, 7484), False, 'from django.http import QueryDict\n'), ((7920, 7947), 'pjob.refine_search.refined_search', 'refined_search', (['search_dict'], {}), '(search_dict)\n', (7934, 7947), False, 'from pjob.refine_search import refined_search\n'), ((8116, 8158), 'pjob.views.get_page_number', 'get_page_number', (['request', 'kwargs', 'no_pages'], {}), '(request, kwargs, no_pages)\n', (8131, 8158), False, 'from pjob.views import get_page_number\n'), ((8290, 8332), 'mpcomp.views.get_prev_after_pages_count', 'get_prev_after_pages_count', (['page', 'no_pages'], {}), '(page, no_pages)\n', (8316, 8332), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((8459, 8641), 'mpcomp.views.get_meta_data', 'get_meta_data', (['"""skill_location_jobs"""', "{'skills': searched_skills, 'final_skill': final_skill, 'page': page,\n 'locations': searched_locations, 'final_location': final_location}"], {}), "('skill_location_jobs', {'skills': searched_skills,\n 'final_skill': final_skill, 'page': page, 'locations':\n searched_locations, 'final_location': final_location})\n", (8472, 8641), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((10211, 10242), 'django.shortcuts.render', 'render', (['request', 'template', 'data'], {}), '(request, template, data)\n', (10217, 10242), False, 'from django.shortcuts import redirect, render\n'), ((10366, 10452), 'mpcomp.views.get_404_meta', 'get_404_meta', (['"""skill_location_404"""', "{'skill': final_skill, 'city': final_location}"], {}), "('skill_location_404', {'skill': final_skill, 'city':\n final_location})\n", (10378, 10452), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((10477, 10835), 'django.shortcuts.render', 'render', (['request', 'template', "{'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'reason': 'Only Valid Skills/Cities names are accepted in search',\n 'job_search': True, 'searched_skills': searched_skills,\n 'searched_locations': searched_locations, 'meta_title': meta_title,\n 'meta_description': meta_description}"], {}), "(request, template, {'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'reason': 'Only Valid Skills/Cities names are accepted in search',\n 'job_search': True, 'searched_skills': searched_skills,\n 'searched_locations': searched_locations, 'meta_title': meta_title,\n 'meta_description': meta_description})\n", (10483, 10835), False, 'from django.shortcuts import redirect, render\n'), ((11238, 11275), 'django.shortcuts.redirect', 'redirect', (['current_url'], {'permanent': '(True)'}), '(current_url, permanent=True)\n', (11246, 11275), False, 'from django.shortcuts import redirect, render\n'), ((11379, 11408), 'django.shortcuts.redirect', 'redirect', (['url'], {'permanent': '(True)'}), '(url, permanent=True)\n', (11387, 11408), False, 'from django.shortcuts import redirect, render\n'), ((11916, 12323), 'django.shortcuts.render', 'render', (['request', 'template', "{'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'searched_job_type': 'walk-in', 'reason':\n 'Only Valid Skills/Cities names are accepted in search',\n 'searched_skills': skills, 'searched_locations': location, 'meta_title':\n meta_title, 'meta_description': meta_description, 'data_empty': True,\n 'job_search': True}"], {'status': '(404)'}), "(request, template, {'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'searched_job_type': 'walk-in', 'reason':\n 'Only Valid Skills/Cities names are accepted in search',\n 'searched_skills': skills, 'searched_locations': location, 'meta_title':\n meta_title, 'meta_description': meta_description, 'data_empty': True,\n 'job_search': True}, status=404)\n", (11922, 12323), False, 'from django.shortcuts import redirect, render\n'), ((12710, 12738), 'pjob.refine_search.refined_search', 'refined_search', (['request.POST'], {}), '(request.POST)\n', (12724, 12738), False, 'from pjob.refine_search import refined_search\n'), ((12771, 12798), 'django.http.QueryDict', 'QueryDict', (['""""""'], {'mutable': '(True)'}), "('', mutable=True)\n", (12780, 12798), False, 'from django.http import QueryDict\n'), ((13210, 13237), 'pjob.refine_search.refined_search', 'refined_search', (['search_dict'], {}), '(search_dict)\n', (13224, 13237), False, 'from pjob.refine_search import refined_search\n'), ((13406, 13448), 'pjob.views.get_page_number', 'get_page_number', (['request', 'kwargs', 'no_pages'], {}), '(request, kwargs, no_pages)\n', (13421, 13448), False, 'from pjob.views import get_page_number\n'), ((13580, 13622), 'mpcomp.views.get_prev_after_pages_count', 'get_prev_after_pages_count', (['page', 'no_pages'], {}), '(page, no_pages)\n', (13606, 13622), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((13749, 13938), 'mpcomp.views.get_meta_data', 'get_meta_data', (['"""skill_location_walkin_jobs"""', "{'skills': searched_skills, 'final_skill': final_skill, 'page': page,\n 'locations': searched_locations, 'final_location': final_location}"], {}), "('skill_location_walkin_jobs', {'skills': searched_skills,\n 'final_skill': final_skill, 'page': page, 'locations':\n searched_locations, 'final_location': final_location})\n", (13762, 13938), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((15568, 15599), 'django.shortcuts.render', 'render', (['request', 'template', 'data'], {}), '(request, template, data)\n', (15574, 15599), False, 'from django.shortcuts import redirect, render\n'), ((15723, 15809), 'mpcomp.views.get_404_meta', 'get_404_meta', (['"""skill_location_404"""', "{'skill': final_skill, 'city': final_location}"], {}), "('skill_location_404', {'skill': final_skill, 'city':\n final_location})\n", (15735, 15809), False, 'from mpcomp.views import get_prev_after_pages_count, get_valid_locations_list, get_valid_skills_list, get_meta_data, get_404_meta\n'), ((15834, 16192), 'django.shortcuts.render', 'render', (['request', 'template', "{'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'reason': 'Only Valid Skills/Cities names are accepted in search',\n 'job_search': True, 'searched_skills': searched_skills,\n 'searched_locations': searched_locations, 'meta_title': meta_title,\n 'meta_description': meta_description}"], {}), "(request, template, {'message':\n 'Unfortunately, we are unable to locate the jobs you are looking for',\n 'reason': 'Only Valid Skills/Cities names are accepted in search',\n 'job_search': True, 'searched_skills': searched_skills,\n 'searched_locations': searched_locations, 'meta_title': meta_title,\n 'meta_description': meta_description})\n", (15840, 16192), False, 'from django.shortcuts import redirect, render\n'), ((4385, 4409), 're.search', 're.search', (['"""[0-9]"""', 'page'], {}), "('[0-9]', page)\n", (4394, 4409), False, 'import re\n'), ((8199, 8232), 'django.http.response.HttpResponseRedirect', 'HttpResponseRedirect', (['current_url'], {}), '(current_url)\n', (8219, 8232), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((11601, 11693), 'dashboard.tasks.save_search_results.delay', 'save_search_results.delay', (["request.META['REMOTE_ADDR']", 'request.POST', '(0)', 'request.user.id'], {}), "(request.META['REMOTE_ADDR'], request.POST, 0,\n request.user.id)\n", (11626, 11693), False, 'from dashboard.tasks import save_search_results\n'), ((13489, 13522), 'django.http.response.HttpResponseRedirect', 'HttpResponseRedirect', (['current_url'], {}), '(current_url)\n', (13509, 13522), False, 'from django.http.response import HttpResponse, HttpResponseRedirect\n'), ((16521, 16552), 'haystack.query.SQ', 'SQ', ([], {'skill_name__contains': 'search'}), '(skill_name__contains=search)\n', (16523, 16552), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((16607, 16638), 'haystack.query.SQ', 'SQ', ([], {'skill_slug__contains': 'search'}), '(skill_slug__contains=search)\n', (16609, 16638), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((22917, 22934), 'django.template.defaultfilters.slugify', 'slugify', (['location'], {}), '(location)\n', (22924, 22934), False, 'from django.template.defaultfilters import slugify\n'), ((17082, 17111), 'haystack.query.SQ', 'SQ', ([], {'edu_name__contains': 'search'}), '(edu_name__contains=search)\n', (17084, 17111), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((17178, 17207), 'haystack.query.SQ', 'SQ', ([], {'edu_slug__contains': 'search'}), '(edu_slug__contains=search)\n', (17180, 17207), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((18259, 18290), 'haystack.query.SQ', 'SQ', ([], {'state_name__contains': 'search'}), '(state_name__contains=search)\n', (18261, 18290), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((18351, 18382), 'haystack.query.SQ', 'SQ', ([], {'state_slug__contains': 'search'}), '(state_slug__contains=search)\n', (18353, 18382), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((21751, 21773), 'django.db.models.Q', 'Q', ([], {'slug__iexact': 'search'}), '(slug__iexact=search)\n', (21752, 21773), False, 'from django.db.models import Q, F\n'), ((21776, 21798), 'django.db.models.Q', 'Q', ([], {'name__iexact': 'search'}), '(name__iexact=search)\n', (21777, 21798), False, 'from django.db.models import Q, F\n'), ((21847, 21869), 'django.db.models.Q', 'Q', ([], {'slug__iexact': 'search'}), '(slug__iexact=search)\n', (21848, 21869), False, 'from django.db.models import Q, F\n'), ((21872, 21894), 'django.db.models.Q', 'Q', ([], {'name__iexact': 'search'}), '(name__iexact=search)\n', (21873, 21894), False, 'from django.db.models import Q, F\n'), ((22180, 22195), 'django.template.defaultfilters.slugify', 'slugify', (['search'], {}), '(search)\n', (22187, 22195), False, 'from django.template.defaultfilters import slugify\n'), ((16479, 16495), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (16493, 16495), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((17795, 17811), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17809, 17811), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((18850, 18866), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (18864, 18866), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((19984, 20000), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (19998, 20000), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((20761, 20777), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (20775, 20777), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((22150, 22165), 'django.template.defaultfilters.slugify', 'slugify', (['search'], {}), '(search)\n', (22157, 22165), False, 'from django.template.defaultfilters import slugify\n'), ((22577, 22599), 'django.db.models.Q', 'Q', ([], {'slug__iexact': 'search'}), '(slug__iexact=search)\n', (22578, 22599), False, 'from django.db.models import Q, F\n'), ((22602, 22624), 'django.db.models.Q', 'Q', ([], {'name__iexact': 'search'}), '(name__iexact=search)\n', (22603, 22624), False, 'from django.db.models import Q, F\n'), ((17032, 17048), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17046, 17048), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((18217, 18233), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (18231, 18233), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((19446, 19462), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (19460, 19462), False, 'from haystack.query import SQ, SearchQuerySet\n'), ((22457, 22479), 'django.db.models.Q', 'Q', ([], {'slug__iexact': 'search'}), '(slug__iexact=search)\n', (22458, 22479), False, 'from django.db.models import Q, F\n'), ((22482, 22504), 'django.db.models.Q', 'Q', ([], {'name__iexact': 'search'}), '(name__iexact=search)\n', (22483, 22504), False, 'from django.db.models import Q, F\n'), ((22523, 22532), 'django.db.models.F', 'F', (['"""name"""'], {}), "('name')\n", (22524, 22532), False, 'from django.db.models import Q, F\n')] |
# Importing default django packages:
from django.db import models
from django.template.defaultfilters import slugify
# Importing models from the research core:
from research_core.models import Topic
# Importing 3rd party packages:
from tinymce import models as tinymce_models
class BlogPost(models.Model):
"""The main model for Blog Posts. The main content for the blog is rendered via a
tinymce HTML field and it connects to the research_core application via a ForeginKey
connecton to the Topic model.
Args:
title (models.CharField): The title of the blog post that is displayed on the page and
is used to generate the slug (unique ID) for the post.
blog_thumbnail (models.ImageField): The image that is used as the thumbnail and header image
in the blog post. If none is provided that thumbnail img for the posts' Topic object is used.
content (tinymce.HTMLField): The main content of the blog post. It is HTML content that is stored in
the database and is ediable in the admin page as a fully functional text editor. This field is
a 3rd party package called TinyMCE that deals with all CRUD functions of the text field.
published_at (models.DateTimeField): The datetime when the blog post was created.
last_updated (models.DateTimeField): The datetime when the last changes were made to the model instance.
slug (models.SlugField): The unique URL identifier that is used to query specific blog posts. It is generated
by 'slugifying' the title or can be directly created.
topic (models.ForeignKey): The topic that the blog post is assocaited with. It connects to the topic object in the
'research core' application via a ForeignKey.
"""
# Model Specific Fields:
title = models.CharField(max_length=250, unique=True)
blog_thumbnail = models.ImageField(upload_to="blog/thumbnails", null=True, blank=True, default=None)
content = tinymce_models.HTMLField()
published_at = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
slug = models.SlugField(max_length=300, unique=True, null=True, blank=True)
# Foregin Connection Fields:
topic = models.ForeignKey(Topic, on_delete=models.SET_NULL, null=True)
def save(self, *args, **kwargs):
# If slug field is empty, generate slug based off of title:
if self.slug:
pass
else:
self.slug = slugify(self.title)
# If there was no image field provided, use the thumbnail from the topic ForeginKey as the thumbnail:
if self.blog_thumbnail:
pass
else:
self.blog_thumbnail = self.topic.topic_img
super(BlogPost, self).save(*args, **kwargs)
class Meta:
ordering = ["-published_at"]
def __str__(self):
return self.title | [
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.template.defaultfilters.slugify",
"django.db.models.SlugField",
"tinymce.models.HTMLField",
"django.db.models.ImageField",
"django.db.models.CharField"
] | [((1849, 1894), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (1865, 1894), False, 'from django.db import models\n'), ((1916, 2003), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""blog/thumbnails"""', 'null': '(True)', 'blank': '(True)', 'default': 'None'}), "(upload_to='blog/thumbnails', null=True, blank=True,\n default=None)\n", (1933, 2003), False, 'from django.db import models\n'), ((2014, 2040), 'tinymce.models.HTMLField', 'tinymce_models.HTMLField', ([], {}), '()\n', (2038, 2040), True, 'from tinymce import models as tinymce_models\n'), ((2060, 2099), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2080, 2099), False, 'from django.db import models\n'), ((2119, 2154), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2139, 2154), False, 'from django.db import models\n'), ((2166, 2234), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(300)', 'unique': '(True)', 'null': '(True)', 'blank': '(True)'}), '(max_length=300, unique=True, null=True, blank=True)\n', (2182, 2234), False, 'from django.db import models\n'), ((2281, 2343), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Topic'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(Topic, on_delete=models.SET_NULL, null=True)\n', (2298, 2343), False, 'from django.db import models\n'), ((2531, 2550), 'django.template.defaultfilters.slugify', 'slugify', (['self.title'], {}), '(self.title)\n', (2538, 2550), False, 'from django.template.defaultfilters import slugify\n')] |
from contextlib import contextmanager
import torch
import torch.nn.functional as F
from torch.nn import Module, Parameter
from torch.nn import init
_WN_INIT_STDV = 0.05
_SMALL = 1e-10
_INIT_ENABLED = False
def is_init_enabled():
return _INIT_ENABLED
@contextmanager
def init_mode():
global _INIT_ENABLED
assert not _INIT_ENABLED
_INIT_ENABLED = True
yield
_INIT_ENABLED = False
class DataDepInitModule(Module):
"""
Module with data-dependent initialization
"""
def __init__(self):
super().__init__()
# self._wn_initialized = False
def _init(self, *args, **kwargs):
"""
Data-dependent initialization. Will be called on the first forward()
"""
raise NotImplementedError
def _forward(self, *args, **kwargs):
"""
The standard forward pass
"""
raise NotImplementedError
def forward(self, *args, **kwargs):
"""
Calls _init (with no_grad) if not initialized.
If initialized already, calls _forward.
"""
# assert self._wn_initialized == (not _INIT_ENABLED)
if _INIT_ENABLED: # not self._wn_initialized:
# self._wn_initialized = True
with torch.no_grad(): # no gradients for the init pass
return self._init(*args, **kwargs)
return self._forward(*args, **kwargs)
class Dense(DataDepInitModule):
def __init__(self, in_features, out_features, init_scale=1.0):
super().__init__()
self.in_features, self.out_features, self.init_scale = in_features, out_features, init_scale
self.w = Parameter(torch.Tensor(out_features, in_features))
self.b = Parameter(torch.Tensor(out_features))
init.normal_(self.w, 0, _WN_INIT_STDV)
init.zeros_(self.b)
def _init(self, x):
y = self._forward(x)
m = y.mean(dim=0)
s = self.init_scale / (y.std(dim=0) + _SMALL)
assert m.shape == s.shape == self.b.shape
self.w.copy_(self.w * s[:, None])
self.b.copy_(-m * s)
return self._forward(x)
def _forward(self, x):
return F.linear(x, self.w, self.b[None, :])
class WnDense(DataDepInitModule):
def __init__(self, in_features, out_features, init_scale=1.0):
super().__init__()
self.in_features, self.out_features, self.init_scale = in_features, out_features, init_scale
self.v = Parameter(torch.Tensor(out_features, in_features))
self.g = Parameter(torch.Tensor(out_features))
self.b = Parameter(torch.Tensor(out_features))
init.normal_(self.v, 0., _WN_INIT_STDV)
init.ones_(self.g)
init.zeros_(self.b)
def _init(self, x):
# calculate unnormalized activations
y_unnormalized = self._forward(x)
# set g and b so that activations are normalized
m = y_unnormalized.mean(dim=0)
s = self.init_scale / (y_unnormalized.std(dim=0) + _SMALL)
assert m.shape == s.shape == self.g.shape == self.b.shape
self.g.data.copy_(s)
self.b.data.sub_(m * s)
# forward pass again, now normalized
return self._forward(x)
def _forward(self, x):
(bs, in_features), out_features = x.shape, self.v.shape[0]
assert in_features == self.v.shape[1]
vnorm = self.v.norm(p=2, dim=1)
assert vnorm.shape == self.g.shape == self.b.shape
y = torch.addcmul(self.b[None, :], (self.g / vnorm)[None, :], x @ self.v.t())
# the line above is equivalent to: y = self.b[None, :] + (self.g / vnorm)[None, :] * (x @ self.v.t())
assert y.shape == (bs, out_features)
return y
def extra_repr(self):
return f'in_features={self.in_dim}, out_features={self.out_features}, init_scale={self.init_scale}'
class _Nin(DataDepInitModule):
def __init__(self, in_features, out_features, wn: bool, init_scale: float):
super().__init__()
base_module = WnDense if wn else Dense
self.dense = base_module(in_features=in_features, out_features=out_features, init_scale=init_scale)
self.height, self.width = None, None
def _preprocess(self, x):
"""(b,c,h,w) -> (b*h*w,c)"""
B, C, H, W = x.shape
if self.height is None or self.width is None:
self.height, self.width = H, W
else:
assert self.height == H and self.width == W, 'nin input image shape changed!'
assert C == self.dense.in_features
return x.permute(0, 2, 3, 1).reshape(B * H * W, C)
def _postprocess(self, x):
"""(b*h*w,c) -> (b,c,h,w)"""
BHW, C = x.shape
out = x.reshape(-1, self.height, self.width, C).permute(0, 3, 1, 2)
assert out.shape[1:] == (self.dense.out_features, self.height, self.width)
return out
def _init(self, x):
return self._postprocess(self.dense._init(self._preprocess(x)))
def _forward(self, x):
return self._postprocess(self.dense._forward(self._preprocess(x)))
class Nin(_Nin):
def __init__(self, in_features, out_features, init_scale=1.0):
super().__init__(in_features=in_features, out_features=out_features, wn=False, init_scale=init_scale)
class WnNin(_Nin):
def __init__(self, in_features, out_features, init_scale=1.0):
super().__init__(in_features=in_features, out_features=out_features, wn=True, init_scale=init_scale)
class Conv2d(DataDepInitModule):
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, init_scale=1.0):
super().__init__()
self.in_channels, self.out_channels, self.kernel_size, self.stride, self.padding, self.dilation, self.init_scale = \
in_channels, out_channels, kernel_size, stride, padding, dilation, init_scale
self.w = Parameter(torch.Tensor(out_channels, in_channels, self.kernel_size, self.kernel_size))
self.b = Parameter(torch.Tensor(out_channels))
init.normal_(self.w, 0, _WN_INIT_STDV)
init.zeros_(self.b)
def _init(self, x):
# x.shape == (batch, channels, h, w)
y = self._forward(x) # (batch, out_channels, h, w)
m = y.transpose(0, 1).reshape(y.shape[1], -1).mean(dim=1) # out_channels
s = self.init_scale / (y.transpose(0, 1).reshape(y.shape[1], -1).std(dim=1) + _SMALL) # out_channels
self.w.copy_(self.w * s[:, None, None, None]) # (out, in, k, k) * (ou))
self.b.copy_(-m * s)
return self._forward(x)
def _forward(self, x):
return F.conv2d(x, self.w, self.b, self.stride, self.padding, self.dilation, 1)
class WnConv2d(DataDepInitModule):
def __init__(self, in_channels, out_channels, kernel_size, padding, init_scale=1.0):
super().__init__()
self.in_channels, self.out_channels, self.kernel_size, self.padding = in_channels, out_channels, kernel_size, padding
self.init_scale = init_scale
self.v = Parameter(torch.Tensor(out_channels, in_channels, self.kernel_size, self.kernel_size))
self.g = Parameter(torch.Tensor(out_channels))
self.b = Parameter(torch.Tensor(out_channels))
init.normal_(self.v, 0., _WN_INIT_STDV)
init.ones_(self.g)
init.zeros_(self.b)
def _init(self, x):
# calculate unnormalized activations
y_bchw = self._forward(x)
assert len(y_bchw.shape) == 4 and y_bchw.shape[:2] == (x.shape[0], self.out_channels)
# set g and b so that activations are normalized
y_c = y_bchw.transpose(0, 1).reshape(self.out_channels, -1)
m = y_c.mean(dim=1)
s = self.init_scale / (y_c.std(dim=1) + _SMALL)
assert m.shape == s.shape == self.g.shape == self.b.shape
self.g.data.copy_(s)
self.b.data.sub_(m * s)
# forward pass again, now normalized
return self._forward(x)
def _forward(self, x):
vnorm = self.v.view(self.out_channels, -1).norm(p=2, dim=1)
assert vnorm.shape == self.g.shape == self.b.shape
w = self.v * (self.g / (vnorm + _SMALL)).view(self.out_channels, 1, 1, 1)
return F.conv2d(x, w, self.b, padding=self.padding)
def extra_repr(self):
return f'in_channels={self.in_dim}, out_channels={self.out_channels}, kernel_size={self.kernel_size}, padding={self.padding}, init_scale={self.init_scale}'
class LearnedNorm(DataDepInitModule):
def __init__(self, shape, init_scale=1.0):
super().__init__()
self.init_scale = init_scale
self.g = Parameter(torch.ones(*shape))
self.b = Parameter(torch.zeros(*shape))
def _init(self, x, *, inverse):
assert not inverse
assert x.shape[1:] == self.g.shape == self.b.shape
m_init = x.mean(dim=0)
scale_init = self.init_scale / (x.std(dim=0) + _SMALL)
self.g.copy_(scale_init)
self.b.copy_(-m_init * scale_init)
return self._forward(x, inverse=inverse)
def get_gain(self):
return torch.clamp(self.g, min=1e-10)
def _forward(self, x, *, inverse):
"""
inverse == False to normalize; inverse == True to unnormalize
"""
assert x.shape[1:] == self.g.shape == self.b.shape
assert x.dtype == self.g.dtype == self.b.dtype
g = self.get_gain()
if not inverse:
return x * g[None] + self.b[None]
else:
return (x - self.b[None]) / g[None]
@torch.no_grad()
def _test_data_dep_init(m, x, init_scale, verbose=True, tol=1e-8, kwargs=None):
if kwargs is None:
kwargs = {}
with init_mode():
y_init = m(x, **kwargs)
y = m(x, **kwargs)
assert (y - y_init).abs().max() < tol, 'init pass output does not match normal forward pass'
y_outputs_flat = y.transpose(0, 1).reshape(y.shape[1], -1) # assumes axis 1 is the output axis
assert y_outputs_flat.mean(dim=1).abs().max() < tol, 'means wrong after normalization'
assert (y_outputs_flat.std(dim=1) - init_scale).abs().max() < tol, 'standard deviations wrong after normalization'
if verbose:
print('ok')
def test_dense():
bs = 128
in_features = 20
out_features = 29
init_scale = 3.14159
x = torch.randn(bs, in_features, dtype=torch.float64)
for module in [Dense, WnDense]:
m = module(in_features=in_features, out_features=out_features, init_scale=init_scale).double()
_test_data_dep_init(m, x, init_scale)
assert m(x).shape == (bs, out_features)
def test_conv2d():
bs = 128
in_channels = 20
out_channels = 29
height = 9
width = 11
init_scale = 3.14159
x = torch.randn(bs, in_channels, height, width, dtype=torch.float64)
for module in [Conv2d, WnConv2d]:
m = module(in_channels=in_channels, out_channels=out_channels, kernel_size=3, padding=1,
init_scale=init_scale).double()
_test_data_dep_init(m, x, init_scale)
assert m(x).shape == (bs, out_channels, height, width)
def test_learnednorm():
bs = 128
in_features = 20
init_scale = 3.14159
x = torch.rand(bs, in_features, dtype=torch.float64)
m = LearnedNorm(shape=(in_features,), init_scale=init_scale).double()
_test_data_dep_init(m, x, init_scale, kwargs={'inverse': False})
y = m(x, inverse=False)
assert y.shape == (bs, in_features)
assert torch.allclose(m(y, inverse=True), x), 'inverse failed'
| [
"torch.nn.functional.linear",
"torch.nn.functional.conv2d",
"torch.rand",
"torch.nn.init.ones_",
"torch.Tensor",
"torch.nn.init.zeros_",
"torch.clamp",
"torch.zeros",
"torch.no_grad",
"torch.randn",
"torch.nn.init.normal_",
"torch.ones"
] | [((9450, 9465), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (9463, 9465), False, 'import torch\n'), ((10218, 10267), 'torch.randn', 'torch.randn', (['bs', 'in_features'], {'dtype': 'torch.float64'}), '(bs, in_features, dtype=torch.float64)\n', (10229, 10267), False, 'import torch\n'), ((10641, 10705), 'torch.randn', 'torch.randn', (['bs', 'in_channels', 'height', 'width'], {'dtype': 'torch.float64'}), '(bs, in_channels, height, width, dtype=torch.float64)\n', (10652, 10705), False, 'import torch\n'), ((11094, 11142), 'torch.rand', 'torch.rand', (['bs', 'in_features'], {'dtype': 'torch.float64'}), '(bs, in_features, dtype=torch.float64)\n', (11104, 11142), False, 'import torch\n'), ((1758, 1796), 'torch.nn.init.normal_', 'init.normal_', (['self.w', '(0)', '_WN_INIT_STDV'], {}), '(self.w, 0, _WN_INIT_STDV)\n', (1770, 1796), False, 'from torch.nn import init\n'), ((1805, 1824), 'torch.nn.init.zeros_', 'init.zeros_', (['self.b'], {}), '(self.b)\n', (1816, 1824), False, 'from torch.nn import init\n'), ((2155, 2191), 'torch.nn.functional.linear', 'F.linear', (['x', 'self.w', 'self.b[None, :]'], {}), '(x, self.w, self.b[None, :])\n', (2163, 2191), True, 'import torch.nn.functional as F\n'), ((2611, 2651), 'torch.nn.init.normal_', 'init.normal_', (['self.v', '(0.0)', '_WN_INIT_STDV'], {}), '(self.v, 0.0, _WN_INIT_STDV)\n', (2623, 2651), False, 'from torch.nn import init\n'), ((2659, 2677), 'torch.nn.init.ones_', 'init.ones_', (['self.g'], {}), '(self.g)\n', (2669, 2677), False, 'from torch.nn import init\n'), ((2686, 2705), 'torch.nn.init.zeros_', 'init.zeros_', (['self.b'], {}), '(self.b)\n', (2697, 2705), False, 'from torch.nn import init\n'), ((5999, 6037), 'torch.nn.init.normal_', 'init.normal_', (['self.w', '(0)', '_WN_INIT_STDV'], {}), '(self.w, 0, _WN_INIT_STDV)\n', (6011, 6037), False, 'from torch.nn import init\n'), ((6046, 6065), 'torch.nn.init.zeros_', 'init.zeros_', (['self.b'], {}), '(self.b)\n', (6057, 6065), False, 'from torch.nn import init\n'), ((6573, 6645), 'torch.nn.functional.conv2d', 'F.conv2d', (['x', 'self.w', 'self.b', 'self.stride', 'self.padding', 'self.dilation', '(1)'], {}), '(x, self.w, self.b, self.stride, self.padding, self.dilation, 1)\n', (6581, 6645), True, 'import torch.nn.functional as F\n'), ((7186, 7226), 'torch.nn.init.normal_', 'init.normal_', (['self.v', '(0.0)', '_WN_INIT_STDV'], {}), '(self.v, 0.0, _WN_INIT_STDV)\n', (7198, 7226), False, 'from torch.nn import init\n'), ((7234, 7252), 'torch.nn.init.ones_', 'init.ones_', (['self.g'], {}), '(self.g)\n', (7244, 7252), False, 'from torch.nn import init\n'), ((7261, 7280), 'torch.nn.init.zeros_', 'init.zeros_', (['self.b'], {}), '(self.b)\n', (7272, 7280), False, 'from torch.nn import init\n'), ((8144, 8188), 'torch.nn.functional.conv2d', 'F.conv2d', (['x', 'w', 'self.b'], {'padding': 'self.padding'}), '(x, w, self.b, padding=self.padding)\n', (8152, 8188), True, 'import torch.nn.functional as F\n'), ((9008, 9038), 'torch.clamp', 'torch.clamp', (['self.g'], {'min': '(1e-10)'}), '(self.g, min=1e-10)\n', (9019, 9038), False, 'import torch\n'), ((1653, 1692), 'torch.Tensor', 'torch.Tensor', (['out_features', 'in_features'], {}), '(out_features, in_features)\n', (1665, 1692), False, 'import torch\n'), ((1721, 1747), 'torch.Tensor', 'torch.Tensor', (['out_features'], {}), '(out_features)\n', (1733, 1747), False, 'import torch\n'), ((2451, 2490), 'torch.Tensor', 'torch.Tensor', (['out_features', 'in_features'], {}), '(out_features, in_features)\n', (2463, 2490), False, 'import torch\n'), ((2519, 2545), 'torch.Tensor', 'torch.Tensor', (['out_features'], {}), '(out_features)\n', (2531, 2545), False, 'import torch\n'), ((2574, 2600), 'torch.Tensor', 'torch.Tensor', (['out_features'], {}), '(out_features)\n', (2586, 2600), False, 'import torch\n'), ((5858, 5933), 'torch.Tensor', 'torch.Tensor', (['out_channels', 'in_channels', 'self.kernel_size', 'self.kernel_size'], {}), '(out_channels, in_channels, self.kernel_size, self.kernel_size)\n', (5870, 5933), False, 'import torch\n'), ((5962, 5988), 'torch.Tensor', 'torch.Tensor', (['out_channels'], {}), '(out_channels)\n', (5974, 5988), False, 'import torch\n'), ((6990, 7065), 'torch.Tensor', 'torch.Tensor', (['out_channels', 'in_channels', 'self.kernel_size', 'self.kernel_size'], {}), '(out_channels, in_channels, self.kernel_size, self.kernel_size)\n', (7002, 7065), False, 'import torch\n'), ((7094, 7120), 'torch.Tensor', 'torch.Tensor', (['out_channels'], {}), '(out_channels)\n', (7106, 7120), False, 'import torch\n'), ((7149, 7175), 'torch.Tensor', 'torch.Tensor', (['out_channels'], {}), '(out_channels)\n', (7161, 7175), False, 'import torch\n'), ((8558, 8576), 'torch.ones', 'torch.ones', (['*shape'], {}), '(*shape)\n', (8568, 8576), False, 'import torch\n'), ((8605, 8624), 'torch.zeros', 'torch.zeros', (['*shape'], {}), '(*shape)\n', (8616, 8624), False, 'import torch\n'), ((1248, 1263), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1261, 1263), False, 'import torch\n')] |
import pytest
from google.protobuf import json_format
import verta.code
from verta._internal_utils import _git_utils
class TestGit:
def test_no_autocapture(self):
code_ver = verta.code.Git(_autocapture=False)
# protobuf message is empty
assert not json_format.MessageToDict(
code_ver._msg,
including_default_value_fields=False,
)
def test_repr(self):
"""Tests that __repr__() executes without error"""
try:
_git_utils.get_git_repo_root_dir()
except OSError:
pytest.skip("not in git repo")
code_ver = verta.code.Git()
assert code_ver.__repr__()
class TestNotebook:
def test_no_autocapture(self):
code_ver = verta.code.Notebook(_autocapture=False)
# protobuf message is empty
assert not json_format.MessageToDict(
code_ver._msg,
including_default_value_fields=False,
)
| [
"google.protobuf.json_format.MessageToDict",
"pytest.skip",
"verta._internal_utils._git_utils.get_git_repo_root_dir"
] | [((281, 359), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['code_ver._msg'], {'including_default_value_fields': '(False)'}), '(code_ver._msg, including_default_value_fields=False)\n', (306, 359), False, 'from google.protobuf import json_format\n'), ((505, 539), 'verta._internal_utils._git_utils.get_git_repo_root_dir', '_git_utils.get_git_repo_root_dir', ([], {}), '()\n', (537, 539), False, 'from verta._internal_utils import _git_utils\n'), ((852, 930), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['code_ver._msg'], {'including_default_value_fields': '(False)'}), '(code_ver._msg, including_default_value_fields=False)\n', (877, 930), False, 'from google.protobuf import json_format\n'), ((576, 606), 'pytest.skip', 'pytest.skip', (['"""not in git repo"""'], {}), "('not in git repo')\n", (587, 606), False, 'import pytest\n')] |
from django import forms
class PostForm(forms.Form):
title = forms.CharField(max_length=30, label='タイトル')
content = forms.CharField(label='内容', widget=forms.Textarea()) | [
"django.forms.Textarea",
"django.forms.CharField"
] | [((65, 109), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(30)', 'label': '"""タイトル"""'}), "(max_length=30, label='タイトル')\n", (80, 109), False, 'from django import forms\n'), ((161, 177), 'django.forms.Textarea', 'forms.Textarea', ([], {}), '()\n', (175, 177), False, 'from django import forms\n')] |
from django.urls import path
from django.conf.urls import url
from django.urls import path,include
import grievance.views as VIEWS
from django.conf.urls.static import static
from django.conf import settings
app_name = 'grievance'
urlpatterns =[
# path('', VIEWS.HomeView.as_view())
path('level1/', VIEWS.level1HomeView.as_view()),
path('level1/type/<type>',VIEWS.level1RequestView.as_view()),
path('level1/student/<student_id>', VIEWS.level1StudentView.as_view()),
path('level1/psd-student-status/student/<student_id>',VIEWS.ViewOnlyPSDStudentPageView.as_view()),
path('', include('django.contrib.auth.urls')),
path('redirect/', VIEWS.RedirectView.as_view()),
path('student/', VIEWS.studentHomeView.as_view()),
path('level2/', VIEWS.level2HomeView.as_view()),
path('level2/type/<type>', VIEWS.level2RequestView.as_view()),
path('level2/student/<student_id>', VIEWS.level2StudentView.as_view()),
path('<level>/student-status/student/<student_id>', VIEWS.ViewOnlyStudentPageView.as_view()),
path('website-admin', VIEWS.websiteAdminHomePageView.as_view()),
# path('website-admin/change-deadline', VIEWS.changeDeadlineView.as_view()),
path('website-admin/add-user', VIEWS.addUser.as_view()),
path('get-deadline', VIEWS.showDeadlineInHeader.as_view()),
path('dev_page', VIEWS.devPageView.as_view()),
]
urlpatterns+= static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"grievance.views.level1HomeView.as_view",
"grievance.views.studentHomeView.as_view",
"grievance.views.websiteAdminHomePageView.as_view",
"grievance.views.ViewOnlyPSDStudentPageView.as_view",
"grievance.views.level1StudentView.as_view",
"django.urls.include",
"grievance.views.level1RequestView.as_view",
... | [((1332, 1393), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (1338, 1393), False, 'from django.conf.urls.static import static\n'), ((302, 332), 'grievance.views.level1HomeView.as_view', 'VIEWS.level1HomeView.as_view', ([], {}), '()\n', (330, 332), True, 'import grievance.views as VIEWS\n'), ((362, 395), 'grievance.views.level1RequestView.as_view', 'VIEWS.level1RequestView.as_view', ([], {}), '()\n', (393, 395), True, 'import grievance.views as VIEWS\n'), ((435, 468), 'grievance.views.level1StudentView.as_view', 'VIEWS.level1StudentView.as_view', ([], {}), '()\n', (466, 468), True, 'import grievance.views as VIEWS\n'), ((526, 568), 'grievance.views.ViewOnlyPSDStudentPageView.as_view', 'VIEWS.ViewOnlyPSDStudentPageView.as_view', ([], {}), '()\n', (566, 568), True, 'import grievance.views as VIEWS\n'), ((581, 616), 'django.urls.include', 'include', (['"""django.contrib.auth.urls"""'], {}), "('django.contrib.auth.urls')\n", (588, 616), False, 'from django.urls import path, include\n'), ((638, 666), 'grievance.views.RedirectView.as_view', 'VIEWS.RedirectView.as_view', ([], {}), '()\n', (664, 666), True, 'import grievance.views as VIEWS\n'), ((687, 718), 'grievance.views.studentHomeView.as_view', 'VIEWS.studentHomeView.as_view', ([], {}), '()\n', (716, 718), True, 'import grievance.views as VIEWS\n'), ((738, 768), 'grievance.views.level2HomeView.as_view', 'VIEWS.level2HomeView.as_view', ([], {}), '()\n', (766, 768), True, 'import grievance.views as VIEWS\n'), ((799, 832), 'grievance.views.level2RequestView.as_view', 'VIEWS.level2RequestView.as_view', ([], {}), '()\n', (830, 832), True, 'import grievance.views as VIEWS\n'), ((872, 905), 'grievance.views.level2StudentView.as_view', 'VIEWS.level2StudentView.as_view', ([], {}), '()\n', (903, 905), True, 'import grievance.views as VIEWS\n'), ((961, 1000), 'grievance.views.ViewOnlyStudentPageView.as_view', 'VIEWS.ViewOnlyStudentPageView.as_view', ([], {}), '()\n', (998, 1000), True, 'import grievance.views as VIEWS\n'), ((1026, 1066), 'grievance.views.websiteAdminHomePageView.as_view', 'VIEWS.websiteAdminHomePageView.as_view', ([], {}), '()\n', (1064, 1066), True, 'import grievance.views as VIEWS\n'), ((1179, 1202), 'grievance.views.addUser.as_view', 'VIEWS.addUser.as_view', ([], {}), '()\n', (1200, 1202), True, 'import grievance.views as VIEWS\n'), ((1227, 1263), 'grievance.views.showDeadlineInHeader.as_view', 'VIEWS.showDeadlineInHeader.as_view', ([], {}), '()\n', (1261, 1263), True, 'import grievance.views as VIEWS\n'), ((1284, 1311), 'grievance.views.devPageView.as_view', 'VIEWS.devPageView.as_view', ([], {}), '()\n', (1309, 1311), True, 'import grievance.views as VIEWS\n')] |
from time import sleep
from cores import *
print(f'{cores["azul"]}Em breve a queima de fogos irá começar...{limpar}')
for c in range(10, -1, -1):
print(c)
sleep(1)
print(f'{cores["vermelho"]}{fx["negrito"]}Feliz ano novo!{limpar} 🎆 🎆')
| [
"time.sleep"
] | [((163, 171), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (168, 171), False, 'from time import sleep\n')] |
from optparse import make_option
from django.apps import apps
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management.base import BaseCommand
from django.db.models import FileField, ImageField
from database_files.models import File
class Command(BaseCommand):
args = ''
help = 'Deletes all files in the database that are not referenced by ' + \
'any model fields.'
option_list = BaseCommand.option_list + (
make_option(
'--dryrun', action='store_true', dest='dryrun', default=False,
help='If given, only displays the names of orphaned files and does not delete them.'),
make_option(
'--filenames', default='', help='If given, only files with these names will be checked')
)
def handle(self, *args, **options):
tmp_debug = settings.DEBUG
settings.DEBUG = False
names = set()
dryrun = options['dryrun']
filenames = set(_.strip() for _ in options['filenames'].split(',') if _.strip())
try:
for model in apps.get_models():
print('Checking model %s...' % (model,))
for field in model._meta.fields:
if not isinstance(field, (FileField, ImageField)):
continue
# Ignore records with null or empty string values.
q = {'%s__isnull' % field.name: False}
xq = {field.name: ''}
subq = model.objects.filter(**q).exclude(**xq)
subq_total = subq.count()
subq_i = 0
for row in subq.iterator():
subq_i += 1
if subq_i == 1 or not subq_i % 100:
print('%i of %i' % (subq_i, subq_total))
f = getattr(row, field.name)
if f is None:
continue
if not f.name:
continue
names.add(f.name)
# Find all database files with names not in our list.
print('Finding orphaned files...')
orphan_files = File.objects.exclude(name__in=names)
if filenames:
orphan_files = orphan_files.filter(name__in=filenames)
orphan_files = orphan_files.only('name', 'size')
total_bytes = 0
orphan_total = orphan_files.count()
orphan_i = 0
print('Deleting %i orphaned files...' % (orphan_total,))
for f in orphan_files.iterator():
orphan_i += 1
if orphan_i == 1 or not orphan_i % 100:
print('%i of %i' % (orphan_i, orphan_total))
total_bytes += f.size
if dryrun:
print('File %s is orphaned.' % (f.name,))
else:
print('Deleting orphan file %s...' % (f.name,))
default_storage.delete(f.name)
print('%i total bytes in orphan files.' % total_bytes)
finally:
settings.DEBUG = tmp_debug
| [
"database_files.models.File.objects.exclude",
"django.core.files.storage.default_storage.delete",
"django.apps.apps.get_models",
"optparse.make_option"
] | [((498, 672), 'optparse.make_option', 'make_option', (['"""--dryrun"""'], {'action': '"""store_true"""', 'dest': '"""dryrun"""', 'default': '(False)', 'help': '"""If given, only displays the names of orphaned files and does not delete them."""'}), "('--dryrun', action='store_true', dest='dryrun', default=False,\n help=\n 'If given, only displays the names of orphaned files and does not delete them.'\n )\n", (509, 672), False, 'from optparse import make_option\n'), ((693, 798), 'optparse.make_option', 'make_option', (['"""--filenames"""'], {'default': '""""""', 'help': '"""If given, only files with these names will be checked"""'}), "('--filenames', default='', help=\n 'If given, only files with these names will be checked')\n", (704, 798), False, 'from optparse import make_option\n'), ((1108, 1125), 'django.apps.apps.get_models', 'apps.get_models', ([], {}), '()\n', (1123, 1125), False, 'from django.apps import apps\n'), ((2253, 2289), 'database_files.models.File.objects.exclude', 'File.objects.exclude', ([], {'name__in': 'names'}), '(name__in=names)\n', (2273, 2289), False, 'from database_files.models import File\n'), ((3052, 3082), 'django.core.files.storage.default_storage.delete', 'default_storage.delete', (['f.name'], {}), '(f.name)\n', (3074, 3082), False, 'from django.core.files.storage import default_storage\n')] |
from spectacles import utils
from spectacles.logger import GLOBAL_LOGGER as logger
from unittest.mock import MagicMock
import pytest
import unittest
TEST_BASE_URL = "https://test.looker.com"
def test_compose_url_one_path_component():
url = utils.compose_url(TEST_BASE_URL, ["api"])
assert url == "https://test.looker.com/api"
def test_compose_url_multiple_path_components():
url = utils.compose_url(TEST_BASE_URL, ["api", "3.0", "login", "42", "auth", "27"])
assert url == "https://test.looker.com/api/3.0/login/42/auth/27"
def test_compose_url_multiple_path_components_and_multiple_field_params():
url = utils.compose_url(
TEST_BASE_URL,
["api", "3.0", "login", "42", "auth", "27"],
{"fields": ["joins", "id"]},
)
assert url == "https://test.looker.com/api/3.0/login/42/auth/27?fields=joins%2Cid"
def test_compose_url_multiple_path_components_and_one_field_params():
url = utils.compose_url(
TEST_BASE_URL,
["api", "3.0", "login", "42", "auth", "27"],
{"fields": ["joins"]},
)
assert url == "https://test.looker.com/api/3.0/login/42/auth/27?fields=joins"
def test_compose_url_with_extra_slashes():
url = utils.compose_url(TEST_BASE_URL + "/", ["/api//", "3.0/login/"])
assert url == "https://test.looker.com/api/3.0/login"
human_readable_testcases = [
(0.000002345, "0 seconds"),
(0.02, "0 seconds"),
(60, "1 minute"),
(61.002, "1 minute and 1 second"),
(62, "1 minute and 2 seconds"),
(2790, "46 minutes and 30 seconds"),
]
@pytest.mark.parametrize("elapsed,expected", human_readable_testcases)
def test_human_readable(elapsed, expected):
human_readable = utils.human_readable(elapsed)
assert human_readable == expected
get_detail_testcases = [
("run_sql", "SQL "),
("run_assert", "data test "),
("run_content", "content "),
("OtherClass.validate", ""),
]
@pytest.mark.parametrize("fn_name,expected", get_detail_testcases)
def test_get_detail(fn_name, expected):
detail = utils.get_detail(fn_name)
assert detail == expected
class TestLogDurationDecorator(unittest.TestCase):
def test_log_SQL(self):
with self.assertLogs(logger=logger, level="INFO") as cm:
func = MagicMock()
func.__name__ = "run_sql"
decorated_func = utils.log_duration(func)
decorated_func()
self.assertIn("INFO:spectacles:Completed SQL validation in", cm.output[0])
def test_log_assert(self):
with self.assertLogs(logger=logger, level="INFO") as cm:
func = MagicMock()
func.__name__ = "run_assert"
decorated_func = utils.log_duration(func)
decorated_func()
self.assertIn("INFO:spectacles:Completed data test validation in", cm.output[0])
def test_log_content(self):
with self.assertLogs(logger=logger, level="INFO") as cm:
func = MagicMock()
func.__name__ = "run_content"
decorated_func = utils.log_duration(func)
decorated_func()
self.assertIn("INFO:spectacles:Completed content validation in", cm.output[0])
def test_log_other(self):
with self.assertLogs(logger=logger, level="INFO") as cm:
func = MagicMock()
func.__name__ = "OtherValidator.validate"
decorated_func = utils.log_duration(func)
decorated_func()
self.assertIn("INFO:spectacles:Completed validation in", cm.output[0])
def test_chunks_returns_expected_results():
to_chunk = list(range(10)) # has length of 10
assert len(list(utils.chunks(to_chunk, 5))) == 2
assert len(list(utils.chunks(to_chunk, 9))) == 2
assert len(list(utils.chunks(to_chunk, 10))) == 1
assert len(list(utils.chunks(to_chunk, 11))) == 1
| [
"spectacles.utils.human_readable",
"spectacles.utils.compose_url",
"unittest.mock.MagicMock",
"spectacles.utils.log_duration",
"pytest.mark.parametrize",
"spectacles.utils.get_detail",
"spectacles.utils.chunks"
] | [((1562, 1631), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""elapsed,expected"""', 'human_readable_testcases'], {}), "('elapsed,expected', human_readable_testcases)\n", (1585, 1631), False, 'import pytest\n'), ((1922, 1987), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fn_name,expected"""', 'get_detail_testcases'], {}), "('fn_name,expected', get_detail_testcases)\n", (1945, 1987), False, 'import pytest\n'), ((247, 288), 'spectacles.utils.compose_url', 'utils.compose_url', (['TEST_BASE_URL', "['api']"], {}), "(TEST_BASE_URL, ['api'])\n", (264, 288), False, 'from spectacles import utils\n'), ((398, 475), 'spectacles.utils.compose_url', 'utils.compose_url', (['TEST_BASE_URL', "['api', '3.0', 'login', '42', 'auth', '27']"], {}), "(TEST_BASE_URL, ['api', '3.0', 'login', '42', 'auth', '27'])\n", (415, 475), False, 'from spectacles import utils\n'), ((632, 743), 'spectacles.utils.compose_url', 'utils.compose_url', (['TEST_BASE_URL', "['api', '3.0', 'login', '42', 'auth', '27']", "{'fields': ['joins', 'id']}"], {}), "(TEST_BASE_URL, ['api', '3.0', 'login', '42', 'auth', '27'\n ], {'fields': ['joins', 'id']})\n", (649, 743), False, 'from spectacles import utils\n'), ((939, 1044), 'spectacles.utils.compose_url', 'utils.compose_url', (['TEST_BASE_URL', "['api', '3.0', 'login', '42', 'auth', '27']", "{'fields': ['joins']}"], {}), "(TEST_BASE_URL, ['api', '3.0', 'login', '42', 'auth', '27'\n ], {'fields': ['joins']})\n", (956, 1044), False, 'from spectacles import utils\n'), ((1208, 1272), 'spectacles.utils.compose_url', 'utils.compose_url', (["(TEST_BASE_URL + '/')", "['/api//', '3.0/login/']"], {}), "(TEST_BASE_URL + '/', ['/api//', '3.0/login/'])\n", (1225, 1272), False, 'from spectacles import utils\n'), ((1697, 1726), 'spectacles.utils.human_readable', 'utils.human_readable', (['elapsed'], {}), '(elapsed)\n', (1717, 1726), False, 'from spectacles import utils\n'), ((2041, 2066), 'spectacles.utils.get_detail', 'utils.get_detail', (['fn_name'], {}), '(fn_name)\n', (2057, 2066), False, 'from spectacles import utils\n'), ((2262, 2273), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2271, 2273), False, 'from unittest.mock import MagicMock\n'), ((2341, 2365), 'spectacles.utils.log_duration', 'utils.log_duration', (['func'], {}), '(func)\n', (2359, 2365), False, 'from spectacles import utils\n'), ((2594, 2605), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2603, 2605), False, 'from unittest.mock import MagicMock\n'), ((2676, 2700), 'spectacles.utils.log_duration', 'utils.log_duration', (['func'], {}), '(func)\n', (2694, 2700), False, 'from spectacles import utils\n'), ((2936, 2947), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2945, 2947), False, 'from unittest.mock import MagicMock\n'), ((3019, 3043), 'spectacles.utils.log_duration', 'utils.log_duration', (['func'], {}), '(func)\n', (3037, 3043), False, 'from spectacles import utils\n'), ((3275, 3286), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3284, 3286), False, 'from unittest.mock import MagicMock\n'), ((3370, 3394), 'spectacles.utils.log_duration', 'utils.log_duration', (['func'], {}), '(func)\n', (3388, 3394), False, 'from spectacles import utils\n'), ((3620, 3645), 'spectacles.utils.chunks', 'utils.chunks', (['to_chunk', '(5)'], {}), '(to_chunk, 5)\n', (3632, 3645), False, 'from spectacles import utils\n'), ((3673, 3698), 'spectacles.utils.chunks', 'utils.chunks', (['to_chunk', '(9)'], {}), '(to_chunk, 9)\n', (3685, 3698), False, 'from spectacles import utils\n'), ((3726, 3752), 'spectacles.utils.chunks', 'utils.chunks', (['to_chunk', '(10)'], {}), '(to_chunk, 10)\n', (3738, 3752), False, 'from spectacles import utils\n'), ((3780, 3806), 'spectacles.utils.chunks', 'utils.chunks', (['to_chunk', '(11)'], {}), '(to_chunk, 11)\n', (3792, 3806), False, 'from spectacles import utils\n')] |
'''
Service class for utility functions that I need throught the app
'''
class Utilities:
@staticmethod
def clickedOn(onScreenCoordinates, grid, cell, clickCoords):
i, j = cell
cellX, cellY = onScreenCoordinates[i][j]
x, y = clickCoords
import math, constants
radius = math.sqrt((cellX - x) * (cellX - x) + (cellY - y) * (cellY - y))
radius = round(radius, 5)
if grid[i][j] is None:
if radius <= constants.SQUARE_SIDE_WIDTH:
return True
return False
elif radius <= constants.CIRCLE_RADIUS:
return True
return False
@staticmethod
def clickCoordsToCell(onScreenCoordinates, grid, clickCoords):
row, col = None, None
for i in range(len(grid)):
for j in range(len(grid[0])):
if Utilities.clickedOn(onScreenCoordinates, grid, (i, j), clickCoords):
row = i
col = j
break
return row, col
@staticmethod
def cellToNode(grid, cell):
i, j = cell
return i * len(grid[0]) + j
@staticmethod
def nodeToCell(grid, node):
i = node // len(grid[0])
j = node % len(grid[0])
return (i, j)
@staticmethod
def sign(x):
if x == 0:
return 0
if x < 0:
return -1
return 1
@staticmethod
def endGame(grid):
cntDogs = 0
for i in range(len(grid)):
for j in range(len(grid[0])):
if grid[i][j] == 'd':
cntDogs += 1
elif grid[i][j] == 'j':
cougarPos = (i, j)
if cntDogs < 10:
return "Jaguar won"
import constants
blockCnt = 0
for nextNode in constants.ADJACENCY_LIST[Utilities.cellToNode(grid, cougarPos)]:
i, j = Utilities.nodeToCell(grid, nextNode)
if grid[i][j] == 'd':
blockCnt += 1
if blockCnt == len(constants.ADJACENCY_LIST[Utilities.cellToNode(grid, cougarPos)]):
return "Dogs won"
return None
@staticmethod
def inGrid(grid, cell):
i, j = cell
return 0 <= i < len(grid) and 0 <= j < len(grid[0])
| [
"math.sqrt"
] | [((285, 349), 'math.sqrt', 'math.sqrt', (['((cellX - x) * (cellX - x) + (cellY - y) * (cellY - y))'], {}), '((cellX - x) * (cellX - x) + (cellY - y) * (cellY - y))\n', (294, 349), False, 'import math, constants\n')] |
import pytest
from unittest.mock import Mock
from datacrunch.http_client.http_client import HTTPClient
BASE_URL = "https://api-testing.datacrunch.io/v1"
ACCESS_TOKEN = "<PASSWORD>"
CLIENT_ID = "0123456789xyz"
@pytest.fixture
def http_client():
auth_service = Mock()
auth_service._access_token = ACCESS_TOKEN
auth_service.is_expired = Mock(return_value=True)
auth_service.refresh = Mock(return_value=None)
auth_service._client_id = CLIENT_ID
return HTTPClient(auth_service, BASE_URL)
| [
"datacrunch.http_client.http_client.HTTPClient",
"unittest.mock.Mock"
] | [((267, 273), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (271, 273), False, 'from unittest.mock import Mock\n'), ((350, 373), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (354, 373), False, 'from unittest.mock import Mock\n'), ((401, 424), 'unittest.mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (405, 424), False, 'from unittest.mock import Mock\n'), ((477, 511), 'datacrunch.http_client.http_client.HTTPClient', 'HTTPClient', (['auth_service', 'BASE_URL'], {}), '(auth_service, BASE_URL)\n', (487, 511), False, 'from datacrunch.http_client.http_client import HTTPClient\n')] |
import asyncio
import re
from base64 import b64encode
# pattern taken from:
# https://mybuddymichael.com/writings/a-regular-expression-for-irc-messages.html
IRC_MSG_PATTERN = "^(?:[:](\S+) )?(\S+)(?: (?!:)(.+?))?(?: [:](.+))?$"
# class adapted from a sample kindly provided by https://github.com/emersonveenstra
class IRCClientProtocol(asyncio.Protocol):
def __init__(self, conf, message_handler, logger):
self._read_buffer = ""
self._conf = conf
self._log = logger
self._message_handler = message_handler
""" ========================================================================
protocol required methods
======================================================================== """
def connection_made(self, transport):
self.transport = transport
self.send_message("CAP REQ :sasl")
self.send_message(f"NICK {self._conf.botname}")
self.send_message(f"USER {self._conf.username} 8 * :{self._conf.realname}")
def data_received(self, data):
self._read_buffer += data.decode()
messages = re.split("\r\n", self._read_buffer)
# put back incomplete message, if any
self._read_buffer = messages.pop()
for msg in messages:
self.parse_message(msg)
""" ========================================================================
own methods
======================================================================== """
def send_message(self, message, log_this = True):
if log_this:
self._log("--> " + message)
self.transport.write(f"{message}\r\n".encode())
#---------------------------------------------------------------------------
# parse the message and process it directly in some cases
# pass it over to the external _message_handler() in all other cases
def parse_message(self, msg):
match = re.search(IRC_MSG_PATTERN, msg)
if not match:
return
sender = match.group(1)
if sender:
sender = sender.split("!")[0]
irc_command = match.group(2)
channel = match.group(3)
message = match.group(4)
if irc_command == "PING":
self.send_message(f"PONG :{message}", log_this = False)
# bail out immediately to avoid logging pings
return
self._log("<-- " + msg)
if irc_command == "CAP":
self.send_message("AUTHENTICATE PLAIN")
elif irc_command == "AUTHENTICATE":
authstring = b64encode(
f"\0{self._conf.username}\0{self._conf.password}".encode()
).decode()
self.send_message(f"AUTHENTICATE {authstring}", log_this = False)
elif irc_command == "900":
self.send_message("CAP END")
elif irc_command == "376":
for channel in self._conf.channels:
self.send_message(f"JOIN {channel}")
else:
self._message_handler(sender, irc_command, channel, message)
| [
"re.split",
"re.search"
] | [((1121, 1156), 're.split', 're.split', (["'\\r\\n'", 'self._read_buffer'], {}), "('\\r\\n', self._read_buffer)\n", (1129, 1156), False, 'import re\n'), ((1941, 1972), 're.search', 're.search', (['IRC_MSG_PATTERN', 'msg'], {}), '(IRC_MSG_PATTERN, msg)\n', (1950, 1972), False, 'import re\n')] |
"""Module containing definitions of arithmetic functions used by perceptrons"""
from abc import ABC, abstractmethod
import numpy as np
from NaiveNeurals.utils import ErrorAlgorithm
class ActivationFunction(ABC):
"""Abstract function for defining functions"""
label = ''
@staticmethod
@abstractmethod
def function(arg: np.array) -> np.array:
"""Implementation of function
:param arg: float
:return: float
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def prime(cls, arg: np.array) -> np.array:
"""First derivative of implemented function
:param arg: float
:return: float
"""
raise NotImplementedError()
class Sigmoid(ActivationFunction):
"""Represents sigmoid function and its derivative"""
label = 'sigmoid'
@staticmethod
def function(arg: np.array) -> np.array:
"""Calculate sigmoid(arg)
:param arg: float input value
:return: float sig(arg) value
"""
return 1 / (1 + np.exp(-arg))
@classmethod
def prime(cls, arg: np.array) -> np.array:
"""Calculate value of sigmoid's prime derivative for given arg
:param arg: float input value
:return: float value
"""
return cls.function(arg) * (1 - cls.function(arg))
class Tanh(ActivationFunction):
"""Represents hyperbolic tangent"""
label = 'tanh'
@staticmethod
def function(arg: np.array) -> np.array:
"""Calculate tanh(arg)
:param arg: float input value
:return: float tanh(arg) value
"""
return np.tanh(arg)
@classmethod
def prime(cls, arg: np.array) -> np.array:
"""Calculate value of tanh's prime derivative for given arg
:param arg: float input value
:return: float value
"""
return 1 - np.tanh(arg)**2
class Linear(ActivationFunction):
"""Represents linear function"""
label = 'lin'
@staticmethod
def function(arg: np.array) -> np.array:
"""Calculate lin(arg)
:param arg: float input value
:return: float lin(arg) value
"""
return arg
@classmethod
def prime(cls, arg: np.array) -> np.array:
"""Calculate value of lin's prime derivative for given arg
:param arg: float input value
:return: float value
"""
ones = np.array(arg)
ones[::] = 1.0
return ones
class SoftMax(ActivationFunction):
"""Represents SoftMax function
The ``softmax`` function takes an N-dimensional vector of arbitrary real values and produces
another N-dimensional vector with real values in the range (0, 1) that add up to 1.0.
source: https://eli.thegreenplace.net/2016/the-softmax-function-and-its-derivative/
"""
label = 'softmax'
@staticmethod
def function(arg: np.array, beta: int = 20) -> np.array: # pylint: disable=arguments-differ
"""Calculate softmax(arg)
:param arg: float input value
:param beta: scaling parameter
:return: float softmax(arg) value
"""
exps = np.exp(beta * arg - beta * arg.max())
return exps / np.sum(exps)
@classmethod
def prime(cls, arg: np.array) -> np.array:
"""Calculate value of softmax's prime derivative for given arg
:param arg: float input value
:return: float value
"""
return cls.function(arg) * (1 - cls.function(arg))
class SoftPlus(ActivationFunction):
"""Represents softplus function"""
label = 'softplus'
@staticmethod
def function(arg: np.array) -> np.array:
"""Calculate softplus(arg)
:param arg: float input value
:return: float softmax(arg) value
"""
return np.log(1 + np.exp(arg))
@classmethod
def prime(cls, arg: np.array) -> np.array:
"""Calculate value of softplus's prime derivative for given arg
:param arg: float input value
:return: float value
"""
return 1/(1 + np.exp(-arg))
def get_activation_function(label: str) -> ActivationFunction:
"""Get activation function by label
:param label: string denoting function
:return: callable function
"""
if label == 'lin':
return Linear()
if label == 'sigmoid':
return Sigmoid()
if label == 'tanh':
return Tanh()
return Sigmoid()
def calculate_error(target: np.array, actual: np.array,
func_type: ErrorAlgorithm = ErrorAlgorithm.SQR) -> np.array:
"""Calculates error for provided actual and targeted data.
:param target: target data
:param actual: actual training data
:param func_type: denotes type of used function for error
:return: calculated error
"""
if func_type == ErrorAlgorithm.SQR:
return np.sum(0.5 * np.power(actual - target, 2), axis=1)
elif func_type == ErrorAlgorithm.CE:
return -1 * np.sum(target * np.log(abs(actual)), axis=1)
raise NotImplementedError()
| [
"numpy.power",
"numpy.tanh",
"numpy.exp",
"numpy.sum",
"numpy.array"
] | [((1650, 1662), 'numpy.tanh', 'np.tanh', (['arg'], {}), '(arg)\n', (1657, 1662), True, 'import numpy as np\n'), ((2432, 2445), 'numpy.array', 'np.array', (['arg'], {}), '(arg)\n', (2440, 2445), True, 'import numpy as np\n'), ((3232, 3244), 'numpy.sum', 'np.sum', (['exps'], {}), '(exps)\n', (3238, 3244), True, 'import numpy as np\n'), ((1067, 1079), 'numpy.exp', 'np.exp', (['(-arg)'], {}), '(-arg)\n', (1073, 1079), True, 'import numpy as np\n'), ((1895, 1907), 'numpy.tanh', 'np.tanh', (['arg'], {}), '(arg)\n', (1902, 1907), True, 'import numpy as np\n'), ((3839, 3850), 'numpy.exp', 'np.exp', (['arg'], {}), '(arg)\n', (3845, 3850), True, 'import numpy as np\n'), ((4091, 4103), 'numpy.exp', 'np.exp', (['(-arg)'], {}), '(-arg)\n', (4097, 4103), True, 'import numpy as np\n'), ((4901, 4929), 'numpy.power', 'np.power', (['(actual - target)', '(2)'], {}), '(actual - target, 2)\n', (4909, 4929), True, 'import numpy as np\n')] |
# alevel.py Test/demo program for Adafruit ssd1351-based OLED displays
# Adafruit 1.5" 128*128 OLED display: https://www.adafruit.com/product/1431
# Adafruit 1.27" 128*96 display https://www.adafruit.com/product/1673
# The MIT License (MIT)
# Copyright (c) 2018 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# WIRING
# Pyb SSD
# 3v3 Vin
# Gnd Gnd
# X1 DC
# X2 CS
# X3 Rst
# X6 CLK
# X8 DATA
height = 96 # 1.27 inch 96*128 (rows*cols) display
# height = 128 # 1.5 inch 128*128 display
# Demo of initialisation procedure designed to minimise risk of memory fail
# when instantiating the frame buffer. The aim is to do this as early as
# possible before importing other modules.
import machine
import gc
from ssd1351 import SSD1351 as SSD
# Initialise hardware
pdc = machine.Pin('X1', machine.Pin.OUT_PP, value=0)
pcs = machine.Pin('X2', machine.Pin.OUT_PP, value=1)
prst = machine.Pin('X3', machine.Pin.OUT_PP, value=1)
spi = machine.SPI(1)
gc.collect() # Precaution befor instantiating framebuf
ssd = SSD(spi, pcs, pdc, prst, height) # Create a display instance
from nanogui import Dial, Pointer, refresh
refresh(ssd) # Initialise and clear display.
# Now import other modules
import utime
import pyb
from writer import CWriter
import arial10 # Font
GREEN = SSD.rgb(0, 255, 0)
RED = SSD.rgb(255, 0, 0)
BLUE = SSD.rgb(0, 0, 255)
YELLOW = SSD.rgb(255, 255, 0)
BLACK = 0
def main():
print('alevel test is running.')
CWriter.set_textpos(ssd, 0, 0) # In case previous tests have altered it
wri = CWriter(ssd, arial10, GREEN, BLACK, verbose=False)
wri.set_clip(True, True, False)
acc = pyb.Accel()
dial = Dial(wri, 5, 5, height = 75, ticks = 12, bdcolor=None,
label='Tilt Pyboard', style = Dial.COMPASS, pip=YELLOW) # Border in fg color
ptr = Pointer(dial)
scale = 1/40
while True:
x, y, z = acc.filtered_xyz()
# Depending on relative alignment of display and Pyboard this line may
# need changing: swap x and y or change signs so arrow points in direction
# board is tilted.
ptr.value(-y*scale + 1j*x*scale, YELLOW)
refresh(ssd)
utime.sleep_ms(200)
main()
| [
"writer.CWriter.set_textpos",
"ssd1351.SSD1351.rgb",
"machine.SPI",
"pyb.Accel",
"utime.sleep_ms",
"machine.Pin",
"nanogui.refresh",
"writer.CWriter",
"ssd1351.SSD1351",
"gc.collect",
"nanogui.Dial",
"nanogui.Pointer"
] | [((1810, 1856), 'machine.Pin', 'machine.Pin', (['"""X1"""', 'machine.Pin.OUT_PP'], {'value': '(0)'}), "('X1', machine.Pin.OUT_PP, value=0)\n", (1821, 1856), False, 'import machine\n'), ((1863, 1909), 'machine.Pin', 'machine.Pin', (['"""X2"""', 'machine.Pin.OUT_PP'], {'value': '(1)'}), "('X2', machine.Pin.OUT_PP, value=1)\n", (1874, 1909), False, 'import machine\n'), ((1917, 1963), 'machine.Pin', 'machine.Pin', (['"""X3"""', 'machine.Pin.OUT_PP'], {'value': '(1)'}), "('X3', machine.Pin.OUT_PP, value=1)\n", (1928, 1963), False, 'import machine\n'), ((1970, 1984), 'machine.SPI', 'machine.SPI', (['(1)'], {}), '(1)\n', (1981, 1984), False, 'import machine\n'), ((1985, 1997), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1995, 1997), False, 'import gc\n'), ((2047, 2079), 'ssd1351.SSD1351', 'SSD', (['spi', 'pcs', 'pdc', 'prst', 'height'], {}), '(spi, pcs, pdc, prst, height)\n', (2050, 2079), True, 'from ssd1351 import SSD1351 as SSD\n'), ((2152, 2164), 'nanogui.refresh', 'refresh', (['ssd'], {}), '(ssd)\n', (2159, 2164), False, 'from nanogui import Dial, Pointer, refresh\n'), ((2310, 2328), 'ssd1351.SSD1351.rgb', 'SSD.rgb', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (2317, 2328), True, 'from ssd1351 import SSD1351 as SSD\n'), ((2335, 2353), 'ssd1351.SSD1351.rgb', 'SSD.rgb', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (2342, 2353), True, 'from ssd1351 import SSD1351 as SSD\n'), ((2361, 2379), 'ssd1351.SSD1351.rgb', 'SSD.rgb', (['(0)', '(0)', '(255)'], {}), '(0, 0, 255)\n', (2368, 2379), True, 'from ssd1351 import SSD1351 as SSD\n'), ((2389, 2409), 'ssd1351.SSD1351.rgb', 'SSD.rgb', (['(255)', '(255)', '(0)'], {}), '(255, 255, 0)\n', (2396, 2409), True, 'from ssd1351 import SSD1351 as SSD\n'), ((2475, 2505), 'writer.CWriter.set_textpos', 'CWriter.set_textpos', (['ssd', '(0)', '(0)'], {}), '(ssd, 0, 0)\n', (2494, 2505), False, 'from writer import CWriter\n'), ((2558, 2608), 'writer.CWriter', 'CWriter', (['ssd', 'arial10', 'GREEN', 'BLACK'], {'verbose': '(False)'}), '(ssd, arial10, GREEN, BLACK, verbose=False)\n', (2565, 2608), False, 'from writer import CWriter\n'), ((2655, 2666), 'pyb.Accel', 'pyb.Accel', ([], {}), '()\n', (2664, 2666), False, 'import pyb\n'), ((2678, 2786), 'nanogui.Dial', 'Dial', (['wri', '(5)', '(5)'], {'height': '(75)', 'ticks': '(12)', 'bdcolor': 'None', 'label': '"""Tilt Pyboard"""', 'style': 'Dial.COMPASS', 'pip': 'YELLOW'}), "(wri, 5, 5, height=75, ticks=12, bdcolor=None, label='Tilt Pyboard',\n style=Dial.COMPASS, pip=YELLOW)\n", (2682, 2786), False, 'from nanogui import Dial, Pointer, refresh\n'), ((2837, 2850), 'nanogui.Pointer', 'Pointer', (['dial'], {}), '(dial)\n', (2844, 2850), False, 'from nanogui import Dial, Pointer, refresh\n'), ((3167, 3179), 'nanogui.refresh', 'refresh', (['ssd'], {}), '(ssd)\n', (3174, 3179), False, 'from nanogui import Dial, Pointer, refresh\n'), ((3188, 3207), 'utime.sleep_ms', 'utime.sleep_ms', (['(200)'], {}), '(200)\n', (3202, 3207), False, 'import utime\n')] |
import logging
from django.db import models
logger = logging.getLogger(__name__)
class PostmarkWebhook(models.Model):
received_at = models.DateTimeField(auto_now_add=True)
body = models.JSONField()
headers = models.JSONField()
note = models.TextField(blank=True)
class Status(models.TextChoices):
NEW = "new"
PROCESSED = "processed"
ERROR = "error"
status = models.CharField(
max_length=127, choices=Status.choices, default=Status.NEW
)
| [
"logging.getLogger",
"django.db.models.TextField",
"django.db.models.JSONField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((54, 81), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (71, 81), False, 'import logging\n'), ((139, 178), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (159, 178), False, 'from django.db import models\n'), ((190, 208), 'django.db.models.JSONField', 'models.JSONField', ([], {}), '()\n', (206, 208), False, 'from django.db import models\n'), ((223, 241), 'django.db.models.JSONField', 'models.JSONField', ([], {}), '()\n', (239, 241), False, 'from django.db import models\n'), ((253, 281), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (269, 281), False, 'from django.db import models\n'), ((411, 487), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(127)', 'choices': 'Status.choices', 'default': 'Status.NEW'}), '(max_length=127, choices=Status.choices, default=Status.NEW)\n', (427, 487), False, 'from django.db import models\n')] |
"""GUI elements for use in the sidebar of the main window.
Classes
-------
**InfoWidget** - Sidebar widget for basic file information.
**MetaWidget** - Sidebar widget for basic metadata.
**ConsoleWidget** - Sidebar widget for basic text output.
"""
from PySide2.QtCore import QSize
from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, \
QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout
from pyseus.settings import settings
class InfoWidget(QFrame):
"""The widget for file info. Displays path, scan ID and slice index."""
def __init__(self, app):
QFrame.__init__(self)
self.app = app
self.setLayout(QVBoxLayout())
self.layout().setContentsMargins(0, 0, 0, 0)
self.path = QLineEdit("")
self.scan = QLineEdit("")
self.slice = QLabel("")
info = QFrame()
info.setLayout(QFormLayout())
info.layout().addRow("Path:", self.path)
info.layout().addRow("Scan:", self.scan)
info.layout().addRow("Slice:", self.slice)
self.layout().addWidget(info)
self.setSizePolicy(QSizePolicy.Policy.Fixed,
QSizePolicy.Policy.Fixed)
self.updateGeometry()
def minimumSizeHint(self): # pylint: disable=C0103,R0201
"""Return widget size to ensure unifrom sidebar width."""
return QSize(int(settings["ui"]["sidebar_size"]), 80)
def update_slice(self, current, slices):
"""Update the displayed slice index."""
self.slice.setText("{} / {}".format(current+1, slices))
def update_scan(self, scan):
"""Update the displayed scan ID."""
self.scan.setText("{}".format(scan))
def update_path(self, path):
"""Update the displayed path."""
self.path.setText(path)
class MetaWidget(QScrollArea):
"""The widget for metadata display."""
def __init__(self, app):
QScrollArea.__init__(self)
self.app = app
self._reset_ui()
def _reset_ui(self):
"""Remove all metadata rows and reset the layout."""
table = QFrame()
table.setLayout(QFormLayout())
self.table = table.layout()
self.setWidgetResizable(True)
self.setWidget(table)
self.setSizePolicy(QSizePolicy.Policy.Fixed,
QSizePolicy.Policy.MinimumExpanding)
self.updateGeometry()
def minimumSizeHint(self): # pylint: disable=C0103,R0201
"""Return widget size to ensure unifrom sidebar width."""
return QSize(int(settings["ui"]["sidebar_size"]), 100)
def update_meta(self, data, more=True):
"""Set the displayed metadata; if *more* is True, display a button to
show all metadata."""
self._reset_ui()
if data is not None and data:
for key in sorted(data.keys()):
value = QLineEdit(str(data[key]))
self.table.addRow(key, value)
if more:
more_label = QLabel("more ...")
more_label.mouseReleaseEvent = self._show_more
self.table.addRow(more_label, None)
elif data is None or not data:
self.table.addRow("No metadata available", None)
def _show_more(self, event): # pylint: disable=W0613
"""Display a window showing all available metadata."""
self.app.show_metadata_window()
class ConsoleWidget(QTextEdit):
"""The widget for generic text output."""
def __init__(self, app):
QTextEdit.__init__(self)
self.app = app
self.setReadOnly(True)
self.setSizePolicy(QSizePolicy.Policy.Fixed,
QSizePolicy.Policy.MinimumExpanding)
self.updateGeometry()
def minimumSizeHint(self): # pylint: disable=C0103,R0201
"""Return widget size to ensure unifrom sidebar width."""
return QSize(int(settings["ui"]["sidebar_size"]), 100)
def print(self, text):
"""Print a simple text message to the console."""
self.append(text)
self.verticalScrollBar().setValue(self.verticalScrollBar().maximum())
| [
"PySide2.QtWidgets.QTextEdit.__init__",
"PySide2.QtWidgets.QScrollArea.__init__",
"PySide2.QtWidgets.QFrame",
"PySide2.QtWidgets.QFrame.__init__",
"PySide2.QtWidgets.QLineEdit",
"PySide2.QtWidgets.QFormLayout",
"PySide2.QtWidgets.QLabel",
"PySide2.QtWidgets.QVBoxLayout"
] | [((590, 611), 'PySide2.QtWidgets.QFrame.__init__', 'QFrame.__init__', (['self'], {}), '(self)\n', (605, 611), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((748, 761), 'PySide2.QtWidgets.QLineEdit', 'QLineEdit', (['""""""'], {}), "('')\n", (757, 761), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((782, 795), 'PySide2.QtWidgets.QLineEdit', 'QLineEdit', (['""""""'], {}), "('')\n", (791, 795), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((817, 827), 'PySide2.QtWidgets.QLabel', 'QLabel', (['""""""'], {}), "('')\n", (823, 827), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((844, 852), 'PySide2.QtWidgets.QFrame', 'QFrame', ([], {}), '()\n', (850, 852), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((1909, 1935), 'PySide2.QtWidgets.QScrollArea.__init__', 'QScrollArea.__init__', (['self'], {}), '(self)\n', (1929, 1935), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((2087, 2095), 'PySide2.QtWidgets.QFrame', 'QFrame', ([], {}), '()\n', (2093, 2095), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((3488, 3512), 'PySide2.QtWidgets.QTextEdit.__init__', 'QTextEdit.__init__', (['self'], {}), '(self)\n', (3506, 3512), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((659, 672), 'PySide2.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (670, 672), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((876, 889), 'PySide2.QtWidgets.QFormLayout', 'QFormLayout', ([], {}), '()\n', (887, 889), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((2120, 2133), 'PySide2.QtWidgets.QFormLayout', 'QFormLayout', ([], {}), '()\n', (2131, 2133), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n'), ((2982, 3000), 'PySide2.QtWidgets.QLabel', 'QLabel', (['"""more ..."""'], {}), "('more ...')\n", (2988, 3000), False, 'from PySide2.QtWidgets import QFormLayout, QFrame, QLabel, QLineEdit, QScrollArea, QSizePolicy, QTextEdit, QVBoxLayout\n')] |
import string
import spacy
from text_studio.utils.timer import timer
from text_studio.transformer import Transformer
class SpacyTokenizer(Transformer):
def setup(self, stopwords=None, punct=None, lower=True, strip=True):
spacy.cli.download("en_core_web_sm")
self.nlp = spacy.load(
"en_core_web_sm", disable=["parser", "tagger", "ner"]
)
self.lower = lower
self.punct = punct or set(string.punctuation)
def process_batch(self, X):
docs = list(self.nlp.pipe(X))
return [list(self.process_instance(doc)) for doc in docs]
def process_single(self, document):
return self.process_instance(document)
def process_instance(self, document):
for token in document:
lexeme = self.nlp.vocab[token.text]
if lexeme.is_stop or (token.text in self.punct):
continue
yield token.lemma_
| [
"spacy.load",
"spacy.cli.download"
] | [((236, 272), 'spacy.cli.download', 'spacy.cli.download', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (254, 272), False, 'import spacy\n'), ((292, 357), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {'disable': "['parser', 'tagger', 'ner']"}), "('en_core_web_sm', disable=['parser', 'tagger', 'ner'])\n", (302, 357), False, 'import spacy\n')] |
from OpenGL import GL
from PIL import Image
from pathlib import Path
import numpy as np
import gc
import os
import ctypes
GL_COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1
VBO = None
VAO = None
TEXTURE = None
SHADER = None
vertexData = [
-1.0, -1.0, 0.0, 0.0, 1.0,
-1.0, 1.0, 0.0, 0.0, 0.0,
1.0, 1.0, 0.0, 1.0, 0.0,
1.0, 1.0, 0.0, 1.0, 0.0,
1.0, -1.0, 0.0, 1.0, 1.0,
-1.0, -1.0, 0.0, 0.0, 1.0]
_filepath = os.path.join(
Path(__file__).parent.parent.parent, "branding/spykeLogo.dds")
tex = dds_loader.DDSTexture()
tex.load(_filepath)
texData = np.fromstring(tex.data, dtype=np.uint8)
texImageSize = tex.real_size
vertSource = """
#version 450 core
layout(location = 0) in vec3 aPosition;
layout(location = 1) in vec2 aTexCoord;
out vec2 vTexCoord;
void main() {
vTexCoord = aTexCoord;
gl_Position = vec4(aPosition, 1.0f);
}
"""
fragSource = """
#version 450 core
in vec2 vTexCoord;
uniform sampler2D uTexture;
out vec4 Color;
void main() {
Color = texture(uTexture, vTexCoord);
}
"""
def __SetupShader():
global SHADER
SHADER = GL.glCreateProgram()
vert = GL.glCreateShader(GL.GL_VERTEX_SHADER)
GL.glShaderSource(vert, vertSource)
GL.glCompileShader(vert)
GL.glAttachShader(SHADER, vert)
frag = GL.glCreateShader(GL.GL_FRAGMENT_SHADER)
GL.glShaderSource(frag, fragSource)
GL.glCompileShader(frag)
GL.glAttachShader(SHADER, frag)
GL.glLinkProgram(SHADER)
GL.glValidateProgram(SHADER)
GL.glDetachShader(SHADER, vert)
GL.glDetachShader(SHADER, frag)
GL.glDeleteShader(vert)
GL.glDeleteShader(frag)
def __SetupVbo():
global VBO
VBO = GL.glGenBuffers(1)
GL.glBindBuffer(GL.GL_ARRAY_BUFFER, VBO)
GL.glBufferData(GL.GL_ARRAY_BUFFER, len(vertexData) * ctypes.sizeof(ctypes.c_float),
np.asarray(vertexData, dtype=np.float32), GL.GL_STATIC_DRAW)
def __SetupVao():
global VAO
vertexSize = (3 + 2) * ctypes.sizeof(ctypes.c_float)
VAO = GL.glGenVertexArrays(1)
GL.glBindVertexArray(VAO)
GL.glBindBuffer(GL.GL_ARRAY_BUFFER, VBO)
GL.glVertexAttribPointer(0, 3, GL.GL_FLOAT, False,
vertexSize, ctypes.c_void_p(0))
GL.glEnableVertexAttribArray(0)
GL.glVertexAttribPointer(1, 2, GL.GL_FLOAT, False, vertexSize, ctypes.c_void_p(
3 * ctypes.sizeof(ctypes.c_float)))
GL.glEnableVertexAttribArray(1)
def __SetupTexture():
global TEXTURE
TEXTURE = GL.glGenTextures(1)
GL.glBindTexture(GL.GL_TEXTURE_2D, TEXTURE)
GL.glCompressedTexImage2D(
GL.GL_TEXTURE_2D, 0, GL_COMPRESSED_RGBA_S3TC_DXT1_EXT, 1024, 1024, texImageSize, texData)
GL.glTexParameter(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR)
GL.glTexParameter(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR)
def CleanupPreview():
global vertexData, texData, vertSource, fragSource
GL.glClear(GL.GL_COLOR_BUFFER_BIT)
GL.glDeleteProgram(SHADER)
GL.glDeleteBuffers(1, [VBO])
GL.glDeleteVertexArrays(1, [VAO])
GL.glDeleteTextures(1, [TEXTURE])
err = GL.glGetError()
while err != GL.GL_NO_ERROR:
err = GL.glGetError()
del vertexData
del texData
del vertSource
del fragSource
gc.collect()
def RenderPreview():
global VBO, VAO, TEXTURE, SHADER
__SetupShader()
__SetupVbo()
__SetupVao()
__SetupTexture()
GL.glEnable(GL.GL_BLEND)
GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)
GL.glUseProgram(SHADER)
GL.glBindVertexArray(VAO)
GL.glBindTexture(GL.GL_TEXTURE_2D, TEXTURE)
GL.glClear(GL.GL_COLOR_BUFFER_BIT)
GL.glDrawArrays(GL.GL_TRIANGLES, 0, 6)
GL.glBindTexture(GL.GL_TEXTURE_2D, 0)
err = GL.glGetError()
while err != GL.GL_NO_ERROR:
err = GL.glGetError()
| [
"OpenGL.GL.glTexParameter",
"OpenGL.GL.glDeleteProgram",
"ctypes.c_void_p",
"OpenGL.GL.glAttachShader",
"OpenGL.GL.glCreateShader",
"OpenGL.GL.glDrawArrays",
"OpenGL.GL.glDeleteBuffers",
"OpenGL.GL.glGenTextures",
"pathlib.Path",
"OpenGL.GL.glBindVertexArray",
"OpenGL.GL.glGenBuffers",
"OpenGL... | [((575, 614), 'numpy.fromstring', 'np.fromstring', (['tex.data'], {'dtype': 'np.uint8'}), '(tex.data, dtype=np.uint8)\n', (588, 614), True, 'import numpy as np\n'), ((1089, 1109), 'OpenGL.GL.glCreateProgram', 'GL.glCreateProgram', ([], {}), '()\n', (1107, 1109), False, 'from OpenGL import GL\n'), ((1122, 1160), 'OpenGL.GL.glCreateShader', 'GL.glCreateShader', (['GL.GL_VERTEX_SHADER'], {}), '(GL.GL_VERTEX_SHADER)\n', (1139, 1160), False, 'from OpenGL import GL\n'), ((1165, 1200), 'OpenGL.GL.glShaderSource', 'GL.glShaderSource', (['vert', 'vertSource'], {}), '(vert, vertSource)\n', (1182, 1200), False, 'from OpenGL import GL\n'), ((1205, 1229), 'OpenGL.GL.glCompileShader', 'GL.glCompileShader', (['vert'], {}), '(vert)\n', (1223, 1229), False, 'from OpenGL import GL\n'), ((1234, 1265), 'OpenGL.GL.glAttachShader', 'GL.glAttachShader', (['SHADER', 'vert'], {}), '(SHADER, vert)\n', (1251, 1265), False, 'from OpenGL import GL\n'), ((1278, 1318), 'OpenGL.GL.glCreateShader', 'GL.glCreateShader', (['GL.GL_FRAGMENT_SHADER'], {}), '(GL.GL_FRAGMENT_SHADER)\n', (1295, 1318), False, 'from OpenGL import GL\n'), ((1323, 1358), 'OpenGL.GL.glShaderSource', 'GL.glShaderSource', (['frag', 'fragSource'], {}), '(frag, fragSource)\n', (1340, 1358), False, 'from OpenGL import GL\n'), ((1363, 1387), 'OpenGL.GL.glCompileShader', 'GL.glCompileShader', (['frag'], {}), '(frag)\n', (1381, 1387), False, 'from OpenGL import GL\n'), ((1392, 1423), 'OpenGL.GL.glAttachShader', 'GL.glAttachShader', (['SHADER', 'frag'], {}), '(SHADER, frag)\n', (1409, 1423), False, 'from OpenGL import GL\n'), ((1429, 1453), 'OpenGL.GL.glLinkProgram', 'GL.glLinkProgram', (['SHADER'], {}), '(SHADER)\n', (1445, 1453), False, 'from OpenGL import GL\n'), ((1458, 1486), 'OpenGL.GL.glValidateProgram', 'GL.glValidateProgram', (['SHADER'], {}), '(SHADER)\n', (1478, 1486), False, 'from OpenGL import GL\n'), ((1492, 1523), 'OpenGL.GL.glDetachShader', 'GL.glDetachShader', (['SHADER', 'vert'], {}), '(SHADER, vert)\n', (1509, 1523), False, 'from OpenGL import GL\n'), ((1528, 1559), 'OpenGL.GL.glDetachShader', 'GL.glDetachShader', (['SHADER', 'frag'], {}), '(SHADER, frag)\n', (1545, 1559), False, 'from OpenGL import GL\n'), ((1565, 1588), 'OpenGL.GL.glDeleteShader', 'GL.glDeleteShader', (['vert'], {}), '(vert)\n', (1582, 1588), False, 'from OpenGL import GL\n'), ((1593, 1616), 'OpenGL.GL.glDeleteShader', 'GL.glDeleteShader', (['frag'], {}), '(frag)\n', (1610, 1616), False, 'from OpenGL import GL\n'), ((1663, 1681), 'OpenGL.GL.glGenBuffers', 'GL.glGenBuffers', (['(1)'], {}), '(1)\n', (1678, 1681), False, 'from OpenGL import GL\n'), ((1686, 1726), 'OpenGL.GL.glBindBuffer', 'GL.glBindBuffer', (['GL.GL_ARRAY_BUFFER', 'VBO'], {}), '(GL.GL_ARRAY_BUFFER, VBO)\n', (1701, 1726), False, 'from OpenGL import GL\n'), ((2001, 2024), 'OpenGL.GL.glGenVertexArrays', 'GL.glGenVertexArrays', (['(1)'], {}), '(1)\n', (2021, 2024), False, 'from OpenGL import GL\n'), ((2029, 2054), 'OpenGL.GL.glBindVertexArray', 'GL.glBindVertexArray', (['VAO'], {}), '(VAO)\n', (2049, 2054), False, 'from OpenGL import GL\n'), ((2060, 2100), 'OpenGL.GL.glBindBuffer', 'GL.glBindBuffer', (['GL.GL_ARRAY_BUFFER', 'VBO'], {}), '(GL.GL_ARRAY_BUFFER, VBO)\n', (2075, 2100), False, 'from OpenGL import GL\n'), ((2222, 2253), 'OpenGL.GL.glEnableVertexAttribArray', 'GL.glEnableVertexAttribArray', (['(0)'], {}), '(0)\n', (2250, 2253), False, 'from OpenGL import GL\n'), ((2387, 2418), 'OpenGL.GL.glEnableVertexAttribArray', 'GL.glEnableVertexAttribArray', (['(1)'], {}), '(1)\n', (2415, 2418), False, 'from OpenGL import GL\n'), ((2477, 2496), 'OpenGL.GL.glGenTextures', 'GL.glGenTextures', (['(1)'], {}), '(1)\n', (2493, 2496), False, 'from OpenGL import GL\n'), ((2501, 2544), 'OpenGL.GL.glBindTexture', 'GL.glBindTexture', (['GL.GL_TEXTURE_2D', 'TEXTURE'], {}), '(GL.GL_TEXTURE_2D, TEXTURE)\n', (2517, 2544), False, 'from OpenGL import GL\n'), ((2550, 2669), 'OpenGL.GL.glCompressedTexImage2D', 'GL.glCompressedTexImage2D', (['GL.GL_TEXTURE_2D', '(0)', 'GL_COMPRESSED_RGBA_S3TC_DXT1_EXT', '(1024)', '(1024)', 'texImageSize', 'texData'], {}), '(GL.GL_TEXTURE_2D, 0,\n GL_COMPRESSED_RGBA_S3TC_DXT1_EXT, 1024, 1024, texImageSize, texData)\n', (2575, 2669), False, 'from OpenGL import GL\n'), ((2679, 2754), 'OpenGL.GL.glTexParameter', 'GL.glTexParameter', (['GL.GL_TEXTURE_2D', 'GL.GL_TEXTURE_MAG_FILTER', 'GL.GL_LINEAR'], {}), '(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR)\n', (2696, 2754), False, 'from OpenGL import GL\n'), ((2759, 2834), 'OpenGL.GL.glTexParameter', 'GL.glTexParameter', (['GL.GL_TEXTURE_2D', 'GL.GL_TEXTURE_MIN_FILTER', 'GL.GL_LINEAR'], {}), '(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR)\n', (2776, 2834), False, 'from OpenGL import GL\n'), ((2919, 2953), 'OpenGL.GL.glClear', 'GL.glClear', (['GL.GL_COLOR_BUFFER_BIT'], {}), '(GL.GL_COLOR_BUFFER_BIT)\n', (2929, 2953), False, 'from OpenGL import GL\n'), ((2959, 2985), 'OpenGL.GL.glDeleteProgram', 'GL.glDeleteProgram', (['SHADER'], {}), '(SHADER)\n', (2977, 2985), False, 'from OpenGL import GL\n'), ((2990, 3018), 'OpenGL.GL.glDeleteBuffers', 'GL.glDeleteBuffers', (['(1)', '[VBO]'], {}), '(1, [VBO])\n', (3008, 3018), False, 'from OpenGL import GL\n'), ((3023, 3056), 'OpenGL.GL.glDeleteVertexArrays', 'GL.glDeleteVertexArrays', (['(1)', '[VAO]'], {}), '(1, [VAO])\n', (3046, 3056), False, 'from OpenGL import GL\n'), ((3061, 3094), 'OpenGL.GL.glDeleteTextures', 'GL.glDeleteTextures', (['(1)', '[TEXTURE]'], {}), '(1, [TEXTURE])\n', (3080, 3094), False, 'from OpenGL import GL\n'), ((3106, 3121), 'OpenGL.GL.glGetError', 'GL.glGetError', ([], {}), '()\n', (3119, 3121), False, 'from OpenGL import GL\n'), ((3264, 3276), 'gc.collect', 'gc.collect', ([], {}), '()\n', (3274, 3276), False, 'import gc\n'), ((3418, 3442), 'OpenGL.GL.glEnable', 'GL.glEnable', (['GL.GL_BLEND'], {}), '(GL.GL_BLEND)\n', (3429, 3442), False, 'from OpenGL import GL\n'), ((3447, 3505), 'OpenGL.GL.glBlendFunc', 'GL.glBlendFunc', (['GL.GL_SRC_ALPHA', 'GL.GL_ONE_MINUS_SRC_ALPHA'], {}), '(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)\n', (3461, 3505), False, 'from OpenGL import GL\n'), ((3511, 3534), 'OpenGL.GL.glUseProgram', 'GL.glUseProgram', (['SHADER'], {}), '(SHADER)\n', (3526, 3534), False, 'from OpenGL import GL\n'), ((3539, 3564), 'OpenGL.GL.glBindVertexArray', 'GL.glBindVertexArray', (['VAO'], {}), '(VAO)\n', (3559, 3564), False, 'from OpenGL import GL\n'), ((3569, 3612), 'OpenGL.GL.glBindTexture', 'GL.glBindTexture', (['GL.GL_TEXTURE_2D', 'TEXTURE'], {}), '(GL.GL_TEXTURE_2D, TEXTURE)\n', (3585, 3612), False, 'from OpenGL import GL\n'), ((3618, 3652), 'OpenGL.GL.glClear', 'GL.glClear', (['GL.GL_COLOR_BUFFER_BIT'], {}), '(GL.GL_COLOR_BUFFER_BIT)\n', (3628, 3652), False, 'from OpenGL import GL\n'), ((3657, 3695), 'OpenGL.GL.glDrawArrays', 'GL.glDrawArrays', (['GL.GL_TRIANGLES', '(0)', '(6)'], {}), '(GL.GL_TRIANGLES, 0, 6)\n', (3672, 3695), False, 'from OpenGL import GL\n'), ((3701, 3738), 'OpenGL.GL.glBindTexture', 'GL.glBindTexture', (['GL.GL_TEXTURE_2D', '(0)'], {}), '(GL.GL_TEXTURE_2D, 0)\n', (3717, 3738), False, 'from OpenGL import GL\n'), ((3750, 3765), 'OpenGL.GL.glGetError', 'GL.glGetError', ([], {}), '()\n', (3763, 3765), False, 'from OpenGL import GL\n'), ((1836, 1876), 'numpy.asarray', 'np.asarray', (['vertexData'], {'dtype': 'np.float32'}), '(vertexData, dtype=np.float32)\n', (1846, 1876), True, 'import numpy as np\n'), ((1960, 1989), 'ctypes.sizeof', 'ctypes.sizeof', (['ctypes.c_float'], {}), '(ctypes.c_float)\n', (1973, 1989), False, 'import ctypes\n'), ((2198, 2216), 'ctypes.c_void_p', 'ctypes.c_void_p', (['(0)'], {}), '(0)\n', (2213, 2216), False, 'import ctypes\n'), ((3169, 3184), 'OpenGL.GL.glGetError', 'GL.glGetError', ([], {}), '()\n', (3182, 3184), False, 'from OpenGL import GL\n'), ((3813, 3828), 'OpenGL.GL.glGetError', 'GL.glGetError', ([], {}), '()\n', (3826, 3828), False, 'from OpenGL import GL\n'), ((1785, 1814), 'ctypes.sizeof', 'ctypes.sizeof', (['ctypes.c_float'], {}), '(ctypes.c_float)\n', (1798, 1814), False, 'import ctypes\n'), ((451, 465), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (455, 465), False, 'from pathlib import Path\n'), ((2351, 2380), 'ctypes.sizeof', 'ctypes.sizeof', (['ctypes.c_float'], {}), '(ctypes.c_float)\n', (2364, 2380), False, 'import ctypes\n')] |
import os
'''
path and dataset parameter
配置文件
'''
DATA_PATH = 'data'
PASCAL_PATH = os.path.join(DATA_PATH, 'pascal_voc')
CACHE_PATH = os.path.join(PASCAL_PATH, 'cache')
OUTPUT_DIR = os.path.join(PASCAL_PATH, 'output') # 存放输出文件的地方,data/pascal_voc/output
WEIGHTS_DIR = os.path.join(PASCAL_PATH, 'weights') # weights_dir, 路径为data/pascal_voc/weights
WEIGHTS_FILE = None # weights file
# WEIGHTS_FILE = os.path.join(DATA_PATH, 'weights', 'YOLO_small.ckpt')
# PASCAL VOC数据集的20个类别
CLASSES = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant', 'sheep', 'sofa',
'train', 'tvmonitor']
FLIPPED = True
"""
model parameter
"""
IMAGE_SIZE = 448 # 输入图片的大小
CELL_SIZE = 7 # grid cell大小(cell_size * cell_size的大小)
BOXES_PER_CELL = 2 # 每个cell负责预测两个bounding box
ALPHA = 0.1 # Leaky Relu的泄露参数
DISP_CONSOLE = False
"""
下面这几个是论文中涉及的参数
"""
OBJECT_SCALE = 1.0
NOOBJECT_SCALE = 1.0
CLASS_SCALE = 2.0
COORD_SCALE = 5.0
"""
hyper-parameter
"""
GPU = ''
LEARNING_RATE = 0.0001 # 学习率
DECAY_STEPS = 30000
DECAY_RATE = 0.1
STAIRCASE = True
BATCH_SIZE = 64 # batch size
MAX_ITER = 135 # 迭代次数135,论文中为135个迭代,可自定义
SUMMARY_ITER = 10
SAVE_ITER = 1000
MOMENTUM = 0.9 # 角动量
"""
test parameter
"""
THRESHOLD = 0.2
IOU_THRESHOLD = 0.5 # IOU阈值0.5 | [
"os.path.join"
] | [((85, 122), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""pascal_voc"""'], {}), "(DATA_PATH, 'pascal_voc')\n", (97, 122), False, 'import os\n'), ((136, 170), 'os.path.join', 'os.path.join', (['PASCAL_PATH', '"""cache"""'], {}), "(PASCAL_PATH, 'cache')\n", (148, 170), False, 'import os\n'), ((184, 219), 'os.path.join', 'os.path.join', (['PASCAL_PATH', '"""output"""'], {}), "(PASCAL_PATH, 'output')\n", (196, 219), False, 'import os\n'), ((276, 312), 'os.path.join', 'os.path.join', (['PASCAL_PATH', '"""weights"""'], {}), "(PASCAL_PATH, 'weights')\n", (288, 312), False, 'import os\n')] |
from os.path import join, splitext
from uuid import uuid4
import datetime
from django.db import models
#from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from django.urls import reverse
from django.contrib.auth.models import User
# Create your models here.
#@python_2_unicode_compatible # only if you need to support Python 2
class Organisation(models.Model):
name = models.TextField(max_length = None, null = True)
abbreviation = models.TextField(max_length = None, null = True)
address = models.TextField(max_length = None, blank = True, null = True)
notes = models.TextField(max_length = None, blank = True, null = True)
old_id = models.IntegerField(default = 0)
class Meta:
ordering = ('abbreviation',)
def __str__(self):
output = self.abbreviation if self.abbreviation else self.organisation
return(str(output))
#@python_2_unicode_compatible # only if you need to support Python 2
class Client(models.Model):
organisation = models.ForeignKey(Organisation, to_field = 'id', on_delete = models.CASCADE, null = True)
firstname = models.TextField(max_length = None, null = True)
lastname = models.TextField(max_length = None, null = True)
status = models.BooleanField(default=1)
notes = models.TextField(max_length = None, blank = True, null = True)
old_id = models.IntegerField(default = 0)
class Meta:
ordering = ('firstname',)
def __str__(self):
fullname = str(self.firstname) + ' ' + str(self.lastname)
if self.organisation:
fullname += ' at ' + str(self.organisation)
return(fullname)
#@python_2_unicode_compatible # only if you need to support Python 2
class Location(models.Model):
description = models.TextField(max_length = None)
notes = models.TextField(max_length = None, blank = True, null = True)
old_id = models.IntegerField(default = 0)
class Meta:
ordering = ('-id',)
def __str__(self):
return(self.description)
#@python_2_unicode_compatible # only if you need to support Python 2
class Job(models.Model):
location = models.ForeignKey(Location, to_field = 'id', on_delete = models.CASCADE, blank = True, null = True)
client = models.ForeignKey(Client, to_field = 'id', on_delete = models.CASCADE)
description = models.TextField(max_length = None)
open = models.DateField()
notes = models.TextField(max_length = None, blank = True, null = True)
old_id = models.IntegerField(default = 0)
class Meta:
ordering = ('-open',)
def __str__(self):
output = str(self.id) + ' ' + str(self.client) + ' ' + str(self.description)
return(output)
def get_absolute_url(self):
return reverse('old:job_detail', args=[self.id])
#@python_2_unicode_compatible # only if you need to support Python 2
class JobStatus(models.Model):
job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
date = models.DateField()
status = models.BooleanField()
notes = models.TextField(max_length = None, blank = True, null = True)
class Meta:
ordering = ('-date',)
def __str__(self):
output = str(job.id)
return(output)
#@python_2_unicode_compatible
#class Manager(models.Model):
# job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
# person = models.ForeignKey(User, to_field = 'id', on_delete = models.CASCADE, blank = True, null = True, related_name='person')
# notes = models.TextField(max_length = None, blank = True, null = True)
#
# class Meta:
# ordering = ('-job',)
#
# def __str__(self):
# output = str(self.person)
# return(output)
#@python_2_unicode_compatible
class Closure(models.Model):
"""All closed jobs"""
job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
date = models.DateField(null = True, max_length = None, blank = True)
notes = models.TextField(max_length = None, blank = True, null = True)
class Meta:
ordering = ('-date',)
def __str__(self):
output = str(self.job)
return(output)
#@python_2_unicode_compatible # only if you need to support Python 2
class Invoice(models.Model):
job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
date = models.DateField(null = True, max_length = None, blank = True)
value = models.DecimalField(decimal_places=2, max_digits=13)
payment_date = models.DateField(null = True, max_length = None, blank = True)
notes = models.TextField(max_length = None, blank = True, null = True)
class Meta:
ordering = ('-id',)
def __str__(self):
output = str(self.id) + ' ' + str(self.date)
return(output)
#@python_2_unicode_compatible # only if you need to support Python 2
class Quote(models.Model):
job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE, blank = True, null = True)
date = models.DateField(null = True, max_length = None, blank = True)
description = models.TextField(max_length = None, blank = True, null = True)
status = models.NullBooleanField(default = None, blank = True, null = True)
notes = models.TextField(max_length = None, blank = True, null = True)
class Meta:
ordering = ('-id',)
def __str__(self):
output = str(self.id) + ' ' + str(self.date)
return(output)
#@python_2_unicode_compatible # only if you need to support Python 2
class Factor(models.Model):
job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
label = models.TextField(max_length = None)
notes = models.TextField(null = True, max_length = None, blank = True)
class Meta:
ordering = ('-id',)
def __str__(self):
output = str(self.job) + ' ' + str(self.label)
output = str(self.label)
return(output)
class Element(models.Model):
factor = models.ForeignKey('Factor', to_field = 'id', on_delete = models.CASCADE)
value = models.TextField(max_length = None)
notes = models.TextField(null = True, max_length = None, blank = True)
class Meta:
ordering = ('-id',)
def __str__(self):
return(str(self.factor) + str(self.value))
class Rank(models.Model):
parent = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE, related_name='parent_element')
child = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE, related_name='child_element')
class Meta:
ordering = ('-id',)
def __str__(self):
return(str(self.parent) + str(self.child))
#@python_2_unicode_compatible # only if you need to support Python 2
class ASC(models.Model):
element = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE)
label = models.TextField(max_length = None, blank = True, null = True)
unit_order = models.IntegerField(blank = True, null = True)
horizon_prefix = models.IntegerField(blank = True, null = True)
horizon = models.TextField(null = True, max_length = None, blank = True)
horizon_suffix = models.IntegerField(blank = True, null = True)
horizon_suffix2 = models.IntegerField(blank = True, null = True)
upper_depth = models.FloatField(blank = True, null = True)
lower_depth = models.FloatField(blank = True, null = True)
colour = models.TextField(null = True, max_length = None, blank = True)
hue_dry = models.TextField(null = True, max_length = None, blank = True)
value_dry = models.TextField(null = True, max_length = None, blank = True)
chroma_dry = models.TextField(null = True, max_length = None, blank = True)
hue_moist = models.TextField(null = True, max_length = None, blank = True)
value_moist = models.TextField(null = True, max_length = None, blank = True)
chroma_moist = models.TextField(null = True, max_length = None, blank = True)
field_texture = models.TextField(null = True, max_length = None, blank = True)
texture_qualifier = models.TextField(null = True, max_length = None, blank = True)
sand_size = models.TextField(null = True, max_length = None, blank = True)
sand_sorting = models.TextField(null = True, max_length = None, blank = True)
moisture = models.TextField(null = True, max_length = None, blank = True)
strength = models.TextField(null = True, max_length = None, blank = True)
structure_type = models.TextField(null = True, max_length = None, blank = True)
structure_grade = models.TextField(null = True, max_length = None, blank = True)
structure_size = models.TextField(null = True, max_length = None, blank = True)
coarse_frags_distribution = models.TextField(null = True, max_length = None, blank = True)
coarse_frags_abundance = models.TextField(null = True, max_length = None, blank = True)
coarse_frags_size = models.TextField(null = True, max_length = None, blank = True)
coarse_frags_roundness = models.TextField(null = True, max_length = None, blank = True)
coarse_frags_sphericity = models.TextField(null = True, max_length = None, blank = True)
coarse_frags_type = models.TextField(null = True, max_length = None, blank = True)
coarse_frags2_distribution = models.TextField(null = True, max_length = None, blank = True)
coarse_frags2_abundance = models.TextField(null = True, max_length = None, blank = True)
coarse_frags2_size = models.TextField(null = True, max_length = None, blank = True)
coarse_frags2_roundness = models.TextField(null = True, max_length = None, blank = True)
coarse_frags2_sphericity = models.TextField(null = True, max_length = None, blank = True)
coarse_frags2_type = models.TextField(null = True, max_length = None, blank = True)
voids_cracks = models.TextField(null = True, max_length = None, blank = True)
voids_pore_size = models.TextField(null = True, max_length = None, blank = True)
voids_pore_abundance = models.TextField(null = True, max_length = None, blank = True)
roots1_size = models.TextField(null = True, max_length = None, blank = True)
roots1_abundance = models.TextField(null = True, max_length = None, blank = True)
roots2_size = models.TextField(null = True, max_length = None, blank = True)
roots2_abundance = models.TextField(null = True, max_length = None, blank = True)
segregations1_colour = models.TextField(null = True, max_length = None, blank = True)
segregations1_abundance = models.TextField(null = True, max_length = None, blank = True)
segregations1_size = models.TextField(null = True, max_length = None, blank = True)
segregations1_form = models.TextField(null = True, max_length = None, blank = True)
segregations2_colour = models.TextField(null = True, max_length = None, blank = True)
segregations2_abundance = models.TextField(null = True, max_length = None, blank = True)
segregations2_size = models.TextField(null = True, max_length = None, blank = True)
segregations2_form = models.TextField(null = True, max_length = None, blank = True)
lower_bound_dist = models.TextField(null = True, max_length = None, blank = True)
lower_bound_shape = models.TextField(null = True, max_length = None, blank = True)
notes = models.TextField(null = True, max_length = None, blank = True)
data_entry_notes = models.TextField(null = True, max_length = None, blank = True)
def __str__(self):
return(str(self.element))
#@python_2_unicode_compatible # only if you need to support Python 2
class Sample(models.Model):
element = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE)
# date field here which would represent prep date
field_label = models.TextField(null = True, max_length = None, blank = True)
x_cm = models.FloatField(null = True, default = None, blank = True)
y_cm = models.FloatField(null = True, default = None, blank = True)
z_cm = models.FloatField(null = True, blank = True)
upper_depth_cm = models.FloatField(null = True, blank = True)
lower_depth_cm = models.FloatField(null = True, blank = True)
sample_and_vessel_g = models.FloatField(null = True, blank = True)
vessel_g = models.FloatField(null = True, blank = True)
gravel_g = models.FloatField(null = True, blank = True)
notes = models.TextField(null = True, max_length = None, blank = True)
def __str__(self):
return(str(self.element))
#@python_2_unicode_compatible # only if you need to support Python 2
class PSA(models.Model):
lab_id = models.IntegerField()
sample = models.ForeignKey(Sample, to_field = 'id', on_delete = models.CASCADE)
date = models.DateField(null = True, max_length = None, blank = True)
notes = models.TextField(null = True, max_length = None, blank = True)
def __str__(self):
return(self.sample)
def rename_receipt(instance, filename):
Y, m, d = instance.date.isoformat().split('-')
upload_to = Y + '/' + m + '/' + d
infile, ext = splitext(filename)
outfile = '{}{}'.format(uuid4().hex, ext)
outpath = join(upload_to, outfile)
return(outpath)
class Receipt(models.Model):
"""A ledger of receipts. Experience uploading receipts has shown that multiple documents may be relevant for a single transaction; for example, an invoice from the University of Gloucestershire for OSL dating, and, a bankwest statement documenting the transaction. This probably argues for a singular document ledger with a field linking to one or multiple documents"""
upload = models.FileField(upload_to=rename_receipt)
date = models.DateField()
value = models.DecimalField(max_digits=9, decimal_places=2)
currency = models.TextField(default='AUD')
RECEIPT_CHOICE = (
("asset", "Asset"),
("computer_part", "Computer Part"),
("computer_software", "Computer Software"),
("equipment_hire", "Equipment Hire"),
("equipment_repair", "Equipment Repair"), # this should be changed to 'equipment maintenance' to include repair, maintenance, and license fees (trailer rego)
("field_supplies", "Field Supplies"),
("hardware", "Hardware"),
("household", "Household"), # renovation and maintenance of home office property
("insurance", "Insurance"),
("it_service", "IT Service"),
("laboratory_chemicals", "Laboratory Chemicals"),
("laboratory_hardware", "Laboratory Hardware"),
("laboratory_services", "Laboratory Services"),
("laboratory_supplies", "Laboratory Supplies"),
("meals_and_accommodation", "Meals and Accommodation"),
("office_supplies", "Office Supplies"),
("phone", "Phone"),
("post", "Post"),
("professional_development", "Professional Development"),
("reference_material", "Reference Material"),
("travel", "Travel"),
("vehicle_accessories", "Vehicle Accessories"),
("vehicle_fuel", "Vehicle Fuel"),
("vehicle_insurance", "Vehicle Insurance"),
("vehicle_maintenance", "Vehicle Maintenance"),
("vehicle_registration", "Vehicle Registration"),
("wages_salary", "Wages/Salary"),
)
category = models.TextField(choices=RECEIPT_CHOICE, max_length=None, blank=True, null=True)
description = models.TextField(max_length=None, blank=True, null=True)
note = models.TextField(max_length=None, blank=True, null=True)
class Meta:
ordering = ('-id',)
def __str__(self):
output = str(self.description)
return(output)
| [
"django.db.models.DateField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.NullBooleanField",
"os.path.splitext",
"os.path.join",
"django.db.models.FileField",
"django.db.models.BooleanField",
"uuid... | [((435, 479), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'null': '(True)'}), '(max_length=None, null=True)\n', (451, 479), False, 'from django.db import models\n'), ((504, 548), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'null': '(True)'}), '(max_length=None, null=True)\n', (520, 548), False, 'from django.db import models\n'), ((568, 624), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (584, 624), False, 'from django.db import models\n'), ((644, 700), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (660, 700), False, 'from django.db import models\n'), ((721, 751), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (740, 751), False, 'from django.db import models\n'), ((1072, 1159), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Organisation'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE', 'null': '(True)'}), "(Organisation, to_field='id', on_delete=models.CASCADE,\n null=True)\n", (1089, 1159), False, 'from django.db import models\n'), ((1179, 1223), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'null': '(True)'}), '(max_length=None, null=True)\n', (1195, 1223), False, 'from django.db import models\n'), ((1244, 1288), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'null': '(True)'}), '(max_length=None, null=True)\n', (1260, 1288), False, 'from django.db import models\n'), ((1307, 1337), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(1)'}), '(default=1)\n', (1326, 1337), False, 'from django.db import models\n'), ((1351, 1407), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (1367, 1407), False, 'from django.db import models\n'), ((1428, 1458), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1447, 1458), False, 'from django.db import models\n'), ((1857, 1890), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None'}), '(max_length=None)\n', (1873, 1890), False, 'from django.db import models\n'), ((1906, 1962), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (1922, 1962), False, 'from django.db import models\n'), ((1983, 2013), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2002, 2013), False, 'from django.db import models\n'), ((2243, 2339), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Location'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), "(Location, to_field='id', on_delete=models.CASCADE, blank=\n True, null=True)\n", (2260, 2339), False, 'from django.db import models\n'), ((2357, 2423), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Client'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Client, to_field='id', on_delete=models.CASCADE)\n", (2374, 2423), False, 'from django.db import models\n'), ((2447, 2480), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None'}), '(max_length=None)\n', (2463, 2480), False, 'from django.db import models\n'), ((2495, 2513), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (2511, 2513), False, 'from django.db import models\n'), ((2527, 2583), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (2543, 2583), False, 'from django.db import models\n'), ((2604, 2634), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2623, 2634), False, 'from django.db import models\n'), ((3052, 3115), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Job'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Job, to_field='id', on_delete=models.CASCADE)\n", (3069, 3115), False, 'from django.db import models\n'), ((3132, 3150), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (3148, 3150), False, 'from django.db import models\n'), ((3165, 3186), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (3184, 3186), False, 'from django.db import models\n'), ((3200, 3256), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (3216, 3256), False, 'from django.db import models\n'), ((4022, 4085), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Job'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Job, to_field='id', on_delete=models.CASCADE)\n", (4039, 4085), False, 'from django.db import models\n'), ((4102, 4158), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (4118, 4158), False, 'from django.db import models\n'), ((4178, 4234), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (4194, 4234), False, 'from django.db import models\n'), ((4503, 4566), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Job'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Job, to_field='id', on_delete=models.CASCADE)\n", (4520, 4566), False, 'from django.db import models\n'), ((4583, 4639), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (4599, 4639), False, 'from django.db import models\n'), ((4659, 4711), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(13)'}), '(decimal_places=2, max_digits=13)\n', (4678, 4711), False, 'from django.db import models\n'), ((4732, 4788), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (4748, 4788), False, 'from django.db import models\n'), ((4808, 4864), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (4824, 4864), False, 'from django.db import models\n'), ((5151, 5241), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Job'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), "(Job, to_field='id', on_delete=models.CASCADE, blank=True,\n null=True)\n", (5168, 5241), False, 'from django.db import models\n'), ((5258, 5314), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (5274, 5314), False, 'from django.db import models\n'), ((5340, 5396), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (5356, 5396), False, 'from django.db import models\n'), ((5417, 5477), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'default': 'None', 'blank': '(True)', 'null': '(True)'}), '(default=None, blank=True, null=True)\n', (5440, 5477), False, 'from django.db import models\n'), ((5497, 5553), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (5513, 5553), False, 'from django.db import models\n'), ((5841, 5904), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Job'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Job, to_field='id', on_delete=models.CASCADE)\n", (5858, 5904), False, 'from django.db import models\n'), ((5922, 5955), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None'}), '(max_length=None)\n', (5938, 5955), False, 'from django.db import models\n'), ((5971, 6027), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (5987, 6027), False, 'from django.db import models\n'), ((6284, 6352), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Factor"""'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "('Factor', to_field='id', on_delete=models.CASCADE)\n", (6301, 6352), False, 'from django.db import models\n'), ((6370, 6403), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None'}), '(max_length=None)\n', (6386, 6403), False, 'from django.db import models\n'), ((6419, 6475), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (6435, 6475), False, 'from django.db import models\n'), ((6667, 6769), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Element'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE', 'related_name': '"""parent_element"""'}), "(Element, to_field='id', on_delete=models.CASCADE,\n related_name='parent_element')\n", (6684, 6769), False, 'from django.db import models\n'), ((6783, 6884), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Element'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE', 'related_name': '"""child_element"""'}), "(Element, to_field='id', on_delete=models.CASCADE,\n related_name='child_element')\n", (6800, 6884), False, 'from django.db import models\n'), ((7137, 7204), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Element'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Element, to_field='id', on_delete=models.CASCADE)\n", (7154, 7204), False, 'from django.db import models\n'), ((7222, 7278), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (7238, 7278), False, 'from django.db import models\n'), ((7303, 7345), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7322, 7345), False, 'from django.db import models\n'), ((7372, 7414), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7391, 7414), False, 'from django.db import models\n'), ((7434, 7490), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (7450, 7490), False, 'from django.db import models\n'), ((7519, 7561), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7538, 7561), False, 'from django.db import models\n'), ((7589, 7631), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7608, 7631), False, 'from django.db import models\n'), ((7655, 7695), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7672, 7695), False, 'from django.db import models\n'), ((7719, 7759), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7736, 7759), False, 'from django.db import models\n'), ((7778, 7834), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (7794, 7834), False, 'from django.db import models\n'), ((7856, 7912), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (7872, 7912), False, 'from django.db import models\n'), ((7936, 7992), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (7952, 7992), False, 'from django.db import models\n'), ((8017, 8073), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8033, 8073), False, 'from django.db import models\n'), ((8097, 8153), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8113, 8153), False, 'from django.db import models\n'), ((8179, 8235), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8195, 8235), False, 'from django.db import models\n'), ((8262, 8318), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8278, 8318), False, 'from django.db import models\n'), ((8346, 8402), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8362, 8402), False, 'from django.db import models\n'), ((8434, 8490), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8450, 8490), False, 'from django.db import models\n'), ((8514, 8570), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8530, 8570), False, 'from django.db import models\n'), ((8597, 8653), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8613, 8653), False, 'from django.db import models\n'), ((8676, 8732), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8692, 8732), False, 'from django.db import models\n'), ((8755, 8811), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8771, 8811), False, 'from django.db import models\n'), ((8840, 8896), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8856, 8896), False, 'from django.db import models\n'), ((8926, 8982), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (8942, 8982), False, 'from django.db import models\n'), ((9011, 9067), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9027, 9067), False, 'from django.db import models\n'), ((9107, 9163), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9123, 9163), False, 'from django.db import models\n'), ((9200, 9256), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9216, 9256), False, 'from django.db import models\n'), ((9288, 9344), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9304, 9344), False, 'from django.db import models\n'), ((9381, 9437), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9397, 9437), False, 'from django.db import models\n'), ((9475, 9531), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9491, 9531), False, 'from django.db import models\n'), ((9563, 9619), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9579, 9619), False, 'from django.db import models\n'), ((9660, 9716), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9676, 9716), False, 'from django.db import models\n'), ((9754, 9810), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9770, 9810), False, 'from django.db import models\n'), ((9843, 9899), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9859, 9899), False, 'from django.db import models\n'), ((9937, 9993), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (9953, 9993), False, 'from django.db import models\n'), ((10032, 10088), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10048, 10088), False, 'from django.db import models\n'), ((10121, 10177), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10137, 10177), False, 'from django.db import models\n'), ((10204, 10260), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10220, 10260), False, 'from django.db import models\n'), ((10290, 10346), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10306, 10346), False, 'from django.db import models\n'), ((10381, 10437), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10397, 10437), False, 'from django.db import models\n'), ((10463, 10519), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10479, 10519), False, 'from django.db import models\n'), ((10550, 10606), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10566, 10606), False, 'from django.db import models\n'), ((10632, 10688), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10648, 10688), False, 'from django.db import models\n'), ((10719, 10775), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10735, 10775), False, 'from django.db import models\n'), ((10810, 10866), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10826, 10866), False, 'from django.db import models\n'), ((10904, 10960), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (10920, 10960), False, 'from django.db import models\n'), ((10993, 11049), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11009, 11049), False, 'from django.db import models\n'), ((11082, 11138), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11098, 11138), False, 'from django.db import models\n'), ((11173, 11229), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11189, 11229), False, 'from django.db import models\n'), ((11267, 11323), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11283, 11323), False, 'from django.db import models\n'), ((11356, 11412), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11372, 11412), False, 'from django.db import models\n'), ((11445, 11501), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11461, 11501), False, 'from django.db import models\n'), ((11532, 11588), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11548, 11588), False, 'from django.db import models\n'), ((11620, 11676), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11636, 11676), False, 'from django.db import models\n'), ((11696, 11752), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11712, 11752), False, 'from django.db import models\n'), ((11783, 11839), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (11799, 11839), False, 'from django.db import models\n'), ((12036, 12103), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Element'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Element, to_field='id', on_delete=models.CASCADE)\n", (12053, 12103), False, 'from django.db import models\n'), ((12182, 12238), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (12198, 12238), False, 'from django.db import models\n'), ((12257, 12311), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'default': 'None', 'blank': '(True)'}), '(null=True, default=None, blank=True)\n', (12274, 12311), False, 'from django.db import models\n'), ((12330, 12384), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'default': 'None', 'blank': '(True)'}), '(null=True, default=None, blank=True)\n', (12347, 12384), False, 'from django.db import models\n'), ((12403, 12443), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (12420, 12443), False, 'from django.db import models\n'), ((12470, 12510), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (12487, 12510), False, 'from django.db import models\n'), ((12537, 12577), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (12554, 12577), False, 'from django.db import models\n'), ((12609, 12649), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (12626, 12649), False, 'from django.db import models\n'), ((12670, 12710), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (12687, 12710), False, 'from django.db import models\n'), ((12731, 12771), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (12748, 12771), False, 'from django.db import models\n'), ((12789, 12845), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (12805, 12845), False, 'from django.db import models\n'), ((13038, 13059), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (13057, 13059), False, 'from django.db import models\n'), ((13074, 13140), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Sample'], {'to_field': '"""id"""', 'on_delete': 'models.CASCADE'}), "(Sample, to_field='id', on_delete=models.CASCADE)\n", (13091, 13140), False, 'from django.db import models\n'), ((13157, 13213), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (13173, 13213), False, 'from django.db import models\n'), ((13233, 13289), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'max_length': 'None', 'blank': '(True)'}), '(null=True, max_length=None, blank=True)\n', (13249, 13289), False, 'from django.db import models\n'), ((13522, 13540), 'os.path.splitext', 'splitext', (['filename'], {}), '(filename)\n', (13530, 13540), False, 'from os.path import join, splitext\n'), ((13603, 13627), 'os.path.join', 'join', (['upload_to', 'outfile'], {}), '(upload_to, outfile)\n', (13607, 13627), False, 'from os.path import join, splitext\n'), ((14070, 14112), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': 'rename_receipt'}), '(upload_to=rename_receipt)\n', (14086, 14112), False, 'from django.db import models\n'), ((14125, 14143), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (14141, 14143), False, 'from django.db import models\n'), ((14157, 14208), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(9)', 'decimal_places': '(2)'}), '(max_digits=9, decimal_places=2)\n', (14176, 14208), False, 'from django.db import models\n'), ((14225, 14256), 'django.db.models.TextField', 'models.TextField', ([], {'default': '"""AUD"""'}), "(default='AUD')\n", (14241, 14256), False, 'from django.db import models\n'), ((15758, 15843), 'django.db.models.TextField', 'models.TextField', ([], {'choices': 'RECEIPT_CHOICE', 'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(choices=RECEIPT_CHOICE, max_length=None, blank=True, null=True\n )\n', (15774, 15843), False, 'from django.db import models\n'), ((15858, 15914), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (15874, 15914), False, 'from django.db import models\n'), ((15927, 15983), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': 'None', 'blank': '(True)', 'null': '(True)'}), '(max_length=None, blank=True, null=True)\n', (15943, 15983), False, 'from django.db import models\n'), ((2886, 2927), 'django.urls.reverse', 'reverse', (['"""old:job_detail"""'], {'args': '[self.id]'}), "('old:job_detail', args=[self.id])\n", (2893, 2927), False, 'from django.urls import reverse\n'), ((13570, 13577), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (13575, 13577), False, 'from uuid import uuid4\n')] |
import binascii
from winsniffer.gui.parsing.default_parser import DefaultParser
def prettify_mac_address(mac_address):
return ':'.join(map(binascii.hexlify, mac_address))
def get_protocol_stack(frame):
protocols = []
while hasattr(frame, 'data'):
protocols.append(frame.__class__.__name__)
frame = frame.data
return protocols
def find_parser(frame, data, parsers):
protocol_stack_set = set(get_protocol_stack(frame))
for parser in parsers:
if parser.condition(protocol_stack_set, data):
return parser
return DefaultParser()
def get_unparsed_frame_data(frame):
while not isinstance(frame, str):
frame = frame.data
return frame
def get_frame_data_preview(frame, parsers):
data = get_unparsed_frame_data(frame)
parser = find_parser(frame, data, parsers)
try:
parsed_data = parser.parse(frame, data)
except Exception as e:
import traceback
parsed_data = traceback.format_exc()
return len(data), parsed_data
| [
"traceback.format_exc",
"winsniffer.gui.parsing.default_parser.DefaultParser"
] | [((579, 594), 'winsniffer.gui.parsing.default_parser.DefaultParser', 'DefaultParser', ([], {}), '()\n', (592, 594), False, 'from winsniffer.gui.parsing.default_parser import DefaultParser\n'), ((981, 1003), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1001, 1003), False, 'import traceback\n')] |
# Copyright 2014 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import logging
from logging import handlers
from typing import List, Tuple, Optional
from dataclasses import asdict, dataclass
from dataclasses_json import dataclass_json, LetterCase
from aiohttp.web import View, Application, run_app
from aiohttp import web
import markdown
from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache
import pymdownx.emoji
import files
import pictures
import search
import appconf
if 'RIKI_CONF_PATH' in os.environ:
conf_path = os.environ['RIKI_CONF_PATH']
else:
conf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), 'config.json'))
conf = appconf.load_conf(conf_path)
APP_NAME = conf.app_name
APP_PATH = conf.app_path
logger = logging.getLogger('')
def setup_logger(path, debug=False):
"""
Sets-up Python logger with file rotation
Arguments:
path -- where the log files will be written
debug -- debug mode on/off (bool)
"""
if path == '#stderr':
hdlr = logging.StreamHandler(sys.stderr)
elif path == '#stdout':
hdlr = logging.StreamHandler(sys.stdout)
else:
hdlr = handlers.RotatingFileHandler(path, maxBytes=(1 << 23), backupCount=50)
hdlr.setFormatter(logging.Formatter('%(asctime)s [%(name)s] %(levelname)s: %(message)s'))
logger.addHandler(hdlr)
logger.setLevel(logging.INFO if not debug else logging.DEBUG)
setup_logger(str(conf.log_path))
logging.getLogger(__name__).info(f'using Riki configuration {conf_path}')
markdown_config = {
'pymdownx.emoji': {
'emoji_index': pymdownx.emoji.twemoji,
'emoji_generator': pymdownx.emoji.to_svg,
'options': {
'image_path': conf.emoji_cdn_url
}
},
'pymdownx.arithmatex': {
'generic': True,
'preview': False
}
}
def path_dir_elms(path: str) -> List[Tuple[str, str]]:
items = [x for x in path.split('/') if x != '']
cumul = []
ans = []
for elm in items:
cumul.append(elm)
ans.append((elm, '/'.join(cumul[:])))
return ans
def load_markdown(path: str) -> str:
"""
Loads a markdown file and returns an HTML code
arguments:
path -- path to a markdown file
returns:
a string containing output HTML
"""
with open(path) as page_file:
return markdown.markdown(
page_file.read(),
extensions=conf.markdown_extensions,
extension_configs=markdown_config)
routes = web.RouteTableDef()
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class DirMetadata:
directory_type: Optional[str] = 'page'
description: Optional[str] = None
class ActionHelper:
def __init__(self, conf: appconf.Conf, assets_url: str):
self._dir_metadata = {}
self._cache = FileSystemBytecodeCache(conf.template_cache_dir) if conf.template_cache_dir else None
self._assets_url = assets_url
self._template_env: Environment = Environment(
loader=FileSystemLoader(os.path.realpath(os.path.join(os.path.dirname(__file__), 'templates'))),
bytecode_cache=self._cache,
trim_blocks=True,
lstrip_blocks=True)
def response_html(self, template, data):
values = dict(
app_name=APP_NAME,
app_path=APP_PATH,
enable_search=True) # TODO
values.update(data)
template_object = self._template_env.get_template(template)
return web.Response(text=template_object.render(values), content_type='text/html')
def response_file(self, path: str):
return web.FileResponse(path)
def dir_metadata(self, page_fs_path: str) -> DirMetadata:
try:
dir_path = page_fs_path if files.page_is_dir(page_fs_path) else os.path.dirname(page_fs_path)
except FileNotFoundError as ex:
if os.path.basename(page_fs_path) == 'index': # 'index' is an acceptable virtual page
dir_path = os.path.dirname(page_fs_path)
else:
raise ex
if dir_path not in self._dir_metadata:
try:
with open(os.path.join(dir_path, 'metadata.json'), 'rb') as fr:
self._dir_metadata[dir_path] = DirMetadata.from_json(fr.read())
except IOError:
self._dir_metadata[dir_path] = DirMetadata()
return self._dir_metadata[dir_path]
class BaseAction(View):
@property
def _ctx(self) -> ActionHelper:
return self.request.app['helper']
def response_html(self, template, data):
return self._ctx.response_html(template, data)
def response_file(self, path: bytes):
return self._ctx.response_file(path)
@property
def riki_path(self):
return self.request.match_info['path']
def url_arg(self, k):
return self.request.rel_url.query.get(k)
class Action(BaseAction):
@property
def data_dir(self):
return conf.data_dir
@staticmethod
def get_current_dirname(path):
ans = os.path.basename(path)
if ans == 'index':
ans = os.path.basename(os.path.dirname(path))
return ans
def generate_page_list(self, curr_dir_fs):
page_list = files.list_files(curr_dir_fs, None, recursive=False, include_dirs=True)
return [(
files.strip_prefix(x, self.data_dir),
os.path.basename(files.strip_prefix(x, self.data_dir)),
os.path.isdir(x)
) for x in page_list]
@property
def dir_metadata(self) -> DirMetadata:
return self._ctx.dir_metadata(os.path.join(self.data_dir, self.riki_path))
@routes.view('/')
class Index(Action):
"""
Homepage
"""
async def get(self):
raise web.HTTPSeeOther(f'{APP_PATH}page/index')
@routes.view('/page')
class PageNoSpec(View):
async def get(self):
raise web.HTTPSeeOther(f'{APP_PATH}page/index')
@routes.view('/page/{path:.+\.(txt|pdf|json|xml|yml|yaml)}')
class Plain(Action):
async def get(self):
return self.response_file(os.path.join(self.data_dir, self.riki_path))
@routes.view('/page/{path:.+\.(jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)}')
class Picture(Action):
"""
Provides access to images
"""
async def get(self):
fs_path = os.path.join(self.data_dir, self.riki_path)
width = self.request.rel_url.query.get('width')
normalize = bool(int(self.request.rel_url.query.get('normalize', '0')))
if width is not None:
fs_path = pictures.get_resized_image(
cache_dir=conf.picture_cache_dir,
path=fs_path,
width=width,
normalize=normalize)
return self.response_file(fs_path)
@routes.view('/page/{path:.*}')
class Page(Action):
"""
A riki page
"""
async def get(self):
if not self.riki_path:
raise web.HTTPSeeOther(f'{APP_PATH}page/index')
page_fs_path = os.path.join(self.data_dir, self.riki_path)
pelms = page_fs_path.rsplit('.', 1)
page_suff = None if len(pelms) < 2 else pelms[-1]
if self.dir_metadata.directory_type == 'gallery':
raise web.HTTPSeeOther(f'{APP_PATH}gallery/{self.riki_path}/index')
elif files.page_is_dir(page_fs_path):
if self.dir_metadata.directory_type == 'page':
raise web.HTTPSeeOther(f'{APP_PATH}page/{self.riki_path}/index')
else:
raise web.HTTPServerError('Unknown page type')
elif page_suff and page_suff in appconf.RAW_FILES:
with open(page_fs_path, 'rb') as fr:
web.header('Content-Type', appconf.RAW_FILES[page_suff])
return fr.read()
else:
page_fs_path = f'{page_fs_path}.md'
curr_dir = os.path.dirname(self.riki_path)
page_name = os.path.basename(self.riki_path)
# setup the directory information
if curr_dir:
path_elms = path_dir_elms(curr_dir)
curr_dir_fs = os.path.join(self.data_dir, curr_dir)
else:
curr_dir = ''
path_elms = []
curr_dir_fs = self.data_dir
# transform the page
if files.page_exists(page_fs_path):
page_info = files.get_version_info(
self.data_dir, page_fs_path, info_encoding=conf.hg_info_encoding)
inner_html = load_markdown(page_fs_path)
page_template = 'page.html'
else:
inner_html = ''
page_info = files.RevisionInfo()
page_template = 'dummy_page.html'
data = dict(
html=inner_html,
page_list=self.generate_page_list(curr_dir_fs),
path_elms=path_elms,
page_info=page_info,
page_name=page_name,
curr_dir_name=self.get_current_dirname(curr_dir))
return self.response_html(page_template, data)
@routes.view('/_images')
class Images(Action):
"""
A page displaying list of all images
"""
async def get(self):
images = files.list_files(self.data_dir, files.file_is_image, recursive=True)
extended = []
for img in images:
extended.append(files.get_file_info(img, path_prefix=self.data_dir))
return self.response_html('files.html', dict(files=extended))
@routes.view('/gallery/{path:.*}')
class Gallery(Action):
async def get_num_files(self, path: str):
return len(os.listdir(path)) - 1 # minus metadata.json which is required for a gallery page
async def get(self):
gallery_fs_dir = os.path.join(self.data_dir, self.riki_path)
if files.page_is_dir(gallery_fs_dir):
if self.dir_metadata.directory_type == 'page':
raise web.HTTPSeeOther(f'{APP_PATH}page/{self.riki_path}/index')
elif self.dir_metadata.directory_type == 'gallery':
raise web.HTTPSeeOther(f'{APP_PATH}gallery/{self.riki_path}/index')
else:
raise web.HTTPServerError('Unknown page type')
elif os.path.isfile(gallery_fs_dir):
raise web.HTTPInternalServerError('Gallery directory malformed')
elif os.path.basename(gallery_fs_dir) == 'index':
gallery_fs_dir = os.path.dirname(gallery_fs_dir)
else:
raise web.HTTPNotFound()
try:
images = files.list_files(gallery_fs_dir, files.file_is_image, recursive=False)
except FileNotFoundError:
raise web.HTTPNotFound()
extended: List[files.FileInfo] = []
for img in images:
info = files.get_file_info(img, path_prefix=self.data_dir)
info.metadata = pictures.get_metadata(img)
extended.append(info)
values = dict(
files=extended,
page_list=[],
path_elms=path_dir_elms(self.riki_path),
curr_dir_name=self.get_current_dirname(self.riki_path),
num_files=await self.get_num_files(gallery_fs_dir),
description=self.dir_metadata.description)
return self.response_html('gallery.html', values)
@routes.view('/_search')
class Search(Action):
"""
Search results page
"""
async def get(self):
srch = search.FulltextSearcher(conf.search_index_dir, conf.data_dir)
rows = srch.search(self.url_arg('query'))
values = dict(query=self.url_arg('query'), rows=rows)
return self.response_html('search.html', values)
app = Application()
app.add_routes(routes)
async def setup_runtime(app):
app['helper'] = ActionHelper(conf, assets_url=None) # TODO
app.on_startup.append(setup_runtime)
async def factory():
return app
if __name__ == '__main__':
app.update(asdict(conf))
run_app(app, port='8080')
| [
"logging.getLogger",
"logging.StreamHandler",
"files.list_files",
"files.RevisionInfo",
"files.page_exists",
"aiohttp.web.Application",
"files.strip_prefix",
"dataclasses_json.dataclass_json",
"jinja2.FileSystemBytecodeCache",
"aiohttp.web.FileResponse",
"os.listdir",
"dataclasses.asdict",
"... | [((1243, 1271), 'appconf.load_conf', 'appconf.load_conf', (['conf_path'], {}), '(conf_path)\n', (1260, 1271), False, 'import appconf\n'), ((1332, 1353), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (1349, 1353), False, 'import logging\n'), ((3073, 3092), 'aiohttp.web.RouteTableDef', 'web.RouteTableDef', ([], {}), '()\n', (3090, 3092), False, 'from aiohttp import web\n'), ((3096, 3140), 'dataclasses_json.dataclass_json', 'dataclass_json', ([], {'letter_case': 'LetterCase.CAMEL'}), '(letter_case=LetterCase.CAMEL)\n', (3110, 3140), False, 'from dataclasses_json import dataclass_json, LetterCase\n'), ((12146, 12159), 'aiohttp.web.Application', 'Application', ([], {}), '()\n', (12157, 12159), False, 'from aiohttp.web import View, Application, run_app\n'), ((12414, 12439), 'aiohttp.web.run_app', 'run_app', (['app'], {'port': '"""8080"""'}), "(app, port='8080')\n", (12421, 12439), False, 'from aiohttp.web import View, Application, run_app\n'), ((1597, 1630), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (1618, 1630), False, 'import logging\n'), ((1826, 1896), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(name)s] %(levelname)s: %(message)s"""'], {}), "('%(asctime)s [%(name)s] %(levelname)s: %(message)s')\n", (1843, 1896), False, 'import logging\n'), ((2027, 2054), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2044, 2054), False, 'import logging\n'), ((4195, 4217), 'aiohttp.web.FileResponse', 'web.FileResponse', (['path'], {}), '(path)\n', (4211, 4217), False, 'from aiohttp import web\n'), ((5635, 5657), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (5651, 5657), False, 'import os\n'), ((5830, 5901), 'files.list_files', 'files.list_files', (['curr_dir_fs', 'None'], {'recursive': '(False)', 'include_dirs': '(True)'}), '(curr_dir_fs, None, recursive=False, include_dirs=True)\n', (5846, 5901), False, 'import files\n'), ((6363, 6404), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}page/index"""'], {}), "(f'{APP_PATH}page/index')\n", (6379, 6404), False, 'from aiohttp import web\n'), ((6493, 6534), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}page/index"""'], {}), "(f'{APP_PATH}page/index')\n", (6509, 6534), False, 'from aiohttp import web\n'), ((6908, 6951), 'os.path.join', 'os.path.join', (['self.data_dir', 'self.riki_path'], {}), '(self.data_dir, self.riki_path)\n', (6920, 6951), False, 'import os\n'), ((7583, 7626), 'os.path.join', 'os.path.join', (['self.data_dir', 'self.riki_path'], {}), '(self.data_dir, self.riki_path)\n', (7595, 7626), False, 'import os\n'), ((8851, 8882), 'files.page_exists', 'files.page_exists', (['page_fs_path'], {}), '(page_fs_path)\n', (8868, 8882), False, 'import files\n'), ((9716, 9784), 'files.list_files', 'files.list_files', (['self.data_dir', 'files.file_is_image'], {'recursive': '(True)'}), '(self.data_dir, files.file_is_image, recursive=True)\n', (9732, 9784), False, 'import files\n'), ((10243, 10286), 'os.path.join', 'os.path.join', (['self.data_dir', 'self.riki_path'], {}), '(self.data_dir, self.riki_path)\n', (10255, 10286), False, 'import os\n'), ((10298, 10331), 'files.page_is_dir', 'files.page_is_dir', (['gallery_fs_dir'], {}), '(gallery_fs_dir)\n', (10315, 10331), False, 'import files\n'), ((11907, 11968), 'search.FulltextSearcher', 'search.FulltextSearcher', (['conf.search_index_dir', 'conf.data_dir'], {}), '(conf.search_index_dir, conf.data_dir)\n', (11930, 11968), False, 'import search\n'), ((12396, 12408), 'dataclasses.asdict', 'asdict', (['conf'], {}), '(conf)\n', (12402, 12408), False, 'from dataclasses import asdict, dataclass\n'), ((1193, 1218), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1208, 1218), False, 'import os\n'), ((1674, 1707), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (1695, 1707), False, 'import logging\n'), ((1733, 1801), 'logging.handlers.RotatingFileHandler', 'handlers.RotatingFileHandler', (['path'], {'maxBytes': '(1 << 23)', 'backupCount': '(50)'}), '(path, maxBytes=1 << 23, backupCount=50)\n', (1761, 1801), False, 'from logging import handlers\n'), ((3392, 3440), 'jinja2.FileSystemBytecodeCache', 'FileSystemBytecodeCache', (['conf.template_cache_dir'], {}), '(conf.template_cache_dir)\n', (3415, 3440), False, 'from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache\n'), ((6209, 6252), 'os.path.join', 'os.path.join', (['self.data_dir', 'self.riki_path'], {}), '(self.data_dir, self.riki_path)\n', (6221, 6252), False, 'import os\n'), ((6679, 6722), 'os.path.join', 'os.path.join', (['self.data_dir', 'self.riki_path'], {}), '(self.data_dir, self.riki_path)\n', (6691, 6722), False, 'import os\n'), ((7140, 7252), 'pictures.get_resized_image', 'pictures.get_resized_image', ([], {'cache_dir': 'conf.picture_cache_dir', 'path': 'fs_path', 'width': 'width', 'normalize': 'normalize'}), '(cache_dir=conf.picture_cache_dir, path=fs_path,\n width=width, normalize=normalize)\n', (7166, 7252), False, 'import pictures\n'), ((7517, 7558), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}page/index"""'], {}), "(f'{APP_PATH}page/index')\n", (7533, 7558), False, 'from aiohttp import web\n'), ((7810, 7871), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}gallery/{self.riki_path}/index"""'], {}), "(f'{APP_PATH}gallery/{self.riki_path}/index')\n", (7826, 7871), False, 'from aiohttp import web\n'), ((7885, 7916), 'files.page_is_dir', 'files.page_is_dir', (['page_fs_path'], {}), '(page_fs_path)\n', (7902, 7916), False, 'import files\n'), ((8665, 8702), 'os.path.join', 'os.path.join', (['self.data_dir', 'curr_dir'], {}), '(self.data_dir, curr_dir)\n', (8677, 8702), False, 'import os\n'), ((8908, 9001), 'files.get_version_info', 'files.get_version_info', (['self.data_dir', 'page_fs_path'], {'info_encoding': 'conf.hg_info_encoding'}), '(self.data_dir, page_fs_path, info_encoding=conf.\n hg_info_encoding)\n', (8930, 9001), False, 'import files\n'), ((9173, 9193), 'files.RevisionInfo', 'files.RevisionInfo', ([], {}), '()\n', (9191, 9193), False, 'import files\n'), ((10715, 10745), 'os.path.isfile', 'os.path.isfile', (['gallery_fs_dir'], {}), '(gallery_fs_dir)\n', (10729, 10745), False, 'import os\n'), ((11029, 11099), 'files.list_files', 'files.list_files', (['gallery_fs_dir', 'files.file_is_image'], {'recursive': '(False)'}), '(gallery_fs_dir, files.file_is_image, recursive=False)\n', (11045, 11099), False, 'import files\n'), ((11262, 11313), 'files.get_file_info', 'files.get_file_info', (['img'], {'path_prefix': 'self.data_dir'}), '(img, path_prefix=self.data_dir)\n', (11281, 11313), False, 'import files\n'), ((11342, 11368), 'pictures.get_metadata', 'pictures.get_metadata', (['img'], {}), '(img)\n', (11363, 11368), False, 'import pictures\n'), ((4333, 4364), 'files.page_is_dir', 'files.page_is_dir', (['page_fs_path'], {}), '(page_fs_path)\n', (4350, 4364), False, 'import files\n'), ((4370, 4399), 'os.path.dirname', 'os.path.dirname', (['page_fs_path'], {}), '(page_fs_path)\n', (4385, 4399), False, 'import os\n'), ((5720, 5741), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (5735, 5741), False, 'import os\n'), ((5936, 5972), 'files.strip_prefix', 'files.strip_prefix', (['x', 'self.data_dir'], {}), '(x, self.data_dir)\n', (5954, 5972), False, 'import files\n'), ((6062, 6078), 'os.path.isdir', 'os.path.isdir', (['x'], {}), '(x)\n', (6075, 6078), False, 'import os\n'), ((9862, 9913), 'files.get_file_info', 'files.get_file_info', (['img'], {'path_prefix': 'self.data_dir'}), '(img, path_prefix=self.data_dir)\n', (9881, 9913), False, 'import files\n'), ((10111, 10127), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (10121, 10127), False, 'import os\n'), ((10414, 10472), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}page/{self.riki_path}/index"""'], {}), "(f'{APP_PATH}page/{self.riki_path}/index')\n", (10430, 10472), False, 'from aiohttp import web\n'), ((10765, 10823), 'aiohttp.web.HTTPInternalServerError', 'web.HTTPInternalServerError', (['"""Gallery directory malformed"""'], {}), "('Gallery directory malformed')\n", (10792, 10823), False, 'from aiohttp import web\n'), ((11152, 11170), 'aiohttp.web.HTTPNotFound', 'web.HTTPNotFound', ([], {}), '()\n', (11168, 11170), False, 'from aiohttp import web\n'), ((4455, 4485), 'os.path.basename', 'os.path.basename', (['page_fs_path'], {}), '(page_fs_path)\n', (4471, 4485), False, 'import os\n'), ((4566, 4595), 'os.path.dirname', 'os.path.dirname', (['page_fs_path'], {}), '(page_fs_path)\n', (4581, 4595), False, 'import os\n'), ((6007, 6043), 'files.strip_prefix', 'files.strip_prefix', (['x', 'self.data_dir'], {}), '(x, self.data_dir)\n', (6025, 6043), False, 'import files\n'), ((7999, 8057), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}page/{self.riki_path}/index"""'], {}), "(f'{APP_PATH}page/{self.riki_path}/index')\n", (8015, 8057), False, 'from aiohttp import web\n'), ((8098, 8138), 'aiohttp.web.HTTPServerError', 'web.HTTPServerError', (['"""Unknown page type"""'], {}), "('Unknown page type')\n", (8117, 8138), False, 'from aiohttp import web\n'), ((8438, 8469), 'os.path.dirname', 'os.path.dirname', (['self.riki_path'], {}), '(self.riki_path)\n', (8453, 8469), False, 'import os\n'), ((8494, 8526), 'os.path.basename', 'os.path.basename', (['self.riki_path'], {}), '(self.riki_path)\n', (8510, 8526), False, 'import os\n'), ((10559, 10620), 'aiohttp.web.HTTPSeeOther', 'web.HTTPSeeOther', (['f"""{APP_PATH}gallery/{self.riki_path}/index"""'], {}), "(f'{APP_PATH}gallery/{self.riki_path}/index')\n", (10575, 10620), False, 'from aiohttp import web\n'), ((10661, 10701), 'aiohttp.web.HTTPServerError', 'web.HTTPServerError', (['"""Unknown page type"""'], {}), "('Unknown page type')\n", (10680, 10701), False, 'from aiohttp import web\n'), ((10837, 10869), 'os.path.basename', 'os.path.basename', (['gallery_fs_dir'], {}), '(gallery_fs_dir)\n', (10853, 10869), False, 'import os\n'), ((10911, 10942), 'os.path.dirname', 'os.path.dirname', (['gallery_fs_dir'], {}), '(gallery_fs_dir)\n', (10926, 10942), False, 'import os\n'), ((10975, 10993), 'aiohttp.web.HTTPNotFound', 'web.HTTPNotFound', ([], {}), '()\n', (10991, 10993), False, 'from aiohttp import web\n'), ((4729, 4768), 'os.path.join', 'os.path.join', (['dir_path', '"""metadata.json"""'], {}), "(dir_path, 'metadata.json')\n", (4741, 4768), False, 'import os\n'), ((8263, 8319), 'aiohttp.web.header', 'web.header', (['"""Content-Type"""', 'appconf.RAW_FILES[page_suff]'], {}), "('Content-Type', appconf.RAW_FILES[page_suff])\n", (8273, 8319), False, 'from aiohttp import web\n'), ((3637, 3662), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3652, 3662), False, 'import os\n')] |
import io
import torchvision.transforms as transforms
from PIL import Image
import onnxruntime as ort
import numpy as np
class_map = {
0: "10 Reais Frente",
1: "10 Reais Verso",
2: "20 Reais Frente",
3: "20 Reais Verso",
4: "2 Reais Frente",
5: "2 Reais Verso",
6: "50 Reais Frente",
7: "50 Reais Verso",
8: "5 Reais Frente",
9: "5 Reais Verso"
}
def transform_image(image_bytes):
my_transforms = transforms.Compose([
transforms.Resize([224, 224]),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
image = Image.open(io.BytesIO(image_bytes))
return my_transforms(image).unsqueeze_(0)
def get_prediction(image_bytes, inference_session):
tensor = transform_image(image_bytes=image_bytes)
outputs = inference_session.run(None, {'input.1': tensor.numpy()})
y_hat = np.argmax(outputs[0], axis=1)[0]
return class_map[y_hat]
if __name__ == "__main__":
ort_session = ort.InferenceSession('app/models/banknote_best.onnx')
filename = [
"data/validation/2reaisFrente/compressed_0_1835891.jpeg",
'data/validation/2reaisVerso/compressed_0_3752849.jpeg',
"data/validation/5reaisFrente/compressed_0_1986857.jpeg",
"data/validation/5reaisVerso/compressed_0_4651610.jpeg",
"data/validation/10reaisFrente/compressed_0_2854543.jpeg",
"data/validation/10reaisVerso/compressed_0_2175135.jpeg",
'data/validation/20reaisFrente/compressed_0_1516768.jpeg',
'data/validation/20reaisVerso/compressed_0_3080811.jpeg',
'data/validation/50reaisFrente/compressed_0_1478513.jpeg',
'data/validation/50reaisVerso/compressed_0_3923784.jpeg']
for img in filename:
with open(img, 'rb') as f:
image_bytes = f.read()
tensor = get_prediction(image_bytes, ort_session)
print(tensor)
| [
"onnxruntime.InferenceSession",
"numpy.argmax",
"io.BytesIO",
"torchvision.transforms.Normalize",
"torchvision.transforms.Resize",
"torchvision.transforms.ToTensor"
] | [((1044, 1097), 'onnxruntime.InferenceSession', 'ort.InferenceSession', (['"""app/models/banknote_best.onnx"""'], {}), "('app/models/banknote_best.onnx')\n", (1064, 1097), True, 'import onnxruntime as ort\n'), ((669, 692), 'io.BytesIO', 'io.BytesIO', (['image_bytes'], {}), '(image_bytes)\n', (679, 692), False, 'import io\n'), ((931, 960), 'numpy.argmax', 'np.argmax', (['outputs[0]'], {'axis': '(1)'}), '(outputs[0], axis=1)\n', (940, 960), True, 'import numpy as np\n'), ((490, 519), 'torchvision.transforms.Resize', 'transforms.Resize', (['[224, 224]'], {}), '([224, 224])\n', (507, 519), True, 'import torchvision.transforms as transforms\n'), ((533, 554), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (552, 554), True, 'import torchvision.transforms as transforms\n'), ((568, 634), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (588, 634), True, 'import torchvision.transforms as transforms\n')] |
import arcpy
arcpy.env.workspace = "c:/temp/Donnees.gdb"
arcpy.env.overwriteOutput = True
listes = arcpy.ListDatasets()
for d in listes:
print(d) | [
"arcpy.ListDatasets"
] | [((100, 120), 'arcpy.ListDatasets', 'arcpy.ListDatasets', ([], {}), '()\n', (118, 120), False, 'import arcpy\n')] |
import random
import socket
import time
class client:
def __init__(self, name, address, socket, color):
self.name = name
self.address = address
self.socket = socket
self.color = color
sep = '\n'
def dice_roll():
return (str(random.randint(1, 6)) + ',' + str(random.randint(1, 6)))
def readfromBuffer(sock):
totalMessage = ""
while sep not in totalMessage:
totalMessage += sock.recv(1).decode('utf-8')
return totalMessage[:-1]
clients = []
Colors = ['red', 'cyan', 'orange', 'blue', 'green', 'pink', 'yellow']
random.shuffle(Colors)
ServerSocket = socket.socket()
host = '127.0.0.1'
port = 1233
ThreadCount = 0
try:
ServerSocket.bind((host, port))
except socket.error as e:
print(str(e))
def sendToAll(message):
for cli in clients:
cli.socket.send(str.encode(message + "\n"))
def sendToAllButOne(message, cli2):
for cli in clients:
if (cli != cli2):
cli.socket.send(str.encode(message + "\n"))
print('Waitiing for a Connection..')
ServerSocket.listen(5)
Client, address = ServerSocket.accept()
res = readfromBuffer(Client)
name = res
print(name)
Client.send(str.encode("host\n"))
clients.append(client(name, address, Client, Colors[0]))
Colors.pop(0)
numplayers = readfromBuffer(Client)
print(int(numplayers))
for x in range(int(numplayers) - 1):
Client, address = ServerSocket.accept()
res = readfromBuffer(Client)
name = res
print(name)
# for cli in clients:
# i =0
# cli.socket.send(str.encode(name+" connected\n"))
clients.append(client(name, address, Client, Colors[0]))
Colors.pop(0)
random.shuffle(clients)
for cli in clients:
sendToAllButOne("enemy," + cli.name + "," + cli.color, cli)
for cli in clients:
sendstring = "color,"
sendstring = sendstring + cli.color + '\n'
cli.socket.send(str.encode(sendstring))
# board randomizer
resource_list = ['Wheat'] * 4 + \
['Sheep'] * 4 + \
['Ore'] * 3 + \
['Brick'] * 3 + \
['Wood'] * 4 + \
['Desert'] * 1
number_list = [2, 12]
for index in range(3, 12):
if index == 7:
pass
else:
number_list.append(index)
number_list.append(index)
port_list = ['Wheat'] + \
['Sheep'] + \
['Ore'] + \
['Brick'] + \
['Wood'] + \
['None'] * 4
developmentDeck = ['knight'] * 15 + \
['roadBuilding'] * 2 + \
['yearOfPlenty'] * 2 + \
['monopoly'] * 2 + \
['victoryPoint'] * 5
random.shuffle(developmentDeck)
random.shuffle(number_list)
random.shuffle(resource_list)
random.shuffle(port_list)
numberstring = 'board|' + ','.join([str(elem) for elem in number_list]) + '|' + ','.join(
resource_list) + '|' + ','.join(port_list)
sendToAll(numberstring)
winner = False
# setup
for cli in clients:
cli.socket.send(str.encode("set\n"))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("set," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
cli.socket.send(str.encode('startroad\n'))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("road," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
for cli in reversed(clients):
cli.socket.send(str.encode("set\n"))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("set," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
cli.socket.send(str.encode('startroad\n'))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("road," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
for cli in clients:
cli.socket.send(str.encode('getstart\n'))
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
while (not winner):
for cli in clients:
dice = dice_roll()
sendToAll('dice,' + dice)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
if (int(dice.split(',')[0]) + int(dice.split(',')[1]) == 7):
print("afjgsadkfjsad")
cli.socket.send(str.encode('robber\n'))
else:
cli.socket.send(str.encode('turn\n'))
while True:
message = readfromBuffer(cli.socket)
print(message)
if (message == "end"):
break
if (message.split(',')[0] == "winner"):
print(message)
winner=True
sendToAll(message)
break
elif (message == "dev"):
card = developmentDeck.pop(0)
cli.socket.send(str.encode(card + '\n'))
else:
sendToAllButOne(message, cli)
print("here")
cli.socket.send(str.encode('notturn\n'))
# message=readfromBuffer(cli.socket)
# print(message)
# sendToAllButOne(message, cli)
time.sleep(10)
# game loop
for cli in clients:
cli.socket.send(str.encode("quit\n"))
ServerSocket.close()
| [
"time.sleep",
"random.randint",
"random.shuffle",
"socket.socket"
] | [((579, 601), 'random.shuffle', 'random.shuffle', (['Colors'], {}), '(Colors)\n', (593, 601), False, 'import random\n'), ((617, 632), 'socket.socket', 'socket.socket', ([], {}), '()\n', (630, 632), False, 'import socket\n'), ((1652, 1675), 'random.shuffle', 'random.shuffle', (['clients'], {}), '(clients)\n', (1666, 1675), False, 'import random\n'), ((2627, 2658), 'random.shuffle', 'random.shuffle', (['developmentDeck'], {}), '(developmentDeck)\n', (2641, 2658), False, 'import random\n'), ((2659, 2686), 'random.shuffle', 'random.shuffle', (['number_list'], {}), '(number_list)\n', (2673, 2686), False, 'import random\n'), ((2687, 2716), 'random.shuffle', 'random.shuffle', (['resource_list'], {}), '(resource_list)\n', (2701, 2716), False, 'import random\n'), ((2717, 2742), 'random.shuffle', 'random.shuffle', (['port_list'], {}), '(port_list)\n', (2731, 2742), False, 'import random\n'), ((5310, 5324), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (5320, 5324), False, 'import time\n'), ((304, 324), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (318, 324), False, 'import random\n'), ((270, 290), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (284, 290), False, 'import random\n')] |
import os
import numpy as np
from matplotlib import pyplot as plt
class DrawGraphs:
def __init__(self,path_ONLY):
self.path_ONLY=path_ONLY
if not os.path.exists("./MakeGraph/graphs/"):
os.makedirs("./MakeGraph/graphs/")
def DrawEmotion(self,emotiondataarray):
colors = ["#ff0000", "#ffff00", "#000000", "#0000ff", "#00ff00"]
ylist = [[], [], [], [], []]
for i in range(5):
for j in range(len(emotiondataarray)):
ylist[i].append(emotiondataarray[j][i])
x=list(range(len(emotiondataarray)))
print(x)
fig=plt.figure()
ax = fig.add_subplot(1, 1, 1)
linetype='-'
title='detected emotions (Face only) ' + self.path_ONLY
for i in range(5):
ax.plot(x,ylist[i],linetype,c=colors[i],linewidth=1)
# 汎用要素
ax.grid(True)
ax.set_xlabel('frame [?]')
ax.set_ylabel('exist rate')
ax.set_title(title)
ax.legend(['angry','happy','neutral','sad','surprise'])
fig.canvas.draw()
im = np.array(fig.canvas.renderer.buffer_rgba())
return im | [
"os.path.exists",
"matplotlib.pyplot.figure",
"os.makedirs"
] | [((618, 630), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (628, 630), True, 'from matplotlib import pyplot as plt\n'), ((169, 206), 'os.path.exists', 'os.path.exists', (['"""./MakeGraph/graphs/"""'], {}), "('./MakeGraph/graphs/')\n", (183, 206), False, 'import os\n'), ((220, 254), 'os.makedirs', 'os.makedirs', (['"""./MakeGraph/graphs/"""'], {}), "('./MakeGraph/graphs/')\n", (231, 254), False, 'import os\n')] |
#!/usr/bin/env python
from setuptools import setup
from os.path import abspath, dirname, join
from codecs import open
here = abspath(dirname(__file__))
long_description = ''
with open(join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='flask_logging_decorator',
version='0.0.5',
description='Simple logging decorator for Flask.',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/sgykfjsm/flask-logging-decorator',
author='<NAME>',
author_email='<EMAIL>',
python_requires=">=3.5, !=2.*.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
],
keywords='flask logging decorator',
py_modules=('flask-logging-decorator',),
packages=['flask_logging_decorator']
)
| [
"os.path.join",
"os.path.dirname",
"setuptools.setup"
] | [((268, 879), 'setuptools.setup', 'setup', ([], {'name': '"""flask_logging_decorator"""', 'version': '"""0.0.5"""', 'description': '"""Simple logging decorator for Flask."""', 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'url': '"""https://github.com/sgykfjsm/flask-logging-decorator"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'python_requires': '""">=3.5, !=2.*.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"""', 'classifiers': "['License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6']", 'keywords': '"""flask logging decorator"""', 'py_modules': "('flask-logging-decorator',)", 'packages': "['flask_logging_decorator']"}), "(name='flask_logging_decorator', version='0.0.5', description=\n 'Simple logging decorator for Flask.', long_description=\n long_description, long_description_content_type='text/markdown', url=\n 'https://github.com/sgykfjsm/flask-logging-decorator', author='<NAME>',\n author_email='<EMAIL>', python_requires=\n '>=3.5, !=2.*.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', classifiers=[\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6'], keywords=\n 'flask logging decorator', py_modules=('flask-logging-decorator',),\n packages=['flask_logging_decorator'])\n", (273, 879), False, 'from setuptools import setup\n'), ((134, 151), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (141, 151), False, 'from os.path import abspath, dirname, join\n'), ((186, 209), 'os.path.join', 'join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (190, 209), False, 'from os.path import abspath, dirname, join\n')] |
from web3 import Web3
import contracts.doe_token_abi as doe_token_abi
def get_main_balance(w3, wallet):
contract_address = "0xf8E9F10c22840b613cdA05A0c5Fdb59A4d6cd7eF"
contract = w3.eth.contract(address=contract_address, abi=doe_token_abi.get_abi())
balanceOf = contract.functions.balanceOf(wallet).call()
return Web3.fromWei(balanceOf, 'ether')
def get_arb_balance(w3, wallet):
contract_address = "0xE71Db7a96daB25cDb9f4cbC7F686da02192B0E88"
contract = w3.eth.contract(address=contract_address, abi=doe_token_abi.get_abi())
balanceOf = contract.functions.balanceOf(wallet).call()
return Web3.fromWei(balanceOf, 'ether')
| [
"web3.Web3.fromWei",
"contracts.doe_token_abi.get_abi"
] | [((331, 363), 'web3.Web3.fromWei', 'Web3.fromWei', (['balanceOf', '"""ether"""'], {}), "(balanceOf, 'ether')\n", (343, 363), False, 'from web3 import Web3\n'), ((623, 655), 'web3.Web3.fromWei', 'Web3.fromWei', (['balanceOf', '"""ether"""'], {}), "(balanceOf, 'ether')\n", (635, 655), False, 'from web3 import Web3\n'), ((235, 258), 'contracts.doe_token_abi.get_abi', 'doe_token_abi.get_abi', ([], {}), '()\n', (256, 258), True, 'import contracts.doe_token_abi as doe_token_abi\n'), ((527, 550), 'contracts.doe_token_abi.get_abi', 'doe_token_abi.get_abi', ([], {}), '()\n', (548, 550), True, 'import contracts.doe_token_abi as doe_token_abi\n')] |
import os, sys
thisdir = os.path.dirname(os.path.abspath(__file__))
libdir = os.path.abspath(os.path.join(thisdir, '../../../'))
if libdir not in sys.path:
sys.path.insert(0, libdir)
| [
"os.path.abspath",
"sys.path.insert",
"os.path.join"
] | [((41, 66), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (56, 66), False, 'import os, sys\n'), ((93, 127), 'os.path.join', 'os.path.join', (['thisdir', '"""../../../"""'], {}), "(thisdir, '../../../')\n", (105, 127), False, 'import os, sys\n'), ((161, 187), 'sys.path.insert', 'sys.path.insert', (['(0)', 'libdir'], {}), '(0, libdir)\n', (176, 187), False, 'import os, sys\n')] |
import datetime as dt
import re
from abc import ABC, abstractmethod
from decimal import Decimal
from typing import Any, Callable, Generator, Optional
from uuid import UUID
from aiochclient.exceptions import ChClientError
try:
import ciso8601
datetime_parse = date_parse = ciso8601.parse_datetime
except ImportError:
def datetime_parse(string):
return dt.datetime.strptime(string, '%Y-%m-%d %H:%M:%S')
def date_parse(string):
return dt.datetime.strptime(string, '%Y-%m-%d')
__all__ = ["what_py_converter", "rows2ch"]
RE_TUPLE = re.compile(r"^Tuple\((.*)\)$")
RE_ARRAY = re.compile(r"^Array\((.*)\)$")
RE_NULLABLE = re.compile(r"^Nullable\((.*)\)$")
RE_LOW_CARDINALITY = re.compile(r"^LowCardinality\((.*)\)$")
class BaseType(ABC):
__slots__ = ("name", "container")
ESC_CHR_MAPPING = {
b"b": b"\b",
b"N": b"\\N", # NULL
b"f": b"\f",
b"r": b"\r",
b"n": b"\n",
b"t": b"\t",
b"0": b" ",
b"'": b"'",
b"\\": b"\\",
}
DQ = "'"
CM = ","
TUP_OP = '('
TUP_CLS = ')'
ARR_OP = '['
ARR_CLS = ']'
def __init__(self, name: str, container: bool = False):
self.name = name
self.container = container
@abstractmethod
def p_type(self, string):
""" Function for implementing specific actions for each type """
@classmethod
def decode(cls, val: bytes) -> str:
"""
Converting bytes from clickhouse with
backslash-escaped special characters
to pythonic string format
"""
n = val.find(b"\\")
if n < 0:
return val.decode()
n += 1
d = val[:n]
b = val[n:]
while b:
d = d[:-1] + cls.ESC_CHR_MAPPING.get(b[0:1], b[0:1])
b = b[1:]
n = b.find(b"\\")
if n < 0:
d = d + b
break
n += 1
d = d + b[:n]
b = b[n:]
return d.decode()
@classmethod
def seq_parser(cls, raw: str) -> Generator[str, None, None]:
"""
Generator for parsing tuples and arrays.
Returns elements one by one
"""
cur = []
in_str = False
in_arr = False
in_tup = False
if not raw:
return None
for sym in raw:
if not (in_str or in_arr or in_tup):
if sym == cls.CM:
yield "".join(cur)
cur = []
continue
elif sym == cls.DQ:
in_str = not in_str
elif sym == cls.ARR_OP:
in_arr = True
elif sym == cls.TUP_OP:
in_tup = True
elif in_str and sym == cls.DQ:
in_str = not in_str
elif in_arr and sym == cls.ARR_CLS:
in_arr = False
elif in_tup and sym == cls.TUP_CLS:
in_tup = False
cur.append(sym)
yield "".join(cur)
def convert(self, value: bytes) -> Any:
return self.p_type(self.decode(value))
@staticmethod
def unconvert(value) -> bytes:
return b"%a" % value
class StrType(BaseType):
def p_type(self, string: str):
if self.container:
return string.strip("'")
return string
@staticmethod
def unconvert(value: str) -> bytes:
value = value.replace("\\", "\\\\").replace("'", "\\'")
return f"'{value}'".encode()
class IntType(BaseType):
p_type = int
def convert(self, value: bytes) -> Any:
return self.p_type(value)
@staticmethod
def unconvert(value: int) -> bytes:
return b"%d" % value
class FloatType(IntType):
p_type = float
@staticmethod
def unconvert(value: float) -> bytes:
return b"%r" % value
class DateType(BaseType):
def p_type(self, string: str):
string = string.strip("'")
try:
return date_parse(string).date()
except ValueError:
# In case of 0000-00-00
if string == "0000-00-00":
return None
raise
def convert(self, value: bytes) -> Optional[dt.date]:
return self.p_type(value.decode())
@staticmethod
def unconvert(value: dt.date) -> bytes:
return b"%a" % str(value)
class DateTimeType(BaseType):
def p_type(self, string: str):
string = string.strip("'")
try:
return datetime_parse(string)
except ValueError:
# In case of 0000-00-00 00:00:00
if string == "0000-00-00 00:00:00":
return None
raise
def convert(self, value: bytes) -> Optional[dt.datetime]:
return self.p_type(value.decode())
@staticmethod
def unconvert(value: dt.datetime) -> bytes:
return b"%a" % str(value.replace(microsecond=0))
class UUIDType(BaseType):
def p_type(self, string):
return UUID(string.strip("'"))
def convert(self, value: bytes) -> UUID:
return self.p_type(value.decode())
@staticmethod
def unconvert(value: UUID) -> bytes:
return b"%a" % str(value)
class TupleType(BaseType):
__slots__ = ("name", "types")
def __init__(self, name: str, **kwargs):
super().__init__(name, **kwargs)
tps = RE_TUPLE.findall(name)[0]
self.types = tuple(what_py_type(tp, container=True) for tp in tps.split(","))
def p_type(self, string: str) -> tuple:
return tuple(
tp.p_type(val)
for tp, val in zip(self.types, self.seq_parser(string.strip("()")))
)
@staticmethod
def unconvert(value) -> bytes:
return b"(" + b",".join(py2ch(elem) for elem in value) + b")"
class ArrayType(BaseType):
__slots__ = ("name", "type")
def __init__(self, name: str, **kwargs):
super().__init__(name, **kwargs)
self.type = what_py_type(RE_ARRAY.findall(name)[0], container=True)
def p_type(self, string: str) -> list:
return [self.type.p_type(val) for val in self.seq_parser(string[1:-1])]
@staticmethod
def unconvert(value) -> bytes:
return b"[" + b",".join(py2ch(elem) for elem in value) + b"]"
class NullableType(BaseType):
__slots__ = ("name", "type")
NULLABLE = {r"\N", "NULL"}
def __init__(self, name: str, **kwargs):
super().__init__(name, **kwargs)
self.type = what_py_type(RE_NULLABLE.findall(name)[0])
def p_type(self, string: str) -> Any:
if string in self.NULLABLE:
return None
return self.type.p_type(string)
@staticmethod
def unconvert(value) -> bytes:
return b"NULL"
class NothingType(BaseType):
def p_type(self, string: str) -> None:
return None
def convert(self, value: bytes) -> None:
return None
class LowCardinalityType(BaseType):
__slots__ = ("name", "type")
def __init__(self, name: str, **kwargs):
super().__init__(name, **kwargs)
self.type = what_py_type(RE_LOW_CARDINALITY.findall(name)[0])
def p_type(self, string: str) -> Any:
return self.type.p_type(string)
class DecimalType(BaseType):
p_type = Decimal
def convert(self, value: bytes) -> Decimal:
return self.p_type(value.decode())
@staticmethod
def unconvert(value: Decimal) -> bytes:
return str(value).encode()
CH_TYPES_MAPPING = {
"UInt8": IntType,
"UInt16": IntType,
"UInt32": IntType,
"UInt64": IntType,
"Int8": IntType,
"Int16": IntType,
"Int32": IntType,
"Int64": IntType,
"Float32": FloatType,
"Float64": FloatType,
"String": StrType,
"FixedString": StrType,
"Enum8": StrType,
"Enum16": StrType,
"Date": DateType,
"DateTime": DateTimeType,
"Tuple": TupleType,
"Array": ArrayType,
"Nullable": NullableType,
"Nothing": NothingType,
"UUID": UUIDType,
"LowCardinality": LowCardinalityType,
"Decimal": DecimalType,
"Decimal32": DecimalType,
"Decimal64": DecimalType,
"Decimal128": DecimalType,
}
PY_TYPES_MAPPING = {
int: IntType.unconvert,
float: FloatType.unconvert,
str: StrType.unconvert,
dt.date: DateType.unconvert,
dt.datetime: DateTimeType.unconvert,
tuple: TupleType.unconvert,
list: ArrayType.unconvert,
type(None): NullableType.unconvert,
UUID: UUIDType.unconvert,
Decimal: DecimalType.unconvert,
}
def what_py_type(name: str, container: bool = False) -> BaseType:
""" Returns needed type class from clickhouse type name """
name = name.strip()
try:
return CH_TYPES_MAPPING[name.split("(")[0]](name, container=container)
except KeyError:
raise ChClientError(f"Unrecognized type name: '{name}'")
def what_py_converter(name: str, container: bool = False) -> Callable:
""" Returns needed type class from clickhouse type name """
return what_py_type(name, container).convert
def py2ch(value):
try:
return PY_TYPES_MAPPING[type(value)](value)
except KeyError:
raise ChClientError(
f"Unrecognized type: '{type(value)}'. "
f"The value type should be exactly one of "
f"int, float, str, dt.date, dt.datetime, tuple, list, uuid.UUID (or None). "
f"No subclasses yet."
)
def rows2ch(*rows):
return b",".join(TupleType.unconvert(row) for row in rows)
def json2ch(*records, dumps: Callable[[Any], bytes]):
return dumps(records)[1:-1]
| [
"datetime.datetime.strptime",
"aiochclient.exceptions.ChClientError",
"re.compile"
] | [((593, 624), 're.compile', 're.compile', (['"""^Tuple\\\\((.*)\\\\)$"""'], {}), "('^Tuple\\\\((.*)\\\\)$')\n", (603, 624), False, 'import re\n'), ((636, 667), 're.compile', 're.compile', (['"""^Array\\\\((.*)\\\\)$"""'], {}), "('^Array\\\\((.*)\\\\)$')\n", (646, 667), False, 'import re\n'), ((682, 716), 're.compile', 're.compile', (['"""^Nullable\\\\((.*)\\\\)$"""'], {}), "('^Nullable\\\\((.*)\\\\)$')\n", (692, 716), False, 'import re\n'), ((738, 778), 're.compile', 're.compile', (['"""^LowCardinality\\\\((.*)\\\\)$"""'], {}), "('^LowCardinality\\\\((.*)\\\\)$')\n", (748, 778), False, 'import re\n'), ((391, 440), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['string', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(string, '%Y-%m-%d %H:%M:%S')\n", (411, 440), True, 'import datetime as dt\n'), ((488, 528), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['string', '"""%Y-%m-%d"""'], {}), "(string, '%Y-%m-%d')\n", (508, 528), True, 'import datetime as dt\n'), ((9138, 9188), 'aiochclient.exceptions.ChClientError', 'ChClientError', (['f"""Unrecognized type name: \'{name}\'"""'], {}), '(f"Unrecognized type name: \'{name}\'")\n', (9151, 9188), False, 'from aiochclient.exceptions import ChClientError\n')] |
from space_api.utils import generate_find, AND
from space_api.transport import Transport
from space_api.response import Response
class Update:
"""
The DB Update Class
::
from space_api import API, AND, OR, COND
api = API("My-Project", "localhost:4124")
db = api.mongo() # For a MongoDB interface
response = db.update('posts').where(AND(COND('title', '==', 'Title1'))).set({'title':'Title2'}).apply()
:param transport: (Transport) The API's transport instance
:param collection: (str) The collection name
:param db_type: (str) The database type
:param operation: (str) The (optional) operation (one/all/upsert) (Defaults to 'all')
"""
def __init__(self, transport: Transport, collection: str, db_type: str, operation: str = 'all'):
self.transport = transport
self.collection = collection
self.db_type = db_type
self.operation = operation
self.params = {'find': {}, 'update': {}}
def where(self, *conditions) -> 'Update':
"""
Prepares the find parameters
:param conditions: (*) The conditions to find by
"""
self.params['find'] = generate_find(AND(*conditions))
return self
def set(self, obj) -> 'Update':
"""
Prepares the updated values
::
response = db.update('posts').set({'author': 'Drake'}).apply()
:param obj: An object containing the fields to set
"""
self.params['update']['$set'] = obj
return self
def push(self, obj) -> 'Update':
"""
Adds an item to an list
::
response = db.update('posts').push({'author': 'Drake'}).apply()
:param obj: An object containing the fields to set
"""
self.params['update']['$push'] = obj
return self
def remove(self, *fields) -> 'Update':
"""
Removes the specified fields from a document
::
response = db.update('posts').remove('age', 'likes').apply()
:param fields: (*) The fields to be removed
"""
self.params['update']['$unset'] = {x: '' for x in fields}
return self
def rename(self, obj) -> 'Update':
"""
Renames the specified fields
::
response = db.update('posts').rename({'mobile': 'contact'}).apply()
:param obj: An object containing the fields to rename
"""
self.params['update']['$rename'] = obj
return self
def inc(self, obj) -> 'Update':
"""
Increments the value of a field by a specified amount
::
response = db.update('posts').inc({'views': 1}).apply()
:param obj: An object containing the fields to increment, along with the increment value
"""
self.params['update']['$inc'] = obj
return self
def mul(self, obj) -> 'Update':
"""
Multiplies the value of a field by a specified amount
::
response = db.update('posts').mul({'amount': 4}).apply()
:param obj: An object containing the fields to multiply, along with the multiplier value
"""
self.params['update']['$mul'] = obj
return self
def max(self, obj) -> 'Update':
"""
Updates the field if the specified value is greater than the existing field value
::
response = db.update('posts').max({'highScore': 1200}).apply()
:param obj: An object containing the fields to set
"""
self.params['update']['$max'] = obj
return self
def min(self, obj) -> 'Update':
"""
Updates the field if the specified value is lesser than the existing field value
::
response = db.update('posts').min({'lowestScore': 300}).apply()
:param obj: An object containing the fields to set
"""
self.params['update']['$min'] = obj
return self
def current_timestamp(self, *values) -> 'Update':
"""
Sets the value of a field(s) to the current timestamp
::
response = db.update('posts').current_timestamp('lastModified').apply()
:param values: (*) A list containing the fields to set
"""
if self.params['update'].get('$currentDate') is None:
self.params['update']['$currentDate'] = {}
self.params['update']['$currentDate'].update({x: {'$type': 'timestamp'} for x in values})
return self
def current_date(self, *values) -> 'Update':
"""
Sets the value of a field(s) to the date
::
response = db.update('posts').current_date('lastModified').apply()
:param values: (*) A list containing the fields to set
"""
if self.params['update'].get('$currentDate') is None:
self.params['update']['$currentDate'] = {}
self.params['update']['$currentDate'].update({x: {'$type': 'date'} for x in values})
return self
def apply(self) -> Response:
"""
Triggers the update request
:return: (Response) The response object containing values corresponding to the request
"""
return self.transport.update(self.params['find'], self.operation, self.params['update'], self.db_type,
self.collection)
__all__ = ['Update']
| [
"space_api.utils.AND"
] | [((1201, 1217), 'space_api.utils.AND', 'AND', (['*conditions'], {}), '(*conditions)\n', (1204, 1217), False, 'from space_api.utils import generate_find, AND\n')] |
#!/usr/bin/python
import socket
import struct
import math
import time
import Keithley_PS228xS_Sockets_Driver as ps
echoCmd = 1
#===== MAIN PROGRAM STARTS HERE =====
ipAddress1 = "192.168.127.12"
ipAddress2 = "172.16.17.32"
ipAddress3 = "192.168.127.12"
port = 5025
timeout = 20.0
t1 = time.time()
#ps.instrConnect(s1, ipAddress1, port, timeout, 0, 0)
s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s1, idStr = ps.PowerSupply_Connect(s1, ipAddress1, port, timeout, echoCmd, 1, 1)
print(idStr)
ps.PowerSupply_SetVoltage(s1, 10.0)
ps.PowerSupply_SetCurrent(s1, 1.5)
ps.PowerSupply_SetVoltageProtection(s1, 33.0)
ps.PowerSupply_SetCurrentProtection(s1, 2.0)
print(ps.PowerSupply_GetVoltage(s1))
print(ps.PowerSupply_GetCurrent(s1))
ps.PowerSupply_SetDataFormat(s1, 1, 0, 0)
ps.PowerSupply_SetOutputState(s1, 1)
ps.PowerSupply_SetDisplayText(s1, "Powering On DUT...")
print(ps.PowerSupply_GetOutputState(s1))
time.sleep(3.0)
print(ps.PowerSupply_MeasureCurrent(s1))
print(ps.PowerSupply_MeasureVoltage(s1))
time.sleep(1.0)
ps.PowerSupply_SetOutputState(s1, 0)
ps.PowerSupply_SetDisplayText(s1, "Powering Off DUT...")
ps.PowerSupply_Disconnect(s1)
t2 = time.time()
# Notify the user of completion and the test time achieved.
print("done")
print("{0:.6f} s".format(t2-t1))
input("Press Enter to continue...")
exit()
exit()
| [
"Keithley_PS228xS_Sockets_Driver.PowerSupply_MeasureVoltage",
"socket.socket",
"Keithley_PS228xS_Sockets_Driver.PowerSupply_GetCurrent",
"Keithley_PS228xS_Sockets_Driver.PowerSupply_Connect",
"Keithley_PS228xS_Sockets_Driver.PowerSupply_SetOutputState",
"Keithley_PS228xS_Sockets_Driver.PowerSupply_GetOutp... | [((289, 300), 'time.time', 'time.time', ([], {}), '()\n', (298, 300), False, 'import time\n'), ((362, 411), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (375, 411), False, 'import socket\n'), ((424, 492), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_Connect', 'ps.PowerSupply_Connect', (['s1', 'ipAddress1', 'port', 'timeout', 'echoCmd', '(1)', '(1)'], {}), '(s1, ipAddress1, port, timeout, echoCmd, 1, 1)\n', (446, 492), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((506, 541), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetVoltage', 'ps.PowerSupply_SetVoltage', (['s1', '(10.0)'], {}), '(s1, 10.0)\n', (531, 541), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((542, 576), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetCurrent', 'ps.PowerSupply_SetCurrent', (['s1', '(1.5)'], {}), '(s1, 1.5)\n', (567, 576), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((578, 623), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetVoltageProtection', 'ps.PowerSupply_SetVoltageProtection', (['s1', '(33.0)'], {}), '(s1, 33.0)\n', (613, 623), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((624, 668), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetCurrentProtection', 'ps.PowerSupply_SetCurrentProtection', (['s1', '(2.0)'], {}), '(s1, 2.0)\n', (659, 668), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((745, 786), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetDataFormat', 'ps.PowerSupply_SetDataFormat', (['s1', '(1)', '(0)', '(0)'], {}), '(s1, 1, 0, 0)\n', (773, 786), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((788, 824), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetOutputState', 'ps.PowerSupply_SetOutputState', (['s1', '(1)'], {}), '(s1, 1)\n', (817, 824), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((825, 880), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetDisplayText', 'ps.PowerSupply_SetDisplayText', (['s1', '"""Powering On DUT..."""'], {}), "(s1, 'Powering On DUT...')\n", (854, 880), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((922, 937), 'time.sleep', 'time.sleep', (['(3.0)'], {}), '(3.0)\n', (932, 937), False, 'import time\n'), ((1021, 1036), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (1031, 1036), False, 'import time\n'), ((1038, 1074), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetOutputState', 'ps.PowerSupply_SetOutputState', (['s1', '(0)'], {}), '(s1, 0)\n', (1067, 1074), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((1075, 1131), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_SetDisplayText', 'ps.PowerSupply_SetDisplayText', (['s1', '"""Powering Off DUT..."""'], {}), "(s1, 'Powering Off DUT...')\n", (1104, 1131), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((1132, 1161), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_Disconnect', 'ps.PowerSupply_Disconnect', (['s1'], {}), '(s1)\n', (1157, 1161), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((1168, 1179), 'time.time', 'time.time', ([], {}), '()\n', (1177, 1179), False, 'import time\n'), ((676, 705), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_GetVoltage', 'ps.PowerSupply_GetVoltage', (['s1'], {}), '(s1)\n', (701, 705), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((713, 742), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_GetCurrent', 'ps.PowerSupply_GetCurrent', (['s1'], {}), '(s1)\n', (738, 742), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((887, 920), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_GetOutputState', 'ps.PowerSupply_GetOutputState', (['s1'], {}), '(s1)\n', (916, 920), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((945, 978), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_MeasureCurrent', 'ps.PowerSupply_MeasureCurrent', (['s1'], {}), '(s1)\n', (974, 978), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n'), ((986, 1019), 'Keithley_PS228xS_Sockets_Driver.PowerSupply_MeasureVoltage', 'ps.PowerSupply_MeasureVoltage', (['s1'], {}), '(s1)\n', (1015, 1019), True, 'import Keithley_PS228xS_Sockets_Driver as ps\n')] |
from django.db.models import Q
from hier.search import SearchResult
from hier.grp_lst import search as hier_search
from hier.params import get_search_mode
from .models import app_name, Task
def search(user, query):
result = SearchResult(query)
search_mode = get_search_mode(query)
lookups = None
if (search_mode == 1):
lookups = Q(name__icontains=query) | Q(info__icontains=query) | Q(url__icontains=query)
elif (search_mode == 2):
lookups = Q(categories__icontains=query[1:])
items = Task.objects.filter(user = user.id).filter(lookups).distinct()
for item in items:
result.add(app_name, 'task', item.id, item.created.date(), item.name, item.info)
result.items += hier_search(user, app_name, query)
return result.items
| [
"django.db.models.Q",
"hier.grp_lst.search",
"hier.params.get_search_mode",
"hier.search.SearchResult"
] | [((229, 248), 'hier.search.SearchResult', 'SearchResult', (['query'], {}), '(query)\n', (241, 248), False, 'from hier.search import SearchResult\n'), ((268, 290), 'hier.params.get_search_mode', 'get_search_mode', (['query'], {}), '(query)\n', (283, 290), False, 'from hier.params import get_search_mode\n'), ((727, 761), 'hier.grp_lst.search', 'hier_search', (['user', 'app_name', 'query'], {}), '(user, app_name, query)\n', (738, 761), True, 'from hier.grp_lst import search as hier_search\n'), ((409, 432), 'django.db.models.Q', 'Q', ([], {'url__icontains': 'query'}), '(url__icontains=query)\n', (410, 432), False, 'from django.db.models import Q\n'), ((480, 514), 'django.db.models.Q', 'Q', ([], {'categories__icontains': 'query[1:]'}), '(categories__icontains=query[1:])\n', (481, 514), False, 'from django.db.models import Q\n'), ((355, 379), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'query'}), '(name__icontains=query)\n', (356, 379), False, 'from django.db.models import Q\n'), ((382, 406), 'django.db.models.Q', 'Q', ([], {'info__icontains': 'query'}), '(info__icontains=query)\n', (383, 406), False, 'from django.db.models import Q\n')] |
# Script used to read all help text from Neato.
# Simply connect Neato, update your port
# name ('/dev/neato') and run this script.
# All help markdown is written to a file in the
# same directory called neato_help.md
# Author: <NAME> <EMAIL>
# License: MIT
# Run this script: python api.py
# Note: This script does not save your serial numbers. #Prevent Serial Write parts below prevent the write out serial numbers.
import neato_driver as robot
robot.init('/dev/tty.usbmodem14601')
commands = []
toc = "\n## Table of Contents\n"
def add_section(level, title):
global toc
toc += "\n" + ((len(level)-2) * " ") + \
"- [" + title + "](#" + "-".join(title.lower().split()) + ")"
return "\n\n" + level + " " + title
def print_lines(text=None):
markdown = "\n\n```"
if text:
lines = text.split('\n')
for line in lines:
# all lines line.find('Serial') != -1 or
if line == '' or line.startswith("Help Strlen"):
# ignore serial numbers and blank lines
markdown = markdown # do nothing
else:
markdown += "\n" + line
return markdown + "\n```"
# print help output
help = robot.Help()
main = ""
for line in help.split('\n')[1:]:
if line.find(' - ') != -1:
parts = line.split(" - ")
commands.append(parts[0])
# iterate help output to request command specific output
for command in commands:
# command
if command == "SetMotor":
main += add_section(
"##", "SetMotorBrush, SetMotorVacuum, SetMotorWheelsEnable, and SetMotorWheels")
else:
main += add_section("##", command)
# command help
desc = "\n\nThe below description from the Neato vacuum maps to the `neato_driver." + \
command + "()` function"
if command == "SetMotor":
desc = "\n\nThe below description from the Neato vacuum maps to the `neato_driver.`SetMotorBrush()`, `neato_driver.SetMotorVacuum()`, `neato_driver.SetMotorWheelsEnable()`, and `neato_driver.SetMotorWheels()` functions. These were divided to make it easier to integrate to the Neato."
if command == "SetIEC":
desc = "\n\nThe SetIEC function is not supported by this driver."
if command == "GetSysLog":
desc = "\n\nThe GetSysLog function was not implemented in the test Neato. The raw results are returned."
if command == "Upload":
desc = "\n\nThe Upload function is not supported by this driver."
main += desc
main += print_lines(robot.Help(command))
# command example
if command.startswith('Get') or command.startswith('Help'):
fn = getattr(robot, command)
result = fn()
if type(result) is dict:
for key in result:
if str(key).find("Serial") > -1:
result[key] = "SERIAL-EXCLUDED"
example = str(result)
main += "\n\nReturns: " + "`" + str(type(result)) + "`"
main += "\n\n**Data Example:**"
main += print_lines(example)
header = "# API\n"
header += '\n'
header += "This describes the `neato_driver.py` API. The documentation is ordered and grouped according to the Neato API which matches the `neato_driver.py` API in all but a few cases. Any differences are noted.\n\n"
header += "Each of the `neato_driver` functions are described below along with the Neato vacuum supplied help description that describes the function and if the function returns data, the data type returned and an example is provided.\n\n"
header += "This was generated using the `api.py` script. To produce the documentation, adjust the serial port to match your Neato's and run the script.\n"
# write out file, overwrites any existing file
helpResponseProcessor = open("api.md", "w")
helpResponseProcessor.write(header+main) # +toc
helpResponseProcessor.close()
print("Done creating api.md document")
| [
"neato_driver.init",
"neato_driver.Help"
] | [((452, 488), 'neato_driver.init', 'robot.init', (['"""/dev/tty.usbmodem14601"""'], {}), "('/dev/tty.usbmodem14601')\n", (462, 488), True, 'import neato_driver as robot\n'), ((1212, 1224), 'neato_driver.Help', 'robot.Help', ([], {}), '()\n', (1222, 1224), True, 'import neato_driver as robot\n'), ((2535, 2554), 'neato_driver.Help', 'robot.Help', (['command'], {}), '(command)\n', (2545, 2554), True, 'import neato_driver as robot\n')] |
import sublime, sublime_plugin
import json
import useutil
class UseImportJumpCommand(sublime_plugin.TextCommand):
def description(self):
return 'Jump to File (Use-Import)'
def is_enabled(self):
return self.is_javascript_view()
def is_visible(self):
return self.is_javascript_view() and self.is_use_import_name()
def run(self, edit):
if self.is_javascript_view():
name = self.find_use_import_name()
if (name != False):
data = self.get_config()
if name in data:
relpath = data.get(name)
configpath = self.view.settings().get('UseImport_use_json_path')
abspath = useutil.get_abs_filepath(relpath, configpath)
if abspath != False:
self.view.window().open_file(abspath)
def is_javascript_view(self):
file_syntax = self.view.settings().get('syntax')
return useutil.is_javascript_syntax(file_syntax)
def is_use_import_name(self):
sels = self.view.sel()
for sel in sels:
curline = self.view.substr(self.view.line(sel))
m = useutil.parse_use_import_name(curline)
if (m != False):
return True
return False
def find_use_import_name(self):
sels = self.view.sel()
for sel in sels:
curline = self.view.substr(self.view.line(sel))
m = useutil.parse_use_import_name(curline)
if (m != False):
return m
return False
def get_config(self):
if self.view.settings().has('UseImport_use_json_path'):
filepath = self.view.settings().get('UseImport_use_json_path')
else:
filepath = useutil.search(self.view.file_name())
self.view.settings().set('UseImport_use_json_path', filepath)
if filepath != False:
return self.load_file(filepath)
return False
def load_file(self, filepath):
with open(filepath, 'r') as myfile:
rawdata = myfile.read()
return json.loads(rawdata)
| [
"useutil.get_abs_filepath",
"json.loads",
"useutil.is_javascript_syntax",
"useutil.parse_use_import_name"
] | [((992, 1033), 'useutil.is_javascript_syntax', 'useutil.is_javascript_syntax', (['file_syntax'], {}), '(file_syntax)\n', (1020, 1033), False, 'import useutil\n'), ((2142, 2161), 'json.loads', 'json.loads', (['rawdata'], {}), '(rawdata)\n', (2152, 2161), False, 'import json\n'), ((1201, 1239), 'useutil.parse_use_import_name', 'useutil.parse_use_import_name', (['curline'], {}), '(curline)\n', (1230, 1239), False, 'import useutil\n'), ((1487, 1525), 'useutil.parse_use_import_name', 'useutil.parse_use_import_name', (['curline'], {}), '(curline)\n', (1516, 1525), False, 'import useutil\n'), ((736, 781), 'useutil.get_abs_filepath', 'useutil.get_abs_filepath', (['relpath', 'configpath'], {}), '(relpath, configpath)\n', (760, 781), False, 'import useutil\n')] |
#创建数据库并把txt文件的数据存进数据库
import sqlite3 #导入sqlite3
cx = sqlite3.connect('FaceRes.db') #创建数据库,如果数据库已经存在,则链接数据库;如果数据库不存在,则先创建数据库,再链接该数据库。
cu = cx.cursor() #定义一个游标,以便获得查询对象。
#cu.execute('create table if not exists train4 (id integer primary key,name text)') #创建表
fr = open('log.txt') #打开要读取的txt文件
for line in fr.readlines(): #将数据按行插入数据库的表VisitRecord中。
line_list = line.split(" ")
time = line_list[0]+" "+line_list[1]
name = line_list[2]
print(time)
print(name)
cu.execute('insert into VisitRecord values(?,?)',(time,name))
cu.close() #关闭游标
cx.commit() #事务提交
cx.close() #关闭数据库 | [
"sqlite3.connect"
] | [((59, 88), 'sqlite3.connect', 'sqlite3.connect', (['"""FaceRes.db"""'], {}), "('FaceRes.db')\n", (74, 88), False, 'import sqlite3\n')] |
from wsgiref.simple_server import make_server
from nlabel.io.carenero.schema import create_session_factory, \
Text, ResultStatus, Result, Tagger, Vector, Vectors
from nlabel.io.carenero.common import ExternalKey
from nlabel.io.common import ArchiveInfo, text_hash_code
from nlabel.io.carenero.common import json_to_result
from nlabel.io.guid import text_guid, tagger_guid
from sqlalchemy.orm import load_only, lazyload
from falcon_auth2 import AuthMiddleware
from falcon_auth2.backends import BasicAuthBackend
import falcon
import click
import json
import functools
import nlabel.version
def user_loader(attributes, user, password, config):
if user == config['user'] and password == config['password']:
return True
else:
return False
class PingResource:
def on_get(self, req, resp):
resp.text = json.dumps({
'version': nlabel.version.__version__
})
resp.status = falcon.HTTP_200
class TaggersByIdResource:
def __init__(self, new_session):
self._new_session = new_session
def on_get(self, req, resp, tagger_id):
session = self._new_session()
try:
tagger = session.query(Tagger).filter(
Tagger.id == tagger_id).first()
if tagger is None:
resp.status = falcon.HTTP_204
else:
resp.status = falcon.HTTP_200
resp.text = json.dumps(tagger.signature)
finally:
session.close()
resp.status = falcon.HTTP_200
class TaggersResource:
def __init__(self, new_session):
self._new_session = new_session
def on_post(self, req, resp):
tagger_data = req.media
session = self._new_session()
try:
tagger_json = json.dumps(
tagger_data, sort_keys=True)
tagger = session.query(Tagger).filter_by(
signature=tagger_json).first()
if tagger is None:
tagger = Tagger(
guid=tagger_guid(),
signature=tagger_json)
session.add(tagger)
session.commit()
session.refresh(tagger)
resp.status = falcon.HTTP_200
resp.text = json.dumps({
'id': tagger.id
})
finally:
session.close()
class TextsResource:
def __init__(self, new_session):
self._new_session = new_session
def on_post(self, req, resp):
text_data = req.media
invalid_keys = set(text_data.keys()) - {
'external_key', 'text', 'meta'}
if invalid_keys:
raise falcon.HTTPInvalidParam(
"media", str(invalid_keys))
external_key = ExternalKey.from_value(
text_data.get('external_key'))
text_key = text_data.get('text')
meta_key = text_data.get('meta')
if meta_key is None:
meta_key = ''
else:
meta_key = json.dumps(meta_key, sort_keys=True)
session = self._new_session()
try:
text_query = session.query(Text)
if external_key is not None:
text = text_query.filter(
Text.external_key == external_key.str,
Text.external_key_type == external_key.type).options(
lazyload('results'),
load_only('id', 'text', 'meta')).first()
if text is not None:
if text.text != text_key:
raise falcon.HTTPConflict(
f"mismatch in stored text data for external key '{external_key.raw}'")
if text.meta != meta_key:
raise falcon.HTTPConflict(
f"mismatch in stored meta data for external key '{external_key.raw}'")
elif text_key is not None:
text = text_query.filter(
Text.text_hash_code == text_hash_code(text_key)).filter(
Text.text == text_key, Text.meta == meta_key).options(
load_only('id')).first().first()
else:
resp.status = falcon.HTTP_422
return
if text is None:
new_text_guid = text_guid()
if external_key is None:
external_key = new_text_guid
if text_key is None:
raise falcon.HTTPInvalidParam(
"media", "missing text")
text = Text(
guid=new_text_guid,
external_key=external_key.str,
external_key_type=external_key.type,
text=text_key,
text_hash_code=text_hash_code(text_key),
meta=meta_key)
session.add(text)
session.commit()
session.refresh(text)
resp.status = falcon.HTTP_200
resp.text = json.dumps({
'id': text.id
})
finally:
session.close()
class ResultsResource:
def __init__(self, new_session):
self._new_session = new_session
def on_get(self, req, resp, tagger_id, text_id):
fields = req.params.get("fields")
session = self._new_session()
try:
result = session.query(Result).filter(
Result.tagger_id == tagger_id, Result.text_id == text_id).first()
if result is None:
resp.status = falcon.HTTP_404
return
data_acc = {
'id': lambda: result.id,
'status': lambda: result.status.name,
'data': lambda: result.data,
'time_created': lambda: result.time_created.isoformat()
}
if fields is not None:
data = {}
for f in fields.split(","):
k = f.strip()
if k not in data_acc:
raise falcon.HTTPInvalidParam(
"fields", f"illegal field {k}")
data[k] = data_acc[k]()
else:
data = dict((k, data_acc[k]()) for k in data_acc.keys())
resp.status = falcon.HTTP_200
resp.text = json.dumps(data)
finally:
session.close()
def on_post(self, req, resp, tagger_id, text_id):
result_data = req.media
session = self._new_session()
try:
if session.query(Result).filter(
Result.tagger_id == tagger_id, Result.text_id == text_id).count() > 0:
raise falcon.HTTPConflict(
f"Result for tagger {tagger_id}, text {text_id} is already in db.")
tagger = session.query(Tagger).filter(Tagger.id == tagger_id).first()
text = session.query(Text).filter(Text.id == text_id).first()
result = json_to_result(
tagger=tagger,
text=text,
status=ResultStatus[result_data['status']],
json_data=result_data['data'])
vectors = result_data.get('vectors')
if vectors is not None:
dtype = vectors['dtype']
for k, v in vectors['data'].items():
x_vectors = [Vector(index=i, data=bytes.fromhex(x)) for i, x in enumerate(v)]
result.vectors.append(Vectors(name=k, dtype=dtype, vectors=x_vectors))
session.add(result)
session.commit()
session.refresh(result)
resp.status = falcon.HTTP_200
resp.text = json.dumps({'id': result.id})
finally:
session.close()
@click.command()
@click.argument('path', type=click.Path(exists=False))
@click.option('--port', default=8000, help='Port to serve on.')
@click.option('--user', default="user", help='Username for basic auth.')
@click.option('--password', required=True, help='Password for basic auth.')
def run(path, port, user, password):
"""Run a server on the given carenero archive."""
info = ArchiveInfo(path, engine='carenero')
new_session = create_session_factory(info.base_path)
auth_backend = BasicAuthBackend(functools.partial(user_loader, config={
'user': user,
'password': password
}))
auth_middleware = AuthMiddleware(auth_backend)
app = falcon.App(middleware=[auth_middleware])
app.add_route('/ping', PingResource())
app.add_route('/taggers', TaggersResource(new_session))
app.add_route('/taggers/{tagger_id:int}', TaggersByIdResource(new_session))
app.add_route('/texts', TextsResource(new_session))
app.add_route('/taggers/{tagger_id:int}/texts/{text_id:int}/results', ResultsResource(new_session))
with make_server('', port, app) as httpd:
print(f'Serving on port {port}...')
# Serve until process is killed
httpd.serve_forever()
if __name__ == '__main__':
run()
| [
"falcon.HTTPConflict",
"sqlalchemy.orm.load_only",
"click.option",
"json.dumps",
"sqlalchemy.orm.lazyload",
"nlabel.io.guid.text_guid",
"nlabel.io.carenero.schema.create_session_factory",
"nlabel.io.guid.tagger_guid",
"click.command",
"wsgiref.simple_server.make_server",
"nlabel.io.common.Archiv... | [((7833, 7848), 'click.command', 'click.command', ([], {}), '()\n', (7846, 7848), False, 'import click\n'), ((7905, 7967), 'click.option', 'click.option', (['"""--port"""'], {'default': '(8000)', 'help': '"""Port to serve on."""'}), "('--port', default=8000, help='Port to serve on.')\n", (7917, 7967), False, 'import click\n'), ((7969, 8040), 'click.option', 'click.option', (['"""--user"""'], {'default': '"""user"""', 'help': '"""Username for basic auth."""'}), "('--user', default='user', help='Username for basic auth.')\n", (7981, 8040), False, 'import click\n'), ((8042, 8116), 'click.option', 'click.option', (['"""--password"""'], {'required': '(True)', 'help': '"""Password for basic auth."""'}), "('--password', required=True, help='Password for basic auth.')\n", (8054, 8116), False, 'import click\n'), ((8220, 8256), 'nlabel.io.common.ArchiveInfo', 'ArchiveInfo', (['path'], {'engine': '"""carenero"""'}), "(path, engine='carenero')\n", (8231, 8256), False, 'from nlabel.io.common import ArchiveInfo, text_hash_code\n'), ((8275, 8313), 'nlabel.io.carenero.schema.create_session_factory', 'create_session_factory', (['info.base_path'], {}), '(info.base_path)\n', (8297, 8313), False, 'from nlabel.io.carenero.schema import create_session_factory, Text, ResultStatus, Result, Tagger, Vector, Vectors\n'), ((8472, 8500), 'falcon_auth2.AuthMiddleware', 'AuthMiddleware', (['auth_backend'], {}), '(auth_backend)\n', (8486, 8500), False, 'from falcon_auth2 import AuthMiddleware\n'), ((8511, 8551), 'falcon.App', 'falcon.App', ([], {'middleware': '[auth_middleware]'}), '(middleware=[auth_middleware])\n', (8521, 8551), False, 'import falcon\n'), ((842, 893), 'json.dumps', 'json.dumps', (["{'version': nlabel.version.__version__}"], {}), "({'version': nlabel.version.__version__})\n", (852, 893), False, 'import json\n'), ((8351, 8426), 'functools.partial', 'functools.partial', (['user_loader'], {'config': "{'user': user, 'password': password}"}), "(user_loader, config={'user': user, 'password': password})\n", (8368, 8426), False, 'import functools\n'), ((8906, 8932), 'wsgiref.simple_server.make_server', 'make_server', (['""""""', 'port', 'app'], {}), "('', port, app)\n", (8917, 8932), False, 'from wsgiref.simple_server import make_server\n'), ((7878, 7902), 'click.Path', 'click.Path', ([], {'exists': '(False)'}), '(exists=False)\n', (7888, 7902), False, 'import click\n'), ((1784, 1823), 'json.dumps', 'json.dumps', (['tagger_data'], {'sort_keys': '(True)'}), '(tagger_data, sort_keys=True)\n', (1794, 1823), False, 'import json\n'), ((2266, 2295), 'json.dumps', 'json.dumps', (["{'id': tagger.id}"], {}), "({'id': tagger.id})\n", (2276, 2295), False, 'import json\n'), ((3008, 3044), 'json.dumps', 'json.dumps', (['meta_key'], {'sort_keys': '(True)'}), '(meta_key, sort_keys=True)\n', (3018, 3044), False, 'import json\n'), ((5052, 5079), 'json.dumps', 'json.dumps', (["{'id': text.id}"], {}), "({'id': text.id})\n", (5062, 5079), False, 'import json\n'), ((6390, 6406), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (6400, 6406), False, 'import json\n'), ((7038, 7158), 'nlabel.io.carenero.common.json_to_result', 'json_to_result', ([], {'tagger': 'tagger', 'text': 'text', 'status': "ResultStatus[result_data['status']]", 'json_data': "result_data['data']"}), "(tagger=tagger, text=text, status=ResultStatus[result_data[\n 'status']], json_data=result_data['data'])\n", (7052, 7158), False, 'from nlabel.io.carenero.common import json_to_result\n'), ((7753, 7782), 'json.dumps', 'json.dumps', (["{'id': result.id}"], {}), "({'id': result.id})\n", (7763, 7782), False, 'import json\n'), ((1424, 1452), 'json.dumps', 'json.dumps', (['tagger.signature'], {}), '(tagger.signature)\n', (1434, 1452), False, 'import json\n'), ((4329, 4340), 'nlabel.io.guid.text_guid', 'text_guid', ([], {}), '()\n', (4338, 4340), False, 'from nlabel.io.guid import text_guid, tagger_guid\n'), ((6750, 6842), 'falcon.HTTPConflict', 'falcon.HTTPConflict', (['f"""Result for tagger {tagger_id}, text {text_id} is already in db."""'], {}), "(\n f'Result for tagger {tagger_id}, text {text_id} is already in db.')\n", (6769, 6842), False, 'import falcon\n'), ((4496, 4544), 'falcon.HTTPInvalidParam', 'falcon.HTTPInvalidParam', (['"""media"""', '"""missing text"""'], {}), "('media', 'missing text')\n", (4519, 4544), False, 'import falcon\n'), ((2032, 2045), 'nlabel.io.guid.tagger_guid', 'tagger_guid', ([], {}), '()\n', (2043, 2045), False, 'from nlabel.io.guid import text_guid, tagger_guid\n'), ((3575, 3670), 'falcon.HTTPConflict', 'falcon.HTTPConflict', (['f"""mismatch in stored text data for external key \'{external_key.raw}\'"""'], {}), '(\n f"mismatch in stored text data for external key \'{external_key.raw}\'")\n', (3594, 3670), False, 'import falcon\n'), ((3772, 3867), 'falcon.HTTPConflict', 'falcon.HTTPConflict', (['f"""mismatch in stored meta data for external key \'{external_key.raw}\'"""'], {}), '(\n f"mismatch in stored meta data for external key \'{external_key.raw}\'")\n', (3791, 3867), False, 'import falcon\n'), ((4818, 4842), 'nlabel.io.common.text_hash_code', 'text_hash_code', (['text_key'], {}), '(text_key)\n', (4832, 4842), False, 'from nlabel.io.common import ArchiveInfo, text_hash_code\n'), ((6103, 6158), 'falcon.HTTPInvalidParam', 'falcon.HTTPInvalidParam', (['"""fields"""', 'f"""illegal field {k}"""'], {}), "('fields', f'illegal field {k}')\n", (6126, 6158), False, 'import falcon\n'), ((7539, 7586), 'nlabel.io.carenero.schema.Vectors', 'Vectors', ([], {'name': 'k', 'dtype': 'dtype', 'vectors': 'x_vectors'}), '(name=k, dtype=dtype, vectors=x_vectors)\n', (7546, 7586), False, 'from nlabel.io.carenero.schema import create_session_factory, Text, ResultStatus, Result, Tagger, Vector, Vectors\n'), ((3379, 3398), 'sqlalchemy.orm.lazyload', 'lazyload', (['"""results"""'], {}), "('results')\n", (3387, 3398), False, 'from sqlalchemy.orm import load_only, lazyload\n'), ((3420, 3451), 'sqlalchemy.orm.load_only', 'load_only', (['"""id"""', '"""text"""', '"""meta"""'], {}), "('id', 'text', 'meta')\n", (3429, 3451), False, 'from sqlalchemy.orm import load_only, lazyload\n'), ((4146, 4161), 'sqlalchemy.orm.load_only', 'load_only', (['"""id"""'], {}), "('id')\n", (4155, 4161), False, 'from sqlalchemy.orm import load_only, lazyload\n'), ((4017, 4041), 'nlabel.io.common.text_hash_code', 'text_hash_code', (['text_key'], {}), '(text_key)\n', (4031, 4041), False, 'from nlabel.io.common import ArchiveInfo, text_hash_code\n')] |
# -*- coding: utf-8 -*-
# * Copyright (c) 2009-2018. Authors: see NOTICE file.
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import sys
from argparse import ArgumentParser
from shapely.geometry import Point, box
from cytomine import Cytomine
from cytomine.models import Annotation, AnnotationCollection, \
ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink
logging.basicConfig()
logger = logging.getLogger("cytomine.client")
logger.setLevel(logging.INFO)
if __name__ == '__main__':
parser = ArgumentParser(prog="Cytomine Python client example")
# Cytomine
parser.add_argument('--cytomine_host', dest='host',
default='demo.cytomine.be', help="The Cytomine host")
parser.add_argument('--cytomine_public_key', dest='public_key',
help="The Cytomine public key")
parser.add_argument('--cytomine_private_key', dest='private_key',
help="The Cytomine private key")
parser.add_argument('--cytomine_id_project', dest='id_project',
help="The project from which we want the images")
parser.add_argument('--cytomine_id_image_instance1', dest='id_image_instance1',
help="The image to which the annotation will be added"),
parser.add_argument('--cytomine_id_image_instance2', dest='id_image_instance2',
help="The image to which the linked annotation will be added"),
params, other = parser.parse_known_args(sys.argv[1:])
with Cytomine(host=params.host, public_key=params.public_key, private_key=params.private_key) as cytomine:
# Sanity check: the 2 images must be in the same image group.
igii1 = ImageGroupImageInstanceCollection().fetch_with_filter("imageinstance", params.id_image_instance1)
igii2 = ImageGroupImageInstanceCollection().fetch_with_filter("imageinstance", params.id_image_instance2)
if len(igii1) != 1 or len(igii2) != 1 or igii1[0].group != igii2[0].group:
raise ValueError("Images are not in the same image group !")
id_image_group = igii1[0].group
# We first add a point in (10,10) in both images
point = Point(10, 10)
annotation_point1 = Annotation(location=point.wkt, id_image=params.id_image_instance1).save()
annotation_point2 = Annotation(location=point.wkt, id_image=params.id_image_instance2).save()
# Now we will link them.
# 1) First I need to create an annotation group
annotation_group = AnnotationGroup(id_project=params.id_project, id_image_group=id_image_group).save()
print(annotation_group)
# 2) I add the 2 annotations into the group to create links
al1 = AnnotationLink(id_annotation=annotation_point1.id, id_annotation_group=annotation_group.id).save()
print(al1)
al2 = AnnotationLink(id_annotation=annotation_point2.id, id_annotation_group=annotation_group.id).save()
print(al2)
# List all annotations in that annotation group:
annots = AnnotationCollection()
annots.project = 682669
annots.showLink = True
annots.group = annotation_group.id
annots.fetch()
print(annots)
for annot in annots:
n_links = len(annot.annotationLink)
# n_links will be 2 as it contains links al1->annotation_group and al2->annotation_group
linked_annot_ids = [al['annotation'] for al in annot.annotationLink]
print("Annotation {} in image {} has {} links (annotations: {})"
.format(annot.id, annot.image, n_links, linked_annot_ids))
# ---------------
# How to speed up the process when we have more data ?
# We will create points (5, 5) in every image and link them
# We will create rectangle (20, 20, 100, 100) in every image and link them
point = Point(5, 5)
rectangle = box(20, 20, 100, 100)
# I need 2 annotation groups:
annot_group_ids = []
for i in range(2):
ag = AnnotationGroup(id_project=params.id_project, id_image_group=id_image_group).save()
annot_group_ids.append(ag.id)
# We will create all annotations in one request.
annotations = AnnotationCollection()
image_ids = [params.id_image_instance1, params.id_image_instance2]
for image_id in image_ids:
for i, geometry in enumerate([point, rectangle]):
annotations.append(
Annotation(location=geometry.wkt, id_project=params.id_project, id_image=image_id,
id_group=annot_group_ids[i])
)
annotations.save()
# In the end, we have:
# - a point in image 1, linked to a point in image 2
# - a rectangle in image 1, linked to a rectangle in image 2
# - a point in image 2, linked to a point in image 1
# - a rectangle in image 2, linked to a rectangle in image 1
| [
"logging.basicConfig",
"cytomine.Cytomine",
"logging.getLogger",
"cytomine.models.ImageGroupImageInstanceCollection",
"cytomine.models.AnnotationCollection",
"argparse.ArgumentParser",
"cytomine.models.Annotation",
"shapely.geometry.box",
"shapely.geometry.Point",
"cytomine.models.AnnotationLink",... | [((1072, 1093), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (1091, 1093), False, 'import logging\n'), ((1103, 1139), 'logging.getLogger', 'logging.getLogger', (['"""cytomine.client"""'], {}), "('cytomine.client')\n", (1120, 1139), False, 'import logging\n'), ((1211, 1264), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'prog': '"""Cytomine Python client example"""'}), "(prog='Cytomine Python client example')\n", (1225, 1264), False, 'from argparse import ArgumentParser\n'), ((2214, 2307), 'cytomine.Cytomine', 'Cytomine', ([], {'host': 'params.host', 'public_key': 'params.public_key', 'private_key': 'params.private_key'}), '(host=params.host, public_key=params.public_key, private_key=params\n .private_key)\n', (2222, 2307), False, 'from cytomine import Cytomine\n'), ((2885, 2898), 'shapely.geometry.Point', 'Point', (['(10)', '(10)'], {}), '(10, 10)\n', (2890, 2898), False, 'from shapely.geometry import Point, box\n'), ((3744, 3766), 'cytomine.models.AnnotationCollection', 'AnnotationCollection', ([], {}), '()\n', (3764, 3766), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((4589, 4600), 'shapely.geometry.Point', 'Point', (['(5)', '(5)'], {}), '(5, 5)\n', (4594, 4600), False, 'from shapely.geometry import Point, box\n'), ((4621, 4642), 'shapely.geometry.box', 'box', (['(20)', '(20)', '(100)', '(100)'], {}), '(20, 20, 100, 100)\n', (4624, 4642), False, 'from shapely.geometry import Point, box\n'), ((4961, 4983), 'cytomine.models.AnnotationCollection', 'AnnotationCollection', ([], {}), '()\n', (4981, 4983), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((2403, 2438), 'cytomine.models.ImageGroupImageInstanceCollection', 'ImageGroupImageInstanceCollection', ([], {}), '()\n', (2436, 2438), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((2517, 2552), 'cytomine.models.ImageGroupImageInstanceCollection', 'ImageGroupImageInstanceCollection', ([], {}), '()\n', (2550, 2552), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((2927, 2993), 'cytomine.models.Annotation', 'Annotation', ([], {'location': 'point.wkt', 'id_image': 'params.id_image_instance1'}), '(location=point.wkt, id_image=params.id_image_instance1)\n', (2937, 2993), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((3029, 3095), 'cytomine.models.Annotation', 'Annotation', ([], {'location': 'point.wkt', 'id_image': 'params.id_image_instance2'}), '(location=point.wkt, id_image=params.id_image_instance2)\n', (3039, 3095), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((3220, 3296), 'cytomine.models.AnnotationGroup', 'AnnotationGroup', ([], {'id_project': 'params.id_project', 'id_image_group': 'id_image_group'}), '(id_project=params.id_project, id_image_group=id_image_group)\n', (3235, 3296), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((3419, 3515), 'cytomine.models.AnnotationLink', 'AnnotationLink', ([], {'id_annotation': 'annotation_point1.id', 'id_annotation_group': 'annotation_group.id'}), '(id_annotation=annotation_point1.id, id_annotation_group=\n annotation_group.id)\n', (3433, 3515), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((3551, 3647), 'cytomine.models.AnnotationLink', 'AnnotationLink', ([], {'id_annotation': 'annotation_point2.id', 'id_annotation_group': 'annotation_group.id'}), '(id_annotation=annotation_point2.id, id_annotation_group=\n annotation_group.id)\n', (3565, 3647), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((4755, 4831), 'cytomine.models.AnnotationGroup', 'AnnotationGroup', ([], {'id_project': 'params.id_project', 'id_image_group': 'id_image_group'}), '(id_project=params.id_project, id_image_group=id_image_group)\n', (4770, 4831), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n'), ((5212, 5328), 'cytomine.models.Annotation', 'Annotation', ([], {'location': 'geometry.wkt', 'id_project': 'params.id_project', 'id_image': 'image_id', 'id_group': 'annot_group_ids[i]'}), '(location=geometry.wkt, id_project=params.id_project, id_image=\n image_id, id_group=annot_group_ids[i])\n', (5222, 5328), False, 'from cytomine.models import Annotation, AnnotationCollection, ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink\n')] |
# Copyright 2015 Cray
# Copyright 2016 FUJITSU LIMITED
# Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import fixtures
from oslo_config import cfg
from oslo_db.sqlalchemy.engines import create_engine
from sqlalchemy import delete, MetaData, insert, bindparam
from monasca_api.common.repositories.sqla import models
from monasca_api.tests import base
CONF = cfg.CONF
class TestAlarmRepoDB(base.BaseTestCase):
@classmethod
def setUpClass(cls):
engine = create_engine('sqlite://')
qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read()
sconn = engine.raw_connection()
c = sconn.cursor()
c.executescript(qry)
sconn.commit()
c.close()
cls.engine = engine
def _fake_engine_from_config(*args, **kw):
return cls.engine
cls.fixture = fixtures.MonkeyPatch(
'sqlalchemy.create_engine', _fake_engine_from_config)
cls.fixture.setUp()
metadata = MetaData()
cls.aa = models.create_aa_model(metadata)
cls._delete_aa_query = delete(cls.aa)
cls._insert_aa_query = (insert(cls.aa)
.values(
alarm_definition_id=bindparam('alarm_definition_id'),
alarm_state=bindparam('alarm_state'),
action_id=bindparam('action_id')))
cls.ad = models.create_ad_model(metadata)
cls._delete_ad_query = delete(cls.ad)
cls._insert_ad_query = (insert(cls.ad)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
severity=bindparam('severity'),
expression=bindparam('expression'),
match_by=bindparam('match_by'),
actions_enabled=bindparam('actions_enabled'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
deleted_at=bindparam('deleted_at')))
cls.sad = models.create_sad_model(metadata)
cls._delete_sad_query = delete(cls.sad)
cls._insert_sad_query = (insert(cls.sad)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
function=bindparam('function'),
metric_name=bindparam('metric_name'),
operator=bindparam('operator'),
threshold=bindparam('threshold'),
period=bindparam('period'),
periods=bindparam('periods'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.sadd = models.create_sadd_model(metadata)
cls._delete_sadd_query = delete(cls.sadd)
cls._insert_sadd_query = (insert(cls.sadd)
.values(
sub_alarm_definition_id=bindparam('sub_alarm_definition_id'),
dimension_name=bindparam('dimension_name'),
value=bindparam('value')))
cls.nm = models.create_nm_model(metadata)
cls._delete_nm_query = delete(cls.nm)
cls._insert_nm_query = (insert(cls.nm)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
type=bindparam('type'),
address=bindparam('address'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.a = models.create_a_model(metadata)
cls._delete_a_query = delete(cls.a)
cls._insert_a_query = (insert(cls.a)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
state=bindparam('state'),
lifecycle_state=bindparam('lifecycle_state'),
link=bindparam('link'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
state_updated_at=bindparam('state_updated_at')))
cls.sa = models.create_sa_model(metadata)
cls._delete_sa_query = delete(cls.sa)
cls._insert_sa_query = (insert(cls.sa)
.values(
id=bindparam('id'),
sub_expression_id=bindparam('sub_expression_id'),
alarm_id=bindparam('alarm_id'),
expression=bindparam('expression'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.am = models.create_am_model(metadata)
cls._delete_am_query = delete(cls.am)
cls._insert_am_query = (insert(cls.am)
.values(
alarm_id=bindparam('alarm_id'),
metric_definition_dimensions_id=bindparam(
'metric_definition_dimensions_id')))
cls.md = models.create_md_model(metadata)
cls._delete_md_query = delete(cls.md)
cls._insert_md_query = (insert(cls.md)
.values(
dimension_set_id=bindparam('dimension_set_id'),
name=bindparam('name'),
value=bindparam('value')))
cls.mdd = models.create_mdd_model(metadata)
cls._delete_mdd_query = delete(cls.mdd)
cls._insert_mdd_query = (insert(cls.mdd)
.values(
id=bindparam('id'),
metric_definition_id=bindparam('metric_definition_id'),
metric_dimension_set_id=bindparam('metric_dimension_set_id')))
cls.mde = models.create_mde_model(metadata)
cls._delete_mde_query = delete(cls.mde)
cls._insert_mde_query = (insert(cls.mde)
.values(
id=bindparam('id'),
name=bindparam('name'),
tenant_id=bindparam('tenant_id'),
region=bindparam('region')))
@classmethod
def tearDownClass(cls):
cls.fixture.cleanUp()
if hasattr(CONF, 'sql_engine'):
delattr(CONF, 'sql_engine')
def setUp(self):
super(TestAlarmRepoDB, self).setUp()
self.conf_override(connection='sqlite://', group='database')
from monasca_api.common.repositories.sqla import alarms_repository as ar
self.repo = ar.AlarmsRepository()
timestamp1 = datetime.datetime(2015, 3, 14, 9, 26, 53)
timestamp2 = datetime.datetime(2015, 3, 14, 9, 26, 54)
timestamp3 = datetime.datetime(2015, 3, 14, 9, 26, 55)
timestamp4 = datetime.datetime(2015, 3, 15, 9, 26, 53)
self.default_as = [{'id': '1',
'alarm_definition_id': '1',
'state': 'OK',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'created_at': timestamp1,
'updated_at': timestamp1,
'state_updated_at': timestamp1},
{'id': '2',
'alarm_definition_id': '1',
'state': 'UNDETERMINED',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'created_at': timestamp2,
'updated_at': timestamp2,
'state_updated_at': timestamp2},
{'id': '3',
'alarm_definition_id': '1',
'state': 'ALARM',
'lifecycle_state': None,
'link': 'http://somesite.com/this-alarm-info',
'created_at': timestamp3,
'updated_at': timestamp3,
'state_updated_at': timestamp3},
{'id': '234111',
'alarm_definition_id': '234',
'state': 'UNDETERMINED',
'lifecycle_state': None,
'link': None,
'created_at': timestamp4,
'updated_at': timestamp4,
'state_updated_at': timestamp4}]
self.default_ads = [{'id': '1',
'tenant_id': 'bob',
'name': '90% CPU',
'severity': 'LOW',
'expression': 'AVG(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'match_by': 'flavor_id,image_id',
'actions_enabled': False,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now(),
'deleted_at': None},
{'id': '234',
'tenant_id': 'bob',
'name': '50% CPU',
'severity': 'CRITICAL',
'expression': 'AVG(cpu.sys_mem'
'{service=monitoring})'
' > 20 and AVG(cpu.idle_perc'
'{service=monitoring}) < 10',
'match_by': 'hostname,region',
'actions_enabled': False,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now(),
'deleted_at': None}]
self.default_sadds = [{'sub_alarm_definition_id': '111',
'dimension_name': 'flavor_id',
'value': '777'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'image_id',
'value': '888'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'metric_name',
'value': 'cpu'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'device',
'value': '1'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'flavor_id',
'value': '777'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'image_id',
'value': '888'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'metric_name',
'value': 'mem'}]
self.default_nms = [{'id': '29387234',
'tenant_id': 'alarm-test',
'name': 'MyEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '77778687',
'tenant_id': 'alarm-test',
'name': 'OtherEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_aas = [{'alarm_definition_id': '123',
'alarm_state': 'ALARM',
'action_id': '29387234'},
{'alarm_definition_id': '123',
'alarm_state': 'ALARM',
'action_id': '77778687'},
{'alarm_definition_id': '234',
'alarm_state': 'ALARM',
'action_id': '29387234'},
{'alarm_definition_id': '234',
'alarm_state': 'ALARM',
'action_id': '77778687'}]
self.default_sads = [{'id': '43',
'alarm_definition_id': '234',
'function': 'f_43',
'metric_name': 'm_43',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '45',
'alarm_definition_id': '234',
'function': 'f_45',
'metric_name': 'm_45',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '47',
'alarm_definition_id': '234',
'function': 'f_47',
'metric_name': 'm_47',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '8484',
'alarm_definition_id': '234',
'function': 'f_49',
'metric_name': 'm_49',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '8686',
'alarm_definition_id': '234',
'function': 'f_51',
'metric_name': 'm_51',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_sas = [{'sub_expression_id': '43',
'id': '42',
'alarm_id': '1',
'expression': 'avg(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'sub_expression_id': '45',
'id': '43',
'alarm_id': '2',
'expression': 'avg(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'sub_expression_id': '47',
'id': '44',
'alarm_id': '3',
'expression': 'avg(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_ams = [{'alarm_id': '1',
'metric_definition_dimensions_id': b'11'},
{'alarm_id': '1',
'metric_definition_dimensions_id': b'22'},
{'alarm_id': '2',
'metric_definition_dimensions_id': b'11'},
{'alarm_id': '3',
'metric_definition_dimensions_id': b'22'},
{'alarm_id': '234111',
'metric_definition_dimensions_id': b'31'},
{'alarm_id': '234111',
'metric_definition_dimensions_id': b'32'}]
self.default_mdes = [{'id': b'1',
'name': 'cpu.idle_perc',
'tenant_id': 'bob',
'region': 'west'},
{'id': b'111',
'name': 'cpu.sys_mem',
'tenant_id': 'bob',
'region': 'west'},
{'id': b'112',
'name': 'cpu.idle_perc',
'tenant_id': 'bob',
'region': 'west'}]
self.default_mdds = [{'id': b'11',
'metric_definition_id': b'1',
'metric_dimension_set_id': b'1'},
{'id': b'22',
'metric_definition_id': b'1',
'metric_dimension_set_id': b'2'},
{'id': b'31',
'metric_definition_id': b'111',
'metric_dimension_set_id': b'21'},
{'id': b'32',
'metric_definition_id': b'112',
'metric_dimension_set_id': b'22'}]
self.default_mds = [{'dimension_set_id': b'1',
'name': 'instance_id',
'value': '123'},
{'dimension_set_id': b'1',
'name': 'service',
'value': 'monitoring'},
{'dimension_set_id': b'2',
'name': 'flavor_id',
'value': '222'},
{'dimension_set_id': b'22',
'name': 'flavor_id',
'value': '333'},
{'dimension_set_id': b'21',
'name': 'service',
'value': 'monitoring'},
{'dimension_set_id': b'22',
'name': 'service',
'value': 'monitoring'},
{'dimension_set_id': b'21',
'name': 'hostname',
'value': 'roland'},
{'dimension_set_id': b'22',
'name': 'hostname',
'value': 'roland'},
{'dimension_set_id': b'21',
'name': 'region',
'value': 'colorado'},
{'dimension_set_id': b'22',
'name': 'region',
'value': 'colorado'},
{'dimension_set_id': b'22',
'name': 'extra',
'value': 'vivi'}]
self.alarm1 = {'alarm_definition': {'id': '1',
'name': '90% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-14T09:26:53Z',
'id': '1',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'metrics': [{'dimensions': {'instance_id': '123',
'service': 'monitoring'},
'name': 'cpu.idle_perc'},
{'dimensions': {'flavor_id': '222'},
'name': 'cpu.idle_perc'}],
'state': 'OK',
'state_updated_timestamp': '2015-03-14T09:26:53Z',
'updated_timestamp': '2015-03-14T09:26:53Z'}
self.alarm2 = {'alarm_definition': {'id': '1',
'name': '90% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-14T09:26:54Z',
'id': '2',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'metrics': [{'dimensions': {'instance_id': '123',
'service': 'monitoring'},
'name': 'cpu.idle_perc'}],
'state': 'UNDETERMINED',
'state_updated_timestamp': '2015-03-14T09:26:54Z',
'updated_timestamp': '2015-03-14T09:26:54Z'}
self.alarm_compound = {'alarm_definition': {'id': '234',
'name': '50% CPU',
'severity': 'CRITICAL'},
'created_timestamp': '2015-03-15T09:26:53Z',
'id': '234111',
'lifecycle_state': None,
'link': None,
'metrics': [
{'dimensions': {'hostname': 'roland',
'region': 'colorado',
'service': 'monitoring'},
'name': 'cpu.sys_mem'},
{'dimensions': {'extra': 'vivi',
'flavor_id': '333',
'hostname': 'roland',
'region': 'colorado',
'service': 'monitoring'},
'name': 'cpu.idle_perc'}],
'state': 'UNDETERMINED',
'state_updated_timestamp':
'2015-03-15T09:26:53Z',
'updated_timestamp': '2015-03-15T09:26:53Z'}
self.alarm3 = {'alarm_definition': {'id': '1',
'name': '90% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-14T09:26:55Z',
'id': '3',
'lifecycle_state': None,
'link': 'http://somesite.com/this-alarm-info',
'metrics': [{'dimensions': {'flavor_id': '222'},
'name': 'cpu.idle_perc'}],
'state': 'ALARM',
'state_updated_timestamp': '2015-03-14T09:26:55Z',
'updated_timestamp': '2015-03-14T09:26:55Z'}
with self.engine.begin() as conn:
conn.execute(self._delete_am_query)
conn.execute(self._insert_am_query, self.default_ams)
conn.execute(self._delete_md_query)
conn.execute(self._insert_md_query, self.default_mds)
conn.execute(self._delete_mdd_query)
conn.execute(self._insert_mdd_query, self.default_mdds)
conn.execute(self._delete_a_query)
conn.execute(self._insert_a_query, self.default_as)
conn.execute(self._delete_sa_query)
conn.execute(self._insert_sa_query, self.default_sas)
conn.execute(self._delete_mde_query)
conn.execute(self._insert_mde_query, self.default_mdes)
conn.execute(self._delete_ad_query)
conn.execute(self._insert_ad_query, self.default_ads)
conn.execute(self._delete_sad_query)
conn.execute(self._insert_sad_query, self.default_sads)
conn.execute(self._delete_sadd_query)
conn.execute(self._insert_sadd_query, self.default_sadds)
conn.execute(self._delete_nm_query)
conn.execute(self._insert_nm_query, self.default_nms)
conn.execute(self._delete_aa_query)
conn.execute(self._insert_aa_query, self.default_aas)
def helper_builder_result(self, alarm_rows):
result = []
if not alarm_rows:
return result
# Forward declaration
alarm = {}
prev_alarm_id = None
for alarm_row in alarm_rows:
if prev_alarm_id != alarm_row['alarm_id']:
if prev_alarm_id is not None:
result.append(alarm)
ad = {u'id': alarm_row['alarm_definition_id'],
u'name': alarm_row['alarm_definition_name'],
u'severity': alarm_row['severity'], }
metrics = []
alarm = {u'id': alarm_row['alarm_id'], u'metrics': metrics,
u'state': alarm_row['state'],
u'lifecycle_state': alarm_row['lifecycle_state'],
u'link': alarm_row['link'],
u'state_updated_timestamp':
alarm_row['state_updated_timestamp'].isoformat() +
'Z',
u'updated_timestamp':
alarm_row['updated_timestamp'].isoformat() + 'Z',
u'created_timestamp':
alarm_row['created_timestamp'].isoformat() + 'Z',
u'alarm_definition': ad}
prev_alarm_id = alarm_row['alarm_id']
dimensions = {}
metric = {u'name': alarm_row['metric_name'],
u'dimensions': dimensions}
if alarm_row['metric_dimensions']:
for dimension in alarm_row['metric_dimensions'].split(','):
parsed_dimension = dimension.split('=')
dimensions[parsed_dimension[0]] = parsed_dimension[1]
metrics.append(metric)
result.append(alarm)
return result
def test_should_delete(self):
tenant_id = 'bob'
alarm_id = '1'
alarm1 = self.repo.get_alarm(tenant_id, alarm_id)
alarm1 = self.helper_builder_result(alarm1)
self.assertEqual(alarm1[0], self.alarm1)
self.repo.delete_alarm(tenant_id, alarm_id)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm, tenant_id, alarm_id)
def test_should_throw_exception_on_delete(self):
tenant_id = 'bob'
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.delete_alarm, tenant_id, 'Not an alarm ID')
def test_should_find_alarm_def(self):
tenant_id = 'bob'
alarm_id = '1'
expected = {'actions_enabled': False,
'deleted_at': None,
'description': None,
'expression': 'AVG(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'id': '1',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'severity': 'LOW',
'tenant_id': 'bob'}
alarm_def = self.repo.get_alarm_definition(tenant_id, alarm_id)
expected['created_at'] = alarm_def['created_at']
expected['updated_at'] = alarm_def['updated_at']
self.assertEqual(alarm_def, expected)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm_definition,
tenant_id, 'Not an alarm ID')
def test_should_find(self):
res = self.repo.get_alarms(tenant_id='Not a tenant id', limit=1)
self.assertEqual(res, [])
tenant_id = 'bob'
res = self.repo.get_alarms(tenant_id=tenant_id, limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.sys_mem'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'flavor_id': '222'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_dimensions': {'flavor_id': '333'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_dimensions': {'flavor_id': '222|333'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_dimensions': {'flavor_id': ''}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring',
'hostname': 'roland'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm2,
self.alarm_compound]
self.assertEqual(res, expected)
alarm_def_id = self.alarm1['alarm_definition']['id']
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring'},
'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2]
self.assertEqual(res, expected)
alarm_def_id = self.alarm1['alarm_definition']['id']
query_parms = {'metric_name': 'cpu.idle_perc',
'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3]
self.assertEqual(res, expected)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id,
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.sys_mem',
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring'},
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm2,
self.alarm_compound]
self.assertEqual(res, expected)
time_now = datetime.datetime.now().isoformat() + 'Z'
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring'},
'state': 'UNDETERMINED',
'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = []
self.assertEqual(res, expected)
time_now = '2015-03-15T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
time_now = '2015-03-14T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'state_updated_start_time': time_now,
'link': 'http://google.com',
'lifecycle_state': 'OPEN'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=None,
offset='10')
res = self.helper_builder_result(res)
expected = []
self.assertEqual(res, expected)
query_parms = {'severity': 'LOW'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3]
self.assertEqual(expected, res)
query_parms = {'severity': 'CRITICAL'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(expected, res)
query_parms = {'severity': 'LOW|CRITICAL'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
def test_should_count(self):
tenant_id = 'bob'
res = self.repo.get_alarms_count(tenant_id=tenant_id)
self.assertEqual([{'count': 4}], res)
res = self.repo.get_alarms_count(tenant_id=tenant_id,
limit=1000)
self.assertEqual([{'count': 4}], res)
res = self.repo.get_alarms_count(tenant_id=tenant_id,
limit=1000,
offset=10)
self.assertEqual([], res)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 1}], res)
query_parms = {'metric_name': 'cpu.sys_mem'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 1}], res)
query_parms = {'state': 'UNDETERMINED'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 2}], res)
time_now = '2015-03-15T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 1}], res)
query_parms = {'severity': 'LOW'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 3}], res)
query_parms = {'lifecycle_state': 'OPEN'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 2}], res)
query_parms = {'link': 'http://somesite.com/this-alarm-info'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 3}], res)
query_parms = {'metric_dimensions': {'flavor_id': '222'}}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 2}], res)
query_parms = {'group_by': ['metric_name']}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [{'count': 4, 'metric_name': 'cpu.idle_perc'},
{'count': 1, 'metric_name': 'cpu.sys_mem'}]
self.assertEqual(expected, res)
query_parms = {'group_by': ['dimension_name']}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [{'count': 1, 'dimension_name': 'extra'},
{'count': 3, 'dimension_name': 'flavor_id'},
{'count': 1, 'dimension_name': 'hostname'},
{'count': 2, 'dimension_name': 'instance_id'},
{'count': 1, 'dimension_name': 'region'},
{'count': 3, 'dimension_name': 'service'}]
self.assertEqual(expected, res)
query_parms = {'group_by': ['dimension_value']}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [{'count': 2, 'dimension_value': '123'},
{'count': 2, 'dimension_value': '222'},
{'count': 1, 'dimension_value': '333'},
{'count': 1, 'dimension_value': 'colorado'},
{'count': 3, 'dimension_value': 'monitoring'},
{'count': 1, 'dimension_value': 'roland'},
{'count': 1, 'dimension_value': 'vivi'}]
self.assertEqual(expected, res)
query_parms = {'group_by': []}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 4}], res)
def test_should_sort_and_find(self):
tenant_id = 'bob'
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_definition_id']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3,
self.alarm_compound]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_definition_name']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [self.alarm_compound,
self.alarm1,
self.alarm2,
self.alarm3]
res = self.helper_builder_result(res)
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['severity']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [self.alarm1,
self.alarm2,
self.alarm3,
self.alarm_compound]
res = self.helper_builder_result(res)
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['state']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
res = self.helper_builder_result(res)
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id asc']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id desc']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm3,
self.alarm_compound,
self.alarm2,
self.alarm1]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id nfl']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
def test_should_update(self):
tenant_id = 'bob'
alarm_id = '2'
alarm = self.repo.get_alarm(tenant_id, alarm_id)
alarm = self.helper_builder_result(alarm)[0]
original_state_updated_date = alarm['state_updated_timestamp']
original_updated_timestamp = alarm['updated_timestamp']
self.assertEqual(alarm['state'], 'UNDETERMINED')
prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None)
alarm_new = self.repo.get_alarm(tenant_id, alarm_id)
alarm_new = self.helper_builder_result(alarm_new)[0]
new_state_updated_date = alarm_new['state_updated_timestamp']
new_updated_timestamp = alarm_new['updated_timestamp']
self.assertNotEqual(original_updated_timestamp,
new_updated_timestamp,
'updated_at did not change')
self.assertNotEqual(original_state_updated_date,
new_state_updated_date,
'state_updated_at did not change')
alarm_tmp = tuple(alarm[k] for k in ('state', 'link', 'lifecycle_state'))
self.assertEqual(alarm_tmp, prev_state)
alarm['state_updated_timestamp'] = alarm_new['state_updated_timestamp']
alarm['updated_timestamp'] = alarm_new['updated_timestamp']
alarm['state'] = alarm_new['state']
alarm['link'] = alarm_new['link']
alarm['lifecycle_state'] = alarm_new['lifecycle_state']
self.assertEqual(alarm, alarm_new)
time.sleep(1)
prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None)
alarm_unchanged = self.repo.get_alarm(tenant_id, alarm_id)
alarm_unchanged = self.helper_builder_result(alarm_unchanged)[0]
unchanged_state_updated_date = alarm_unchanged['state_updated_timestamp']
unchanged_updated_timestamp = alarm_unchanged['updated_timestamp']
self.assertNotEqual(unchanged_updated_timestamp,
new_updated_timestamp,
'updated_at did not change')
self.assertEqual(unchanged_state_updated_date,
new_state_updated_date,
'state_updated_at did change')
alarm_new_tmp = tuple(alarm_new[k] for k in ('state', 'link', 'lifecycle_state'))
self.assertEqual(alarm_new_tmp, prev_state)
def test_should_throw_exception_on_update(self):
tenant_id = 'bob'
alarm_id = 'Not real alarm id'
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.update_alarm,
tenant_id,
alarm_id,
'UNDETERMINED',
None,
None)
def test_get_alarm_metrics(self):
alarm_id = '2'
alarm_metrics = self.repo.get_alarm_metrics(alarm_id)
expected = [{'alarm_id': '2',
'dimensions': 'instance_id=123,service=monitoring',
'name': 'cpu.idle_perc'}]
self.assertEqual(alarm_metrics, expected)
def test_get_subalarms(self):
tenant_id = 'bob'
alarm_id = '2'
sub_alarms = self.repo.get_sub_alarms(tenant_id, alarm_id)
expected = [{'alarm_definition_id': '1',
'alarm_id': '2',
'expression': 'avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10',
'sub_alarm_id': '43'}]
self.assertEqual(sub_alarms, expected)
| [
"oslo_db.sqlalchemy.engines.create_engine",
"sqlalchemy.delete",
"time.sleep",
"sqlalchemy.MetaData",
"datetime.datetime",
"monasca_api.common.repositories.sqla.models.create_sad_model",
"sqlalchemy.insert",
"fixtures.MonkeyPatch",
"monasca_api.common.repositories.sqla.models.create_mdd_model",
"m... | [((1055, 1081), 'oslo_db.sqlalchemy.engines.create_engine', 'create_engine', (['"""sqlite://"""'], {}), "('sqlite://')\n", (1068, 1081), False, 'from oslo_db.sqlalchemy.engines import create_engine\n'), ((1421, 1495), 'fixtures.MonkeyPatch', 'fixtures.MonkeyPatch', (['"""sqlalchemy.create_engine"""', '_fake_engine_from_config'], {}), "('sqlalchemy.create_engine', _fake_engine_from_config)\n", (1441, 1495), False, 'import fixtures\n'), ((1557, 1567), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (1565, 1567), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((1586, 1618), 'monasca_api.common.repositories.sqla.models.create_aa_model', 'models.create_aa_model', (['metadata'], {}), '(metadata)\n', (1608, 1618), False, 'from monasca_api.common.repositories.sqla import models\n'), ((1650, 1664), 'sqlalchemy.delete', 'delete', (['cls.aa'], {}), '(cls.aa)\n', (1656, 1664), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2006, 2038), 'monasca_api.common.repositories.sqla.models.create_ad_model', 'models.create_ad_model', (['metadata'], {}), '(metadata)\n', (2028, 2038), False, 'from monasca_api.common.repositories.sqla import models\n'), ((2070, 2084), 'sqlalchemy.delete', 'delete', (['cls.ad'], {}), '(cls.ad)\n', (2076, 2084), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2884, 2917), 'monasca_api.common.repositories.sqla.models.create_sad_model', 'models.create_sad_model', (['metadata'], {}), '(metadata)\n', (2907, 2917), False, 'from monasca_api.common.repositories.sqla import models\n'), ((2950, 2965), 'sqlalchemy.delete', 'delete', (['cls.sad'], {}), '(cls.sad)\n', (2956, 2965), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3788, 3822), 'monasca_api.common.repositories.sqla.models.create_sadd_model', 'models.create_sadd_model', (['metadata'], {}), '(metadata)\n', (3812, 3822), False, 'from monasca_api.common.repositories.sqla import models\n'), ((3856, 3872), 'sqlalchemy.delete', 'delete', (['cls.sadd'], {}), '(cls.sadd)\n', (3862, 3872), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4232, 4264), 'monasca_api.common.repositories.sqla.models.create_nm_model', 'models.create_nm_model', (['metadata'], {}), '(metadata)\n', (4254, 4264), False, 'from monasca_api.common.repositories.sqla import models\n'), ((4296, 4310), 'sqlalchemy.delete', 'delete', (['cls.nm'], {}), '(cls.nm)\n', (4302, 4310), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4873, 4904), 'monasca_api.common.repositories.sqla.models.create_a_model', 'models.create_a_model', (['metadata'], {}), '(metadata)\n', (4894, 4904), False, 'from monasca_api.common.repositories.sqla import models\n'), ((4935, 4948), 'sqlalchemy.delete', 'delete', (['cls.a'], {}), '(cls.a)\n', (4941, 4948), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5623, 5655), 'monasca_api.common.repositories.sqla.models.create_sa_model', 'models.create_sa_model', (['metadata'], {}), '(metadata)\n', (5645, 5655), False, 'from monasca_api.common.repositories.sqla import models\n'), ((5687, 5701), 'sqlalchemy.delete', 'delete', (['cls.sa'], {}), '(cls.sa)\n', (5693, 5701), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6231, 6263), 'monasca_api.common.repositories.sqla.models.create_am_model', 'models.create_am_model', (['metadata'], {}), '(metadata)\n', (6253, 6263), False, 'from monasca_api.common.repositories.sqla import models\n'), ((6295, 6309), 'sqlalchemy.delete', 'delete', (['cls.am'], {}), '(cls.am)\n', (6301, 6309), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6640, 6672), 'monasca_api.common.repositories.sqla.models.create_md_model', 'models.create_md_model', (['metadata'], {}), '(metadata)\n', (6662, 6672), False, 'from monasca_api.common.repositories.sqla import models\n'), ((6704, 6718), 'sqlalchemy.delete', 'delete', (['cls.md'], {}), '(cls.md)\n', (6710, 6718), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7033, 7066), 'monasca_api.common.repositories.sqla.models.create_mdd_model', 'models.create_mdd_model', (['metadata'], {}), '(metadata)\n', (7056, 7066), False, 'from monasca_api.common.repositories.sqla import models\n'), ((7099, 7114), 'sqlalchemy.delete', 'delete', (['cls.mdd'], {}), '(cls.mdd)\n', (7105, 7114), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7475, 7508), 'monasca_api.common.repositories.sqla.models.create_mde_model', 'models.create_mde_model', (['metadata'], {}), '(metadata)\n', (7498, 7508), False, 'from monasca_api.common.repositories.sqla import models\n'), ((7541, 7556), 'sqlalchemy.delete', 'delete', (['cls.mde'], {}), '(cls.mde)\n', (7547, 7556), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((8298, 8319), 'monasca_api.common.repositories.sqla.alarms_repository.AlarmsRepository', 'ar.AlarmsRepository', ([], {}), '()\n', (8317, 8319), True, 'from monasca_api.common.repositories.sqla import alarms_repository as ar\n'), ((8342, 8383), 'datetime.datetime', 'datetime.datetime', (['(2015)', '(3)', '(14)', '(9)', '(26)', '(53)'], {}), '(2015, 3, 14, 9, 26, 53)\n', (8359, 8383), False, 'import datetime\n'), ((8405, 8446), 'datetime.datetime', 'datetime.datetime', (['(2015)', '(3)', '(14)', '(9)', '(26)', '(54)'], {}), '(2015, 3, 14, 9, 26, 54)\n', (8422, 8446), False, 'import datetime\n'), ((8468, 8509), 'datetime.datetime', 'datetime.datetime', (['(2015)', '(3)', '(14)', '(9)', '(26)', '(55)'], {}), '(2015, 3, 14, 9, 26, 55)\n', (8485, 8509), False, 'import datetime\n'), ((8531, 8572), 'datetime.datetime', 'datetime.datetime', (['(2015)', '(3)', '(15)', '(9)', '(26)', '(53)'], {}), '(2015, 3, 15, 9, 26, 53)\n', (8548, 8572), False, 'import datetime\n'), ((51054, 51067), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (51064, 51067), False, 'import time\n'), ((1697, 1711), 'sqlalchemy.insert', 'insert', (['cls.aa'], {}), '(cls.aa)\n', (1703, 1711), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((1809, 1841), 'sqlalchemy.bindparam', 'bindparam', (['"""alarm_definition_id"""'], {}), "('alarm_definition_id')\n", (1818, 1841), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((1891, 1915), 'sqlalchemy.bindparam', 'bindparam', (['"""alarm_state"""'], {}), "('alarm_state')\n", (1900, 1915), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((1963, 1985), 'sqlalchemy.bindparam', 'bindparam', (['"""action_id"""'], {}), "('action_id')\n", (1972, 1985), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2117, 2131), 'sqlalchemy.insert', 'insert', (['cls.ad'], {}), '(cls.ad)\n', (2123, 2131), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2212, 2227), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (2221, 2227), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2275, 2297), 'sqlalchemy.bindparam', 'bindparam', (['"""tenant_id"""'], {}), "('tenant_id')\n", (2284, 2297), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2340, 2357), 'sqlalchemy.bindparam', 'bindparam', (['"""name"""'], {}), "('name')\n", (2349, 2357), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2404, 2425), 'sqlalchemy.bindparam', 'bindparam', (['"""severity"""'], {}), "('severity')\n", (2413, 2425), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2474, 2497), 'sqlalchemy.bindparam', 'bindparam', (['"""expression"""'], {}), "('expression')\n", (2483, 2497), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2544, 2565), 'sqlalchemy.bindparam', 'bindparam', (['"""match_by"""'], {}), "('match_by')\n", (2553, 2565), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2619, 2647), 'sqlalchemy.bindparam', 'bindparam', (['"""actions_enabled"""'], {}), "('actions_enabled')\n", (2628, 2647), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2696, 2719), 'sqlalchemy.bindparam', 'bindparam', (['"""created_at"""'], {}), "('created_at')\n", (2705, 2719), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2768, 2791), 'sqlalchemy.bindparam', 'bindparam', (['"""updated_at"""'], {}), "('updated_at')\n", (2777, 2791), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2840, 2863), 'sqlalchemy.bindparam', 'bindparam', (['"""deleted_at"""'], {}), "('deleted_at')\n", (2849, 2863), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((2999, 3014), 'sqlalchemy.insert', 'insert', (['cls.sad'], {}), '(cls.sad)\n', (3005, 3014), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3097, 3112), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (3106, 3112), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3171, 3203), 'sqlalchemy.bindparam', 'bindparam', (['"""alarm_definition_id"""'], {}), "('alarm_definition_id')\n", (3180, 3203), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3251, 3272), 'sqlalchemy.bindparam', 'bindparam', (['"""function"""'], {}), "('function')\n", (3260, 3272), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3323, 3347), 'sqlalchemy.bindparam', 'bindparam', (['"""metric_name"""'], {}), "('metric_name')\n", (3332, 3347), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3395, 3416), 'sqlalchemy.bindparam', 'bindparam', (['"""operator"""'], {}), "('operator')\n", (3404, 3416), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3465, 3487), 'sqlalchemy.bindparam', 'bindparam', (['"""threshold"""'], {}), "('threshold')\n", (3474, 3487), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3533, 3552), 'sqlalchemy.bindparam', 'bindparam', (['"""period"""'], {}), "('period')\n", (3542, 3552), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3599, 3619), 'sqlalchemy.bindparam', 'bindparam', (['"""periods"""'], {}), "('periods')\n", (3608, 3619), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3669, 3692), 'sqlalchemy.bindparam', 'bindparam', (['"""created_at"""'], {}), "('created_at')\n", (3678, 3692), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3742, 3765), 'sqlalchemy.bindparam', 'bindparam', (['"""updated_at"""'], {}), "('updated_at')\n", (3751, 3765), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((3907, 3923), 'sqlalchemy.insert', 'insert', (['cls.sadd'], {}), '(cls.sadd)\n', (3913, 3923), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4029, 4065), 'sqlalchemy.bindparam', 'bindparam', (['"""sub_alarm_definition_id"""'], {}), "('sub_alarm_definition_id')\n", (4038, 4065), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4120, 4147), 'sqlalchemy.bindparam', 'bindparam', (['"""dimension_name"""'], {}), "('dimension_name')\n", (4129, 4147), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4193, 4211), 'sqlalchemy.bindparam', 'bindparam', (['"""value"""'], {}), "('value')\n", (4202, 4211), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4343, 4357), 'sqlalchemy.insert', 'insert', (['cls.nm'], {}), '(cls.nm)\n', (4349, 4357), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4438, 4453), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (4447, 4453), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4501, 4523), 'sqlalchemy.bindparam', 'bindparam', (['"""tenant_id"""'], {}), "('tenant_id')\n", (4510, 4523), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4566, 4583), 'sqlalchemy.bindparam', 'bindparam', (['"""name"""'], {}), "('name')\n", (4575, 4583), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4626, 4643), 'sqlalchemy.bindparam', 'bindparam', (['"""type"""'], {}), "('type')\n", (4635, 4643), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4689, 4709), 'sqlalchemy.bindparam', 'bindparam', (['"""address"""'], {}), "('address')\n", (4698, 4709), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4758, 4781), 'sqlalchemy.bindparam', 'bindparam', (['"""created_at"""'], {}), "('created_at')\n", (4767, 4781), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4830, 4853), 'sqlalchemy.bindparam', 'bindparam', (['"""updated_at"""'], {}), "('updated_at')\n", (4839, 4853), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((4980, 4993), 'sqlalchemy.insert', 'insert', (['cls.a'], {}), '(cls.a)\n', (4986, 4993), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5072, 5087), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (5081, 5087), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5144, 5176), 'sqlalchemy.bindparam', 'bindparam', (['"""alarm_definition_id"""'], {}), "('alarm_definition_id')\n", (5153, 5176), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5219, 5237), 'sqlalchemy.bindparam', 'bindparam', (['"""state"""'], {}), "('state')\n", (5228, 5237), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5290, 5318), 'sqlalchemy.bindparam', 'bindparam', (['"""lifecycle_state"""'], {}), "('lifecycle_state')\n", (5299, 5318), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5360, 5377), 'sqlalchemy.bindparam', 'bindparam', (['"""link"""'], {}), "('link')\n", (5369, 5377), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5425, 5448), 'sqlalchemy.bindparam', 'bindparam', (['"""created_at"""'], {}), "('created_at')\n", (5434, 5448), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5496, 5519), 'sqlalchemy.bindparam', 'bindparam', (['"""updated_at"""'], {}), "('updated_at')\n", (5505, 5519), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5573, 5602), 'sqlalchemy.bindparam', 'bindparam', (['"""state_updated_at"""'], {}), "('state_updated_at')\n", (5582, 5602), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5734, 5748), 'sqlalchemy.insert', 'insert', (['cls.sa'], {}), '(cls.sa)\n', (5740, 5748), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5825, 5840), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (5834, 5840), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5896, 5926), 'sqlalchemy.bindparam', 'bindparam', (['"""sub_expression_id"""'], {}), "('sub_expression_id')\n", (5905, 5926), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((5973, 5994), 'sqlalchemy.bindparam', 'bindparam', (['"""alarm_id"""'], {}), "('alarm_id')\n", (5982, 5994), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6043, 6066), 'sqlalchemy.bindparam', 'bindparam', (['"""expression"""'], {}), "('expression')\n", (6052, 6066), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6115, 6138), 'sqlalchemy.bindparam', 'bindparam', (['"""created_at"""'], {}), "('created_at')\n", (6124, 6138), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6187, 6210), 'sqlalchemy.bindparam', 'bindparam', (['"""updated_at"""'], {}), "('updated_at')\n", (6196, 6210), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6342, 6356), 'sqlalchemy.insert', 'insert', (['cls.am'], {}), '(cls.am)\n', (6348, 6356), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6443, 6464), 'sqlalchemy.bindparam', 'bindparam', (['"""alarm_id"""'], {}), "('alarm_id')\n", (6452, 6464), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6534, 6578), 'sqlalchemy.bindparam', 'bindparam', (['"""metric_definition_dimensions_id"""'], {}), "('metric_definition_dimensions_id')\n", (6543, 6578), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6751, 6765), 'sqlalchemy.insert', 'insert', (['cls.md'], {}), '(cls.md)\n', (6757, 6765), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6860, 6889), 'sqlalchemy.bindparam', 'bindparam', (['"""dimension_set_id"""'], {}), "('dimension_set_id')\n", (6869, 6889), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6932, 6949), 'sqlalchemy.bindparam', 'bindparam', (['"""name"""'], {}), "('name')\n", (6941, 6949), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((6993, 7011), 'sqlalchemy.bindparam', 'bindparam', (['"""value"""'], {}), "('value')\n", (7002, 7011), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7148, 7163), 'sqlalchemy.insert', 'insert', (['cls.mdd'], {}), '(cls.mdd)\n', (7154, 7163), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7246, 7261), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (7255, 7261), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7321, 7354), 'sqlalchemy.bindparam', 'bindparam', (['"""metric_definition_id"""'], {}), "('metric_definition_id')\n", (7330, 7354), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7417, 7453), 'sqlalchemy.bindparam', 'bindparam', (['"""metric_dimension_set_id"""'], {}), "('metric_dimension_set_id')\n", (7426, 7453), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7590, 7605), 'sqlalchemy.insert', 'insert', (['cls.mde'], {}), '(cls.mde)\n', (7596, 7605), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7688, 7703), 'sqlalchemy.bindparam', 'bindparam', (['"""id"""'], {}), "('id')\n", (7697, 7703), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7747, 7764), 'sqlalchemy.bindparam', 'bindparam', (['"""name"""'], {}), "('name')\n", (7756, 7764), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7813, 7835), 'sqlalchemy.bindparam', 'bindparam', (['"""tenant_id"""'], {}), "('tenant_id')\n", (7822, 7835), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((7881, 7900), 'sqlalchemy.bindparam', 'bindparam', (['"""region"""'], {}), "('region')\n", (7890, 7900), False, 'from sqlalchemy import delete, MetaData, insert, bindparam\n'), ((10803, 10826), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10824, 10826), False, 'import datetime\n'), ((10871, 10894), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10892, 10894), False, 'import datetime\n'), ((11528, 11551), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11549, 11551), False, 'import datetime\n'), ((11596, 11619), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11617, 11619), False, 'import datetime\n'), ((13182, 13205), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (13203, 13205), False, 'import datetime\n'), ((13250, 13273), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (13271, 13273), False, 'import datetime\n'), ((13566, 13589), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (13587, 13589), False, 'import datetime\n'), ((13634, 13657), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (13655, 13657), False, 'import datetime\n'), ((14760, 14783), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14781, 14783), False, 'import datetime\n'), ((14829, 14852), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14850, 14852), False, 'import datetime\n'), ((15285, 15308), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15306, 15308), False, 'import datetime\n'), ((15354, 15377), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15375, 15377), False, 'import datetime\n'), ((15810, 15833), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15831, 15833), False, 'import datetime\n'), ((15879, 15902), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15900, 15902), False, 'import datetime\n'), ((16337, 16360), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (16358, 16360), False, 'import datetime\n'), ((16406, 16429), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (16427, 16429), False, 'import datetime\n'), ((16864, 16887), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (16885, 16887), False, 'import datetime\n'), ((16933, 16956), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (16954, 16956), False, 'import datetime\n'), ((17287, 17310), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (17308, 17310), False, 'import datetime\n'), ((17355, 17378), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (17376, 17378), False, 'import datetime\n'), ((17708, 17731), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (17729, 17731), False, 'import datetime\n'), ((17776, 17799), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (17797, 17799), False, 'import datetime\n'), ((18129, 18152), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (18150, 18152), False, 'import datetime\n'), ((18197, 18220), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (18218, 18220), False, 'import datetime\n'), ((37403, 37426), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (37424, 37426), False, 'import datetime\n')] |
"""ГОСТ Р 51777-2001 Кабели для установок погружных электронасосов.
Общие технические условия (с Поправкой) """
import math
from scipy.optimize import fsolve
# TODO реализовать нормально ГОСТ, отрефакторить, учитывать разные формы кабеля
# TODO толщины слоев сделать
# TODO рисунок кабеля при инициализации
class Cable():
def __init__(self):
# T - длительно допустимая температура нагрева жил кабеля, C
# Tср - температура окружающей среды
# У.T.С.Т. - удельное тепловое сопротивление теплоперехода
self.sigma_liquid__Ccm_V = 104 # У.Т.С.Т. от поверхности кабеля в воду и от воды к ОТ, C * см^ 2 /Вт
self.sigma_oil = 425 # У.Т.С.Т. от поверхности кабеля в скважинную жидкость (нефть) и от нефти к ОТ, C*см^ 2/Вт
self.sigma_gas = 1100 # У.Т.С. теплоизлучению от поверхности кабеля в воздушную среду
self.sigma_polyethylene_Ccm_V = 400 # У.Т.С. полиэтилена, композиции полипропилена и сополимеров пропилена
self.sigma_thermoplastic_elastomers_Ccm_V = 600 # У.Т.С. термоэластопласта
self.sigma_rubber_Ccm_V = 750 # У.Т.С. резины
self.sigma_fluorocopolymers_Ccm_V = 1000 # У.Т.С. фторсополимеров
self.sigma_braids_ribbons_Ccm_V = 650 # У.Т.С. материалов оплеток и лент для наложения бандажей и подушек
self.sigma_plumbum_Ccm_V = 3 # У.Т.С. свинца и его сплавов
self.sigma_1isolation_Ccm_V = self.sigma_polyethylene_Ccm_V # удельное сопротивеление изоляции на C*см/Вт
self.sigma_2isolation_Ccm_V = self.sigma_polyethylene_Ccm_V
self.sigma_shell_Ccm_V = self.sigma_polyethylene_Ccm_V
self.sigma_bandage_Ccm_V = self.sigma_braids_ribbons_Ccm_V
self.sigma_pillow_Ccm_V = self.sigma_braids_ribbons_Ccm_V
self.sigma_o = 750 # материала оболочки
self.sigma_b = 1000 # материала бандажа поверх оболочки
self.sigma_p = 3 # материала подушки под броней
self.t_permanently_permissible_c = 120 # длительно допустимая температура нагрева жилы
self.R = 1.15 # электрическое сопротивление токопроводящей жилы
self.d_mm = 4.5 # номинальный диаметр токопроводящей жилы, мм
self.d1_first_isolation_mm = 7.5 # номинальный наружный диаметр первого слоя изоляции жилы, мм
self.d2_second_isolation_mm = 7.5 # номинальный наружный диаметр второго слоя изоляции жилы, мм
self.do_shell_mm = 10 # номинальный диаметр оболочки жилы, мм
self.db_bandage_mm = 11 # номинальный наружный диаметр бандажа поверх оболочки жилы
self.Dc_twist_mm = 20 # номинальный диаметр по скрутке жил, мм
self.Dp_pillow_mm = 12 # номинальный наружный диаметр подушки под броней
self.D_round_cable_mm = 30 # максимальный наружный диаметр круглого кабеля
# максимальные наружные размеры плоского кабеля
self.H_flat_cable_mm = 12.5 # толщина
self.B_flat_cable_mm = 36 # ширина
self.di_casing_mm = 120 # внутренний диаметр обсадной трубы скважины
self.alpha_1C = 0.0038 # температурный коэффициент электрического сопротивления материала
# токопроводящей жилы, С-1
self.cabel_type = 'Round' # Или 'Flat'
self.environment_type = 'Oil' # в нефти , 'Water' - в воде
def __thermal_resistance_cable__(self):
"""Расчет теплового сопротивления кабеля"""
result = (1 / 6 / math.pi *(self.sigma_1isolation_Ccm_V * math.log(self.d1_first_isolation_mm / self.d_mm) +
self.sigma_shell_Ccm_V * math.log(self.do_shell_mm / self.d1_first_isolation_mm ) +
self.sigma_bandage_Ccm_V * math.log(self.db_bandage_mm / self.do_shell_mm)) +
self.sigma_pillow_Ccm_V / 2 / math.pi * math.log( self.D_round_cable_mm/ self.Dc_twist_mm)) # TODO проверить диаметр подушки
return result
def __thermal_resistance_environment__(self):
"""Расчет теплового сопротивления окружающей среды"""
# Тепловое сопротивление по Б.2.2.1 в скважинной жидкости нефтяной скважины
if self.cabel_type == 'Round' and self.environment_type == 'Oil':
return (1 / 2 / math.pi * 10 * (self.sigma_oil *
(1 / self.D_round_cable_mm + 1 / self.di_casing_mm) +
self.sigma_gas / self.D_round_cable_mm))
if self.cabel_type == 'Flat' and self.environment_type == 'Oil':
return (1 / 2 * 10 * ( self.sigma_oil * (1 / (1.14 * self.H_flat_cable_mm + 2 * self.B_flat_cable_mm ) +
1 / math.pi / self.di_casing_mm) +
self.sigma_gas / (1.14 * self.H_flat_cable_mm + 2 * self.B_flat_cable_mm ) ))
def __electricial_resistance_cable_core__(self, R, t, alpha):
"""Расчет электрического сопротивления жилы кабеля"""
# электрическое сопротивление токопроводящей жилы, Ом
result = R * (1 + alpha * (t - 20))
return result
def __calc_i_a__(self, t, t_env, s_c, s_env, rt):
"""Расчет длительно допустимого тока"""
# длительно допустимый ток I, A
result = ((t - t_env) * 10 ** 5 / 3 / (s_c + s_env) / rt) ** (1 / 2)
return result
def __t_cabel_c__(self, tf_c, i, rt, s_cable, s_env):
"""Температура кабеля"""
result = (i ** 2) * (s_cable + s_env) * rt * 3 / 10 ** 5 + tf_c
return result
def calc_t_max_cable_c(self, tf_c, i_a):
"""
Расчет температуры кабеля
:param tf_c: Температура среды, С
:param i_a: Ток жилы кабеля, А
:return: температуры кабеля, С
"""
delta0 = tf_c * 0 + 10 # начальное приближение
def calc_temp_cable(val_t_cabel1):
s_c_val = self.__thermal_resistance_cable__()
s_env_val = self.__thermal_resistance_environment__()
rt_val = self.__electricial_resistance_cable_core__(self.R, val_t_cabel1, self.alpha_1C)
val_t_cabel2 = self.__t_cabel_c__(tf_c, i_a, rt_val, s_c_val, s_env_val)
return val_t_cabel2 - val_t_cabel1
result = fsolve(calc_temp_cable, delta0) # находит такое val_t_cabel1, при котором calc_temp_cable = 0
return result
def calc_i_max_a(self, t_max_c, t_env_c):
"""
Расчета максимально допустимой длительной силы тока кабеля
:param t_max_c: температурный индекс кабеля, максимальная температуры нагрева жил кабеля, С
:param t_env_c: температура среды
:return: длительно допустимый ток, А
"""
self.t_permanently_permissible_c = t_max_c
self.t_env_c = t_env_c
s_c_val = self.__thermal_resistance_cable__()
s_env_val = self.__thermal_resistance_environment__()
rt_val = self.__electricial_resistance_cable_core__(self.R, self.t_permanently_permissible_c, self.alpha_1C)
return self.__calc_i_a__(self.t_permanently_permissible_c, self.t_env_c, s_c_val, s_env_val, rt_val)
| [
"scipy.optimize.fsolve",
"math.log"
] | [((6163, 6194), 'scipy.optimize.fsolve', 'fsolve', (['calc_temp_cable', 'delta0'], {}), '(calc_temp_cable, delta0)\n', (6169, 6194), False, 'from scipy.optimize import fsolve\n'), ((3764, 3814), 'math.log', 'math.log', (['(self.D_round_cable_mm / self.Dc_twist_mm)'], {}), '(self.D_round_cable_mm / self.Dc_twist_mm)\n', (3772, 3814), False, 'import math\n'), ((3655, 3702), 'math.log', 'math.log', (['(self.db_bandage_mm / self.do_shell_mm)'], {}), '(self.db_bandage_mm / self.do_shell_mm)\n', (3663, 3702), False, 'import math\n'), ((3422, 3470), 'math.log', 'math.log', (['(self.d1_first_isolation_mm / self.d_mm)'], {}), '(self.d1_first_isolation_mm / self.d_mm)\n', (3430, 3470), False, 'import math\n'), ((3533, 3588), 'math.log', 'math.log', (['(self.do_shell_mm / self.d1_first_isolation_mm)'], {}), '(self.do_shell_mm / self.d1_first_isolation_mm)\n', (3541, 3588), False, 'import math\n')] |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import vplot
import scipy.signal as sig
#plt.rcParams["text.usetex"]=True
#plt.rcParams["text.latex.unicode"]=True
plt.rcParams.update({'font.size':16,'legend.fontsize':15})
import sys
# Check correct number of arguments
if (len(sys.argv) != 2):
print('ERROR: Incorrect number of arguments.')
print('Usage: '+sys.argv[0]+' <pdf | png>')
exit(1)
if (sys.argv[1] != 'pdf' and sys.argv[1] != 'png'):
print('ERROR: Unknown file format: '+sys.argv[1])
print('Options are: pdf, png')
exit(1)
out = vplot.GetOutput()
# Print final state
#print('Final: t=%.3f TUMan=%f TMan=%f TCMB=%f TCore=%f HflowUMan=%.1f HflowCMB=%.1f RadPowerTotal=%f RadPowerMan=%.1f RadPowerCore=%.1f MagMom=%f RIC=%f'%(out.earth.Time[-1],out.earth.TUMan[-1],out.earth.TMan[-1],out.earth.TCMB[-1],out.earth.TCore[-1],out.earth.HflowUMan[-1],out.earth.HflowCMB[-1],out.earth.RadPowerTotal[-1],out.earth.RadPowerMan[-1],out.earth.RadPowerCore[-1],out.earth.MagMom[-1],out.earth.RIC[-1]))
### Uncertainty ranges
TUMan_ra = np.array([1280.,1475.])+273. #[K] Jaupart (2015) Table 4.
TCMB_ra = np.array([3800,4200.]) #[K] Hirose (2013) Table 2.
HflowUMan_ra = np.array([35,41.]) #[TW] Jaupart (2015) Table 12.
HflowUMan_ra = np.array([35,41.]) #[TW] Jaupart (2015) Table 12.
HflowCMB_ra = np.array([5,17]) #[TW] Jaupart (2015) Table 12.
ViscUMan_ra = np.array([1.5e19,1.5e22])/3300. #[m^2/s] Paulson (2005) Fig 3.
ViscLMan_ra = np.array([3e19,1.5e22])/5200. #[m^2/s] Paulson (2005) Fig 3.
MeltMassFlux_ra = np.array([0.52,4*.52]) #[1e6 kg/s] Cogne (2004) 5-15 km^3/yr. Li (2015) ~20 km^3/yr
FMeltUMan_ra = np.array([0.07,0.15]) # refs?
### Hi/lo
TUMan_lo = np.abs(TUMan_ra[0]-out.earth.TUMan[-1])
TUMan_hi = np.abs(TUMan_ra[1]-out.earth.TUMan[-1])
TCMB_lo = np.abs(TCMB_ra[0]-out.earth.TCMB[-1])
TCMB_hi = np.abs(TCMB_ra[1]-out.earth.TCMB[-1])
HflowUMan_lo = np.abs(HflowUMan_ra[0]-out.earth.HflowUMan[-1])
HflowUMan_hi = np.abs(HflowUMan_ra[1]-out.earth.HflowUMan[-1])
HflowCMB_lo = np.abs(HflowCMB_ra[0]-out.earth.HflowCMB[-1])
HflowCMB_hi = np.abs(HflowCMB_ra[1]-out.earth.HflowCMB[-1])
ViscUMan_lo = np.abs(ViscUMan_ra[0]-out.earth.ViscUMan[-1])
ViscUMan_hi = np.abs(ViscUMan_ra[1]-out.earth.ViscUMan[-1])
ViscLMan_lo = np.abs(ViscLMan_ra[0]-out.earth.ViscLMan[-1])
ViscLMan_hi = np.abs(ViscLMan_ra[1]-out.earth.ViscLMan[-1])
MeltMassFlux_lo = np.abs(MeltMassFlux_ra[0]-out.earth.MeltMassFluxMan[-1]*1e-6)
MeltMassFlux_hi = np.abs(MeltMassFlux_ra[1]-out.earth.MeltMassFluxMan[-1]*1e-6)
FMeltUMan_lo = np.abs(FMeltUMan_ra[0]-out.earth.FMeltUMan[-1])
FMeltUMan_hi = np.abs(FMeltUMan_ra[1]-out.earth.FMeltUMan[-1])
# Plots
rows=3
cols=2
# Mantle Figure
nfig=1
fig = plt.figure(nfig, figsize=(10,15))
panel=1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.TMan,color=vplot.colors.red,linestyle='-',label=r'$T_{M}$')
plt.plot(out.earth.Time,out.earth.TUMan,color=vplot.colors.orange,linestyle='-',label=r'$T_{UM}$')
plt.errorbar(out.earth.Time[-1],out.earth.TUMan[-1],yerr=[[TUMan_lo],[TUMan_hi]],color=vplot.colors.orange,fmt='o')
plt.plot(out.earth.Time,out.earth.TLMan,color=vplot.colors.pale_blue,linestyle='-',label=r'$T_{LM}$')
plt.plot(out.earth.Time,out.earth.TCMB,color=vplot.colors.purple,linestyle='-',label=r'$T_{CMB}$')
plt.errorbar(out.earth.Time[-1],out.earth.TCMB[-1],yerr=[[TCMB_lo],[TCMB_hi]],color=vplot.colors.purple,fmt='-o')
plt.plot(out.earth.Time,out.earth.TCore,'k-',label=r'$T_{C}$')
plt.legend(loc='best',ncol=2,frameon=True,columnspacing=1)
plt.ylabel('Temperature (K)')
plt.xlabel('Time (Gyr)')
plt.ylim(0,10000)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.HflowUMan,color=vplot.colors.red,linestyle='-',label=r'$Q_{UMan}$')
plt.errorbar(out.earth.Time[-1],out.earth.HflowUMan[-1],yerr=[[HflowUMan_lo],[HflowUMan_hi]],color=vplot.colors.red,fmt='o')
plt.plot(out.earth.Time,out.earth.HflowCMB,color=vplot.colors.orange,linestyle='-',label=r'$Q_{CMB}$')
plt.errorbar(out.earth.Time[-1],out.earth.HflowCMB[-1],yerr=[[HflowCMB_lo],[HflowCMB_hi]],color=vplot.colors.orange,fmt='o')
plt.plot(out.earth.Time,out.earth.RadPowerMan,color=vplot.colors.pale_blue,linestyle='-',label=r'$Q_{Rad,Man}$')
plt.plot(out.earth.Time,out.earth.RadPowerCore,'k-',label=r'$Q_{Rad,Core}$')
plt.legend(loc='best',frameon=True)
plt.ylabel('Heat Flow (TW)')
plt.xlabel('Time (Gyr)')
plt.ylim(0,150)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.BLUMan,label=r'$\delta_{UM}$',color=vplot.colors.dark_blue)
plt.plot(out.earth.Time,out.earth.BLLMan,label=r'$\delta_{LM}$',color=vplot.colors.orange)
plt.legend(loc='best',frameon=True)
plt.ylabel(r'Boundary Layer Depths (km)')
plt.xlabel('Time (Gyr)')
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.semilogy(out.earth.Time,out.earth.ViscUMan,label=r'$\nu_{UM}$',color=vplot.colors.dark_blue)
plt.errorbar(out.earth.Time[-1],out.earth.ViscUMan[-1],yerr=[[ViscUMan_lo],[ViscUMan_hi]],color=vplot.colors.dark_blue,fmt='o')
plt.semilogy(out.earth.Time,out.earth.ViscLMan,label=r'$\nu_{LM}$',color=vplot.colors.orange)
plt.errorbar(out.earth.Time[-1],out.earth.ViscLMan[-1],yerr=[[ViscLMan_lo],[ViscLMan_hi]],color=vplot.colors.orange,fmt='o')
plt.legend(loc='best',frameon=True)
plt.ylabel(r'Mantle Viscosity ($m^2s^{-1}$)')
plt.xlabel('Time (Gyr)')
plt.ylim(1e12,1e19)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.FMeltUMan,color=vplot.colors.dark_blue)
plt.errorbar(out.earth.Time[-1],out.earth.FMeltUMan[-1]*1e-6,yerr=[[FMeltUMan_lo],[FMeltUMan_hi]],color=vplot.colors.dark_blue,fmt='o')
plt.ylabel(r'Melt Fraction Upper Mantle (n.d.)')
plt.xlabel('Time (Gyr)')
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.MeltMassFluxMan*1e-6,color=vplot.colors.dark_blue)
plt.errorbar(out.earth.Time[-1],out.earth.MeltMassFluxMan[-1]*1e-6,yerr=[[MeltMassFlux_lo],[MeltMassFlux_hi]],color=vplot.colors.dark_blue,fmt='o')
plt.ylabel(r'Melt Mass Flux Mantle ($\times 10^6$ kg$/$s)')
plt.xlabel('Time (Gyr)')
plt.ylim(0,100)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
vplot.make_pretty(fig)
if (sys.argv[1] == 'pdf'):
plt.savefig('EarthInterior%d.pdf'%nfig)
if (sys.argv[1] == 'png'):
plt.savefig('EarthInterior%d.png'%nfig)
# Core Plots
rows=2
nfig += 1
fig = plt.figure(nfig, figsize=(10,10))
panel = 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.RIC,label='RIC')
plt.ylim(0,1500)
plt.ylabel(r'Inner Core Radius (km)')
plt.xlabel('Time (Gyr)')
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.CoreBuoyTherm*1e13,label='Thermal')
plt.plot(out.earth.Time,out.earth.CoreBuoyCompo*1e13,label='Compositional')
plt.plot(out.earth.Time,out.earth.CoreBuoyTotal*1e13,label='Total')
plt.legend(loc='best',frameon=True)
plt.ylabel(r'Core Buoyancy Flux ($\times10^{-13}$ m$^2/$s$^3$)')
plt.xlabel('Time (Gyr)')
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.MagMom,label='MagMom')
plt.ylim(0,2)
plt.ylabel('Magnetic Moment (E. Units)')
plt.xlabel('Time (Gyr)')
plt.xticks([0,1,2,3,4])
panel += 1
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.MagPauseRad)
plt.ylabel(r'Magnetopause Radius (E. Units)')
plt.xlabel('Time (Gyr)')
plt.xticks([0,1,2,3,4])
#panel += 1
#plt.subplot(rows,cols,panel)
#plt.plot(out.earth.Time,out.earth.ChiOC,label='ChiOC')
#plt.plot(out.earth.Time,out.earth.ChiIC,label='ChiIC')
#plt.ylim(0,0.2)
#plt.ylabel(r'Core Light Element Concentration')
#plt.xlabel('Time (Gyr)')
#plt.legend(loc='best',frameon=False)
vplot.make_pretty(fig)
if (sys.argv[1] == 'pdf'):
plt.savefig('EarthInterior%d.pdf'%nfig)
if (sys.argv[1] == 'png'):
plt.savefig('EarthInterior%d.png'%nfig)
plt.close()
| [
"numpy.abs",
"matplotlib.pyplot.semilogy",
"vplot.make_pretty",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"matplotlib.pyplot.rcParams.update",
"numpy.array",
"matplotlib.... | [((191, 252), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'font.size': 16, 'legend.fontsize': 15}"], {}), "({'font.size': 16, 'legend.fontsize': 15})\n", (210, 252), True, 'import matplotlib.pyplot as plt\n'), ((595, 612), 'vplot.GetOutput', 'vplot.GetOutput', ([], {}), '()\n', (610, 612), False, 'import vplot\n'), ((1165, 1189), 'numpy.array', 'np.array', (['[3800, 4200.0]'], {}), '([3800, 4200.0])\n', (1173, 1189), True, 'import numpy as np\n'), ((1245, 1265), 'numpy.array', 'np.array', (['[35, 41.0]'], {}), '([35, 41.0])\n', (1253, 1265), True, 'import numpy as np\n'), ((1323, 1343), 'numpy.array', 'np.array', (['[35, 41.0]'], {}), '([35, 41.0])\n', (1331, 1343), True, 'import numpy as np\n'), ((1401, 1418), 'numpy.array', 'np.array', (['[5, 17]'], {}), '([5, 17])\n', (1409, 1418), True, 'import numpy as np\n'), ((1640, 1666), 'numpy.array', 'np.array', (['[0.52, 4 * 0.52]'], {}), '([0.52, 4 * 0.52])\n', (1648, 1666), True, 'import numpy as np\n'), ((1747, 1769), 'numpy.array', 'np.array', (['[0.07, 0.15]'], {}), '([0.07, 0.15])\n', (1755, 1769), True, 'import numpy as np\n'), ((1810, 1851), 'numpy.abs', 'np.abs', (['(TUMan_ra[0] - out.earth.TUMan[-1])'], {}), '(TUMan_ra[0] - out.earth.TUMan[-1])\n', (1816, 1851), True, 'import numpy as np\n'), ((1861, 1902), 'numpy.abs', 'np.abs', (['(TUMan_ra[1] - out.earth.TUMan[-1])'], {}), '(TUMan_ra[1] - out.earth.TUMan[-1])\n', (1867, 1902), True, 'import numpy as np\n'), ((1911, 1950), 'numpy.abs', 'np.abs', (['(TCMB_ra[0] - out.earth.TCMB[-1])'], {}), '(TCMB_ra[0] - out.earth.TCMB[-1])\n', (1917, 1950), True, 'import numpy as np\n'), ((1959, 1998), 'numpy.abs', 'np.abs', (['(TCMB_ra[1] - out.earth.TCMB[-1])'], {}), '(TCMB_ra[1] - out.earth.TCMB[-1])\n', (1965, 1998), True, 'import numpy as np\n'), ((2012, 2061), 'numpy.abs', 'np.abs', (['(HflowUMan_ra[0] - out.earth.HflowUMan[-1])'], {}), '(HflowUMan_ra[0] - out.earth.HflowUMan[-1])\n', (2018, 2061), True, 'import numpy as np\n'), ((2075, 2124), 'numpy.abs', 'np.abs', (['(HflowUMan_ra[1] - out.earth.HflowUMan[-1])'], {}), '(HflowUMan_ra[1] - out.earth.HflowUMan[-1])\n', (2081, 2124), True, 'import numpy as np\n'), ((2137, 2184), 'numpy.abs', 'np.abs', (['(HflowCMB_ra[0] - out.earth.HflowCMB[-1])'], {}), '(HflowCMB_ra[0] - out.earth.HflowCMB[-1])\n', (2143, 2184), True, 'import numpy as np\n'), ((2197, 2244), 'numpy.abs', 'np.abs', (['(HflowCMB_ra[1] - out.earth.HflowCMB[-1])'], {}), '(HflowCMB_ra[1] - out.earth.HflowCMB[-1])\n', (2203, 2244), True, 'import numpy as np\n'), ((2257, 2304), 'numpy.abs', 'np.abs', (['(ViscUMan_ra[0] - out.earth.ViscUMan[-1])'], {}), '(ViscUMan_ra[0] - out.earth.ViscUMan[-1])\n', (2263, 2304), True, 'import numpy as np\n'), ((2317, 2364), 'numpy.abs', 'np.abs', (['(ViscUMan_ra[1] - out.earth.ViscUMan[-1])'], {}), '(ViscUMan_ra[1] - out.earth.ViscUMan[-1])\n', (2323, 2364), True, 'import numpy as np\n'), ((2377, 2424), 'numpy.abs', 'np.abs', (['(ViscLMan_ra[0] - out.earth.ViscLMan[-1])'], {}), '(ViscLMan_ra[0] - out.earth.ViscLMan[-1])\n', (2383, 2424), True, 'import numpy as np\n'), ((2437, 2484), 'numpy.abs', 'np.abs', (['(ViscLMan_ra[1] - out.earth.ViscLMan[-1])'], {}), '(ViscLMan_ra[1] - out.earth.ViscLMan[-1])\n', (2443, 2484), True, 'import numpy as np\n'), ((2501, 2567), 'numpy.abs', 'np.abs', (['(MeltMassFlux_ra[0] - out.earth.MeltMassFluxMan[-1] * 1e-06)'], {}), '(MeltMassFlux_ra[0] - out.earth.MeltMassFluxMan[-1] * 1e-06)\n', (2507, 2567), True, 'import numpy as np\n'), ((2581, 2647), 'numpy.abs', 'np.abs', (['(MeltMassFlux_ra[1] - out.earth.MeltMassFluxMan[-1] * 1e-06)'], {}), '(MeltMassFlux_ra[1] - out.earth.MeltMassFluxMan[-1] * 1e-06)\n', (2587, 2647), True, 'import numpy as np\n'), ((2658, 2707), 'numpy.abs', 'np.abs', (['(FMeltUMan_ra[0] - out.earth.FMeltUMan[-1])'], {}), '(FMeltUMan_ra[0] - out.earth.FMeltUMan[-1])\n', (2664, 2707), True, 'import numpy as np\n'), ((2721, 2770), 'numpy.abs', 'np.abs', (['(FMeltUMan_ra[1] - out.earth.FMeltUMan[-1])'], {}), '(FMeltUMan_ra[1] - out.earth.FMeltUMan[-1])\n', (2727, 2770), True, 'import numpy as np\n'), ((2821, 2855), 'matplotlib.pyplot.figure', 'plt.figure', (['nfig'], {'figsize': '(10, 15)'}), '(nfig, figsize=(10, 15))\n', (2831, 2855), True, 'import matplotlib.pyplot as plt\n'), ((2863, 2893), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (2874, 2893), True, 'import matplotlib.pyplot as plt\n'), ((2892, 2993), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.TMan'], {'color': 'vplot.colors.red', 'linestyle': '"""-"""', 'label': '"""$T_{M}$"""'}), "(out.earth.Time, out.earth.TMan, color=vplot.colors.red, linestyle=\n '-', label='$T_{M}$')\n", (2900, 2993), True, 'import matplotlib.pyplot as plt\n'), ((2986, 3091), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.TUMan'], {'color': 'vplot.colors.orange', 'linestyle': '"""-"""', 'label': '"""$T_{UM}$"""'}), "(out.earth.Time, out.earth.TUMan, color=vplot.colors.orange,\n linestyle='-', label='$T_{UM}$')\n", (2994, 3091), True, 'import matplotlib.pyplot as plt\n'), ((3085, 3210), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', 'out.earth.TUMan[-1]'], {'yerr': '[[TUMan_lo], [TUMan_hi]]', 'color': 'vplot.colors.orange', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.TUMan[-1], yerr=[[TUMan_lo], [\n TUMan_hi]], color=vplot.colors.orange, fmt='o')\n", (3097, 3210), True, 'import matplotlib.pyplot as plt\n'), ((3201, 3309), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.TLMan'], {'color': 'vplot.colors.pale_blue', 'linestyle': '"""-"""', 'label': '"""$T_{LM}$"""'}), "(out.earth.Time, out.earth.TLMan, color=vplot.colors.pale_blue,\n linestyle='-', label='$T_{LM}$')\n", (3209, 3309), True, 'import matplotlib.pyplot as plt\n'), ((3303, 3408), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.TCMB'], {'color': 'vplot.colors.purple', 'linestyle': '"""-"""', 'label': '"""$T_{CMB}$"""'}), "(out.earth.Time, out.earth.TCMB, color=vplot.colors.purple,\n linestyle='-', label='$T_{CMB}$')\n", (3311, 3408), True, 'import matplotlib.pyplot as plt\n'), ((3402, 3525), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', 'out.earth.TCMB[-1]'], {'yerr': '[[TCMB_lo], [TCMB_hi]]', 'color': 'vplot.colors.purple', 'fmt': '"""-o"""'}), "(out.earth.Time[-1], out.earth.TCMB[-1], yerr=[[TCMB_lo], [\n TCMB_hi]], color=vplot.colors.purple, fmt='-o')\n", (3414, 3525), True, 'import matplotlib.pyplot as plt\n'), ((3516, 3580), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.TCore', '"""k-"""'], {'label': '"""$T_{C}$"""'}), "(out.earth.Time, out.earth.TCore, 'k-', label='$T_{C}$')\n", (3524, 3580), True, 'import matplotlib.pyplot as plt\n'), ((3579, 3640), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""', 'ncol': '(2)', 'frameon': '(True)', 'columnspacing': '(1)'}), "(loc='best', ncol=2, frameon=True, columnspacing=1)\n", (3589, 3640), True, 'import matplotlib.pyplot as plt\n'), ((3638, 3667), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Temperature (K)"""'], {}), "('Temperature (K)')\n", (3648, 3667), True, 'import matplotlib.pyplot as plt\n'), ((3668, 3692), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (3678, 3692), True, 'import matplotlib.pyplot as plt\n'), ((3693, 3711), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(10000)'], {}), '(0, 10000)\n', (3701, 3711), True, 'import matplotlib.pyplot as plt\n'), ((3711, 3727), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(4.6)'], {}), '(0, 4.6)\n', (3719, 3727), True, 'import matplotlib.pyplot as plt\n'), ((3727, 3754), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (3737, 3754), True, 'import matplotlib.pyplot as plt\n'), ((3762, 3792), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (3773, 3792), True, 'import matplotlib.pyplot as plt\n'), ((3791, 3899), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.HflowUMan'], {'color': 'vplot.colors.red', 'linestyle': '"""-"""', 'label': '"""$Q_{UMan}$"""'}), "(out.earth.Time, out.earth.HflowUMan, color=vplot.colors.red,\n linestyle='-', label='$Q_{UMan}$')\n", (3799, 3899), True, 'import matplotlib.pyplot as plt\n'), ((3893, 4027), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', 'out.earth.HflowUMan[-1]'], {'yerr': '[[HflowUMan_lo], [HflowUMan_hi]]', 'color': 'vplot.colors.red', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.HflowUMan[-1], yerr=[[\n HflowUMan_lo], [HflowUMan_hi]], color=vplot.colors.red, fmt='o')\n", (3905, 4027), True, 'import matplotlib.pyplot as plt\n'), ((4018, 4127), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.HflowCMB'], {'color': 'vplot.colors.orange', 'linestyle': '"""-"""', 'label': '"""$Q_{CMB}$"""'}), "(out.earth.Time, out.earth.HflowCMB, color=vplot.colors.orange,\n linestyle='-', label='$Q_{CMB}$')\n", (4026, 4127), True, 'import matplotlib.pyplot as plt\n'), ((4121, 4255), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', 'out.earth.HflowCMB[-1]'], {'yerr': '[[HflowCMB_lo], [HflowCMB_hi]]', 'color': 'vplot.colors.orange', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.HflowCMB[-1], yerr=[[HflowCMB_lo\n ], [HflowCMB_hi]], color=vplot.colors.orange, fmt='o')\n", (4133, 4255), True, 'import matplotlib.pyplot as plt\n'), ((4246, 4366), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.RadPowerMan'], {'color': 'vplot.colors.pale_blue', 'linestyle': '"""-"""', 'label': '"""$Q_{Rad,Man}$"""'}), "(out.earth.Time, out.earth.RadPowerMan, color=vplot.colors.\n pale_blue, linestyle='-', label='$Q_{Rad,Man}$')\n", (4254, 4366), True, 'import matplotlib.pyplot as plt\n'), ((4359, 4437), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.RadPowerCore', '"""k-"""'], {'label': '"""$Q_{Rad,Core}$"""'}), "(out.earth.Time, out.earth.RadPowerCore, 'k-', label='$Q_{Rad,Core}$')\n", (4367, 4437), True, 'import matplotlib.pyplot as plt\n'), ((4436, 4472), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""', 'frameon': '(True)'}), "(loc='best', frameon=True)\n", (4446, 4472), True, 'import matplotlib.pyplot as plt\n'), ((4472, 4500), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Heat Flow (TW)"""'], {}), "('Heat Flow (TW)')\n", (4482, 4500), True, 'import matplotlib.pyplot as plt\n'), ((4501, 4525), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (4511, 4525), True, 'import matplotlib.pyplot as plt\n'), ((4526, 4542), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(150)'], {}), '(0, 150)\n', (4534, 4542), True, 'import matplotlib.pyplot as plt\n'), ((4542, 4558), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(4.6)'], {}), '(0, 4.6)\n', (4550, 4558), True, 'import matplotlib.pyplot as plt\n'), ((4558, 4585), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (4568, 4585), True, 'import matplotlib.pyplot as plt\n'), ((4593, 4623), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (4604, 4623), True, 'import matplotlib.pyplot as plt\n'), ((4622, 4723), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.BLUMan'], {'label': '"""$\\\\delta_{UM}$"""', 'color': 'vplot.colors.dark_blue'}), "(out.earth.Time, out.earth.BLUMan, label='$\\\\delta_{UM}$', color=\n vplot.colors.dark_blue)\n", (4630, 4723), True, 'import matplotlib.pyplot as plt\n'), ((4716, 4814), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.BLLMan'], {'label': '"""$\\\\delta_{LM}$"""', 'color': 'vplot.colors.orange'}), "(out.earth.Time, out.earth.BLLMan, label='$\\\\delta_{LM}$', color=\n vplot.colors.orange)\n", (4724, 4814), True, 'import matplotlib.pyplot as plt\n'), ((4807, 4843), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""', 'frameon': '(True)'}), "(loc='best', frameon=True)\n", (4817, 4843), True, 'import matplotlib.pyplot as plt\n'), ((4843, 4883), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Boundary Layer Depths (km)"""'], {}), "('Boundary Layer Depths (km)')\n", (4853, 4883), True, 'import matplotlib.pyplot as plt\n'), ((4885, 4909), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (4895, 4909), True, 'import matplotlib.pyplot as plt\n'), ((4910, 4926), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(4.6)'], {}), '(0, 4.6)\n', (4918, 4926), True, 'import matplotlib.pyplot as plt\n'), ((4926, 4953), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (4936, 4953), True, 'import matplotlib.pyplot as plt\n'), ((4961, 4991), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (4972, 4991), True, 'import matplotlib.pyplot as plt\n'), ((4990, 5094), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['out.earth.Time', 'out.earth.ViscUMan'], {'label': '"""$\\\\nu_{UM}$"""', 'color': 'vplot.colors.dark_blue'}), "(out.earth.Time, out.earth.ViscUMan, label='$\\\\nu_{UM}$', color\n =vplot.colors.dark_blue)\n", (5002, 5094), True, 'import matplotlib.pyplot as plt\n'), ((5087, 5224), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', 'out.earth.ViscUMan[-1]'], {'yerr': '[[ViscUMan_lo], [ViscUMan_hi]]', 'color': 'vplot.colors.dark_blue', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.ViscUMan[-1], yerr=[[ViscUMan_lo\n ], [ViscUMan_hi]], color=vplot.colors.dark_blue, fmt='o')\n", (5099, 5224), True, 'import matplotlib.pyplot as plt\n'), ((5215, 5316), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['out.earth.Time', 'out.earth.ViscLMan'], {'label': '"""$\\\\nu_{LM}$"""', 'color': 'vplot.colors.orange'}), "(out.earth.Time, out.earth.ViscLMan, label='$\\\\nu_{LM}$', color\n =vplot.colors.orange)\n", (5227, 5316), True, 'import matplotlib.pyplot as plt\n'), ((5309, 5443), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', 'out.earth.ViscLMan[-1]'], {'yerr': '[[ViscLMan_lo], [ViscLMan_hi]]', 'color': 'vplot.colors.orange', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.ViscLMan[-1], yerr=[[ViscLMan_lo\n ], [ViscLMan_hi]], color=vplot.colors.orange, fmt='o')\n", (5321, 5443), True, 'import matplotlib.pyplot as plt\n'), ((5434, 5470), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""', 'frameon': '(True)'}), "(loc='best', frameon=True)\n", (5444, 5470), True, 'import matplotlib.pyplot as plt\n'), ((5470, 5514), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mantle Viscosity ($m^2s^{-1}$)"""'], {}), "('Mantle Viscosity ($m^2s^{-1}$)')\n", (5480, 5514), True, 'import matplotlib.pyplot as plt\n'), ((5516, 5540), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (5526, 5540), True, 'import matplotlib.pyplot as plt\n'), ((5541, 5573), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(1000000000000.0)', '(1e+19)'], {}), '(1000000000000.0, 1e+19)\n', (5549, 5573), True, 'import matplotlib.pyplot as plt\n'), ((5561, 5577), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(4.6)'], {}), '(0, 4.6)\n', (5569, 5577), True, 'import matplotlib.pyplot as plt\n'), ((5577, 5604), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (5587, 5604), True, 'import matplotlib.pyplot as plt\n'), ((5612, 5642), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (5623, 5642), True, 'import matplotlib.pyplot as plt\n'), ((5641, 5716), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.FMeltUMan'], {'color': 'vplot.colors.dark_blue'}), '(out.earth.Time, out.earth.FMeltUMan, color=vplot.colors.dark_blue)\n', (5649, 5716), True, 'import matplotlib.pyplot as plt\n'), ((5715, 5863), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', '(out.earth.FMeltUMan[-1] * 1e-06)'], {'yerr': '[[FMeltUMan_lo], [FMeltUMan_hi]]', 'color': 'vplot.colors.dark_blue', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.FMeltUMan[-1] * 1e-06, yerr=[[\n FMeltUMan_lo], [FMeltUMan_hi]], color=vplot.colors.dark_blue, fmt='o')\n", (5727, 5863), True, 'import matplotlib.pyplot as plt\n'), ((5851, 5898), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Melt Fraction Upper Mantle (n.d.)"""'], {}), "('Melt Fraction Upper Mantle (n.d.)')\n", (5861, 5898), True, 'import matplotlib.pyplot as plt\n'), ((5900, 5924), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (5910, 5924), True, 'import matplotlib.pyplot as plt\n'), ((5925, 5941), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(4.6)'], {}), '(0, 4.6)\n', (5933, 5941), True, 'import matplotlib.pyplot as plt\n'), ((5941, 5968), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (5951, 5968), True, 'import matplotlib.pyplot as plt\n'), ((5976, 6006), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (5987, 6006), True, 'import matplotlib.pyplot as plt\n'), ((6005, 6099), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', '(out.earth.MeltMassFluxMan * 1e-06)'], {'color': 'vplot.colors.dark_blue'}), '(out.earth.Time, out.earth.MeltMassFluxMan * 1e-06, color=vplot.\n colors.dark_blue)\n', (6013, 6099), True, 'import matplotlib.pyplot as plt\n'), ((6090, 6254), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['out.earth.Time[-1]', '(out.earth.MeltMassFluxMan[-1] * 1e-06)'], {'yerr': '[[MeltMassFlux_lo], [MeltMassFlux_hi]]', 'color': 'vplot.colors.dark_blue', 'fmt': '"""o"""'}), "(out.earth.Time[-1], out.earth.MeltMassFluxMan[-1] * 1e-06,\n yerr=[[MeltMassFlux_lo], [MeltMassFlux_hi]], color=vplot.colors.\n dark_blue, fmt='o')\n", (6102, 6254), True, 'import matplotlib.pyplot as plt\n'), ((6238, 6297), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Melt Mass Flux Mantle ($\\\\times 10^6$ kg$/$s)"""'], {}), "('Melt Mass Flux Mantle ($\\\\times 10^6$ kg$/$s)')\n", (6248, 6297), True, 'import matplotlib.pyplot as plt\n'), ((6298, 6322), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (6308, 6322), True, 'import matplotlib.pyplot as plt\n'), ((6323, 6339), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(100)'], {}), '(0, 100)\n', (6331, 6339), True, 'import matplotlib.pyplot as plt\n'), ((6339, 6355), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(4.6)'], {}), '(0, 4.6)\n', (6347, 6355), True, 'import matplotlib.pyplot as plt\n'), ((6355, 6382), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (6365, 6382), True, 'import matplotlib.pyplot as plt\n'), ((6380, 6402), 'vplot.make_pretty', 'vplot.make_pretty', (['fig'], {}), '(fig)\n', (6397, 6402), False, 'import vplot\n'), ((6582, 6616), 'matplotlib.pyplot.figure', 'plt.figure', (['nfig'], {'figsize': '(10, 10)'}), '(nfig, figsize=(10, 10))\n', (6592, 6616), True, 'import matplotlib.pyplot as plt\n'), ((6626, 6656), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (6637, 6656), True, 'import matplotlib.pyplot as plt\n'), ((6655, 6707), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.RIC'], {'label': '"""RIC"""'}), "(out.earth.Time, out.earth.RIC, label='RIC')\n", (6663, 6707), True, 'import matplotlib.pyplot as plt\n'), ((6706, 6723), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(1500)'], {}), '(0, 1500)\n', (6714, 6723), True, 'import matplotlib.pyplot as plt\n'), ((6723, 6759), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Inner Core Radius (km)"""'], {}), "('Inner Core Radius (km)')\n", (6733, 6759), True, 'import matplotlib.pyplot as plt\n'), ((6761, 6785), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (6771, 6785), True, 'import matplotlib.pyplot as plt\n'), ((6797, 6827), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (6808, 6827), True, 'import matplotlib.pyplot as plt\n'), ((6826, 6916), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', '(out.earth.CoreBuoyTherm * 10000000000000.0)'], {'label': '"""Thermal"""'}), "(out.earth.Time, out.earth.CoreBuoyTherm * 10000000000000.0, label=\n 'Thermal')\n", (6834, 6916), True, 'import matplotlib.pyplot as plt\n'), ((6896, 6992), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', '(out.earth.CoreBuoyCompo * 10000000000000.0)'], {'label': '"""Compositional"""'}), "(out.earth.Time, out.earth.CoreBuoyCompo * 10000000000000.0, label=\n 'Compositional')\n", (6904, 6992), True, 'import matplotlib.pyplot as plt\n'), ((6972, 7060), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', '(out.earth.CoreBuoyTotal * 10000000000000.0)'], {'label': '"""Total"""'}), "(out.earth.Time, out.earth.CoreBuoyTotal * 10000000000000.0, label=\n 'Total')\n", (6980, 7060), True, 'import matplotlib.pyplot as plt\n'), ((7040, 7076), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""', 'frameon': '(True)'}), "(loc='best', frameon=True)\n", (7050, 7076), True, 'import matplotlib.pyplot as plt\n'), ((7076, 7140), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Core Buoyancy Flux ($\\\\times10^{-13}$ m$^2/$s$^3$)"""'], {}), "('Core Buoyancy Flux ($\\\\times10^{-13}$ m$^2/$s$^3$)')\n", (7086, 7140), True, 'import matplotlib.pyplot as plt\n'), ((7141, 7165), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (7151, 7165), True, 'import matplotlib.pyplot as plt\n'), ((7166, 7193), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (7176, 7193), True, 'import matplotlib.pyplot as plt\n'), ((7201, 7231), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (7212, 7231), True, 'import matplotlib.pyplot as plt\n'), ((7230, 7288), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.MagMom'], {'label': '"""MagMom"""'}), "(out.earth.Time, out.earth.MagMom, label='MagMom')\n", (7238, 7288), True, 'import matplotlib.pyplot as plt\n'), ((7287, 7301), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(2)'], {}), '(0, 2)\n', (7295, 7301), True, 'import matplotlib.pyplot as plt\n'), ((7301, 7341), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Magnetic Moment (E. Units)"""'], {}), "('Magnetic Moment (E. Units)')\n", (7311, 7341), True, 'import matplotlib.pyplot as plt\n'), ((7342, 7366), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (7352, 7366), True, 'import matplotlib.pyplot as plt\n'), ((7367, 7394), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (7377, 7394), True, 'import matplotlib.pyplot as plt\n'), ((7402, 7432), 'matplotlib.pyplot.subplot', 'plt.subplot', (['rows', 'cols', 'panel'], {}), '(rows, cols, panel)\n', (7413, 7432), True, 'import matplotlib.pyplot as plt\n'), ((7431, 7478), 'matplotlib.pyplot.plot', 'plt.plot', (['out.earth.Time', 'out.earth.MagPauseRad'], {}), '(out.earth.Time, out.earth.MagPauseRad)\n', (7439, 7478), True, 'import matplotlib.pyplot as plt\n'), ((7478, 7522), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Magnetopause Radius (E. Units)"""'], {}), "('Magnetopause Radius (E. Units)')\n", (7488, 7522), True, 'import matplotlib.pyplot as plt\n'), ((7524, 7548), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (Gyr)"""'], {}), "('Time (Gyr)')\n", (7534, 7548), True, 'import matplotlib.pyplot as plt\n'), ((7549, 7576), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (7559, 7576), True, 'import matplotlib.pyplot as plt\n'), ((7858, 7880), 'vplot.make_pretty', 'vplot.make_pretty', (['fig'], {}), '(fig)\n', (7875, 7880), False, 'import vplot\n'), ((8023, 8034), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8032, 8034), True, 'import matplotlib.pyplot as plt\n'), ((1090, 1116), 'numpy.array', 'np.array', (['[1280.0, 1475.0]'], {}), '([1280.0, 1475.0])\n', (1098, 1116), True, 'import numpy as np\n'), ((1480, 1508), 'numpy.array', 'np.array', (['[1.5e+19, 1.5e+22]'], {}), '([1.5e+19, 1.5e+22])\n', (1488, 1508), True, 'import numpy as np\n'), ((1558, 1584), 'numpy.array', 'np.array', (['[3e+19, 1.5e+22]'], {}), '([3e+19, 1.5e+22])\n', (1566, 1584), True, 'import numpy as np\n'), ((6434, 6475), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('EarthInterior%d.pdf' % nfig)"], {}), "('EarthInterior%d.pdf' % nfig)\n", (6445, 6475), True, 'import matplotlib.pyplot as plt\n'), ((6505, 6546), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('EarthInterior%d.png' % nfig)"], {}), "('EarthInterior%d.png' % nfig)\n", (6516, 6546), True, 'import matplotlib.pyplot as plt\n'), ((7912, 7953), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('EarthInterior%d.pdf' % nfig)"], {}), "('EarthInterior%d.pdf' % nfig)\n", (7923, 7953), True, 'import matplotlib.pyplot as plt\n'), ((7983, 8024), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('EarthInterior%d.png' % nfig)"], {}), "('EarthInterior%d.png' % nfig)\n", (7994, 8024), True, 'import matplotlib.pyplot as plt\n')] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import array
import numpy as np
from numcodecs.compat import buffer_tobytes
def test_buffer_tobytes():
bufs = [
b'adsdasdas',
bytes(20),
np.arange(100),
array.array('l', b'qwertyuiqwertyui')
]
for buf in bufs:
b = buffer_tobytes(buf)
assert isinstance(b, bytes)
| [
"numcodecs.compat.buffer_tobytes",
"array.array",
"numpy.arange"
] | [((260, 274), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (269, 274), True, 'import numpy as np\n'), ((284, 321), 'array.array', 'array.array', (['"""l"""', "b'qwertyuiqwertyui'"], {}), "('l', b'qwertyuiqwertyui')\n", (295, 321), False, 'import array\n'), ((361, 380), 'numcodecs.compat.buffer_tobytes', 'buffer_tobytes', (['buf'], {}), '(buf)\n', (375, 380), False, 'from numcodecs.compat import buffer_tobytes\n')] |
from app.cache import get_from_cache, set_into_cache, delete_from_cache
import logging as _logging
import hashlib, json
logging = _logging.getLogger("matrufsc2_decorators_cacheable")
logging.setLevel(_logging.DEBUG)
__author__ = 'fernando'
CACHE_CACHEABLE_KEY = "cache/functions/%s/%s"
def cacheable(consider_only=None):
def decorator(fn):
def dec(filters, **kwargs):
if consider_only is not None and filters:
filters = {k: filters[k] for k in filters.iterkeys() if k in consider_only}
filters_hash = hashlib.sha1(json.dumps(filters, sort_keys=True)).hexdigest()
cache_key = CACHE_CACHEABLE_KEY % (
fn.__name__,
filters_hash
)
persistent = kwargs.get("persistent", True)
if kwargs.get("overwrite"):
update_with = kwargs.get("update_with")
if update_with:
result = get_from_cache(cache_key, persistent=persistent).get_result()
if not result:
result = update_with
if type(result) == type(update_with):
logging.debug("Updating cache with passed in value")
set_into_cache(cache_key, update_with, persistent=persistent).get_result()
else:
raise Exception("Types differents: %s != %s" % (str(type(result)), str(type(update_with))))
elif kwargs.get("exclude"):
return delete_from_cache(cache_key, persistent=persistent).get_result()
else:
result = None
else:
result = get_from_cache(cache_key, persistent=persistent).get_result()
if not result:
result = fn(filters)
set_into_cache(cache_key, result, persistent=persistent).get_result()
return result
dec.__name__ = fn.__name__
dec.__doc__ = fn.__doc__
return dec
return decorator | [
"logging.getLogger",
"json.dumps",
"app.cache.delete_from_cache",
"app.cache.set_into_cache",
"app.cache.get_from_cache"
] | [((131, 183), 'logging.getLogger', '_logging.getLogger', (['"""matrufsc2_decorators_cacheable"""'], {}), "('matrufsc2_decorators_cacheable')\n", (149, 183), True, 'import logging as _logging\n'), ((570, 605), 'json.dumps', 'json.dumps', (['filters'], {'sort_keys': '(True)'}), '(filters, sort_keys=True)\n', (580, 605), False, 'import hashlib, json\n'), ((1705, 1753), 'app.cache.get_from_cache', 'get_from_cache', (['cache_key'], {'persistent': 'persistent'}), '(cache_key, persistent=persistent)\n', (1719, 1753), False, 'from app.cache import get_from_cache, set_into_cache, delete_from_cache\n'), ((1847, 1903), 'app.cache.set_into_cache', 'set_into_cache', (['cache_key', 'result'], {'persistent': 'persistent'}), '(cache_key, result, persistent=persistent)\n', (1861, 1903), False, 'from app.cache import get_from_cache, set_into_cache, delete_from_cache\n'), ((952, 1000), 'app.cache.get_from_cache', 'get_from_cache', (['cache_key'], {'persistent': 'persistent'}), '(cache_key, persistent=persistent)\n', (966, 1000), False, 'from app.cache import get_from_cache, set_into_cache, delete_from_cache\n'), ((1253, 1314), 'app.cache.set_into_cache', 'set_into_cache', (['cache_key', 'update_with'], {'persistent': 'persistent'}), '(cache_key, update_with, persistent=persistent)\n', (1267, 1314), False, 'from app.cache import get_from_cache, set_into_cache, delete_from_cache\n'), ((1541, 1592), 'app.cache.delete_from_cache', 'delete_from_cache', (['cache_key'], {'persistent': 'persistent'}), '(cache_key, persistent=persistent)\n', (1558, 1592), False, 'from app.cache import get_from_cache, set_into_cache, delete_from_cache\n')] |
import warnings
from dataclasses import dataclass
from typing import List, Optional
import torch
from falkon.utils.stream_utils import sync_current_stream
from falkon.mmv_ops.utils import _get_gpu_info, create_output_mat, _start_wait_processes
from falkon.options import FalkonOptions, BaseOptions
from falkon.utils import decide_cuda
from falkon.utils.helpers import sizeof_dtype, calc_gpu_block_sizes
from pykeops.torch import Genred
@dataclass(frozen=True)
class ArgsFmmv:
X1: torch.Tensor
X2: torch.Tensor
v: torch.Tensor
other_vars: List[torch.Tensor]
out: torch.Tensor
gpu_ram: float
backend: str
function: callable
def _keops_dtype(dtype: torch.dtype) -> str:
"""Returns a string which represents the given data type.
The string representation is necessary for KeOps which doesn't
like type objects.
"""
if dtype == torch.float64:
return 'float64'
elif dtype == torch.float32:
return 'float32'
else:
raise NotImplementedError("Data type %s not recognized." % (dtype))
def _decide_backend(opt: BaseOptions, num_dim: int) -> str:
"""Switch between CPU and GPU backend for KeOps
"""
if not decide_cuda(opt):
return 'CPU'
else:
return 'GPU_1D'
def _estimate_split(N, M, D, T, R, ds):
"""Estimate the splits along dimensions N and M for a MVM to fit in memory
The operations consist of computing the product between a kernel
matrix (from a N*D and a M*D matrix) and a 'vector' of shape M*T
This typically requires storage of the input and output matrices,
which occupies (M + N)*(D + T) memory locations plus some intermediate
buffers to perform computations.
TODO: It is not clear how much intermediate memory KeOps requires;
the only thing that is certain is that it is quadratic in D.
For now we sidestep this issue by using a smaller R than what is
actually available in GPU memory.
This function calculates the split along N and M into blocks of size n*m
so that we can compute the kernel-vector product between such blocks
and still fit in GPU memory.
Parameters
-----------
- N : int
The first dimension of the kernel matrix
- M : int
The second dimension of the kernel matrix
- D : int
The data dimensionality
- T : int
The number of output columns
- R : float
The amount of memory available (in bytes)
- ds : int
The size in bytes of each element in the data matrices
(e.g. 4 if the data is in single precision).
Returns
--------
- n : int
The block size to be used along the first dimension
- m : int
The block size along the second dimension of the kernel
matrix
Raises
-------
RuntimeError
If the available memory `R` is insufficient to store even the smallest
possible input matrices. This may happen if `D` is very large since we
do not perform any splitting along `D`.
Notes
------
We find 'good' values of M, N such that
N*(D+T) + M*(D+T) <= R/ds
"""
R = R / ds
# We have a linear equation in two variables (N, M)
slope = -1
intercept = R / (D + T)
slack_points = 10
# We try to pick a point at the edges such that only one kind of split
# is necessary
if N < intercept - 1:
M = min(M, intercept + slope * N)
elif M < intercept - 1:
N = min(N, intercept + slope * M)
else:
# All points on the slope such that N, M > 0 are possible
N = intercept - slack_points - 1
M = intercept + slope * N
if N <= 0 or M <= 0:
raise RuntimeError(
"Insufficient available GPU "
"memory (available %.2fGB)" % (R * ds / 2 ** 30))
return int(N), int(M)
def _single_gpu_method(proc_idx, queue, device_id):
a: ArgsFmmv = queue.get()
backend = a.backend
X1 = a.X1
X2 = a.X2
v = a.v
oout = a.out
other_vars = a.other_vars
fn = a.function
R = a.gpu_ram
N, D = X1.shape
M = X2.shape[0]
T = v.shape[1]
device = torch.device(f"cuda:{device_id}")
# Second round of subdivision (only if necessary due to RAM constraints)
n, m = _estimate_split(N, M, D, T, R, sizeof_dtype(X1.dtype))
other_vars_dev = [ov.to(device, copy=False) for ov in other_vars]
out_ic = oout.device.index == device_id
# Process the two rounds of splitting with a nested loop.
with torch.cuda.device(device_id):
for mi in range(0, M, m):
ml = min(m, M - mi)
if ml != M and mi > 0: # Then we must create a temporary output array
out = torch.empty_like(oout)
else:
out = oout
cX2 = X2[mi:mi + ml, :].to(device, copy=False)
cv = v[mi:mi + ml, :].to(device, copy=False)
for ni in range(0, N, n):
nl = min(n, N - ni)
cX1 = X1[ni:ni + nl, :].to(device, copy=False)
cout = out[ni: ni + nl, :].to(device, copy=False)
variables = [cX1, cX2, cv] + other_vars_dev
fn(*variables, out=cout, device_id=device_id, backend=backend)
if not out_ic:
out[ni: ni + nl, :].copy_(cout)
if ml != M and mi > 0:
oout.add_(out)
return oout
def run_keops_mmv(X1: torch.Tensor,
X2: torch.Tensor,
v: torch.Tensor,
other_vars: List[torch.Tensor],
out: Optional[torch.Tensor],
formula: str,
aliases: List[str],
axis: int,
reduction: str = 'Sum',
opt: Optional[FalkonOptions] = None) -> torch.Tensor:
if opt is None:
opt = FalkonOptions()
# Choose backend
N, D = X1.shape
T = v.shape[1]
backend = _decide_backend(opt, D)
dtype = _keops_dtype(X1.dtype)
data_devs = [X1.device, X2.device, v.device]
if any([ddev.type == 'cuda' for ddev in data_devs]) and (not backend.startswith("GPU")):
warnings.warn("KeOps backend was chosen to be CPU, but GPU input tensors found. "
"Defaulting to 'GPU_1D' backend. To force usage of the CPU backend, "
"please pass CPU tensors; to avoid this warning if the GPU backend is "
"desired, check your options (i.e. set 'use_cpu=False').")
backend = "GPU_1D"
differentiable = any(
[X1.requires_grad, X2.requires_grad, v.requires_grad] +
[o.requires_grad for o in other_vars]
)
if differentiable:
from falkon.kernels.tiling_red import TilingGenred
fn = TilingGenred(formula, aliases, reduction_op='Sum', axis=1, dtype=dtype,
dtype_acc="auto", sum_scheme="auto", opt=opt)
return fn(X1, X2, v, *other_vars, out=out, backend=backend)
# Define formula wrapper
fn = Genred(formula, aliases,
reduction_op=reduction, axis=axis,
dtype=dtype, dtype_acc=opt.keops_acc_dtype,
sum_scheme=opt.keops_sum_scheme)
comp_dev_type = backend[:3].lower().replace('gpu', 'cuda') # 'cpu' or 'cuda'
out = create_output_mat(out, data_devs, is_sparse=False, shape=(N, T), dtype=X1.dtype,
comp_dev_type=comp_dev_type, other_mat=X1, output_stride="C")
if comp_dev_type == 'cpu' and all([ddev.type == 'cpu' for ddev in data_devs]): # incore CPU
variables = [X1, X2, v] + other_vars
out = fn(*variables, out=out, backend=backend)
elif comp_dev_type == 'cuda' and all([ddev.type == 'cuda' for ddev in data_devs]): # incore CUDA
variables = [X1, X2, v] + other_vars
device = data_devs[0]
with torch.cuda.device(device):
sync_current_stream(device)
out = fn(*variables, out=out, backend=backend)
else: # Out of core
# slack is high due to imprecise memory usage estimates for keops
gpu_info = _get_gpu_info(opt, slack=opt.keops_memory_slack)
block_sizes = calc_gpu_block_sizes(gpu_info, N)
# Create queues
args = [] # Arguments passed to each subprocess
for i, g in enumerate(gpu_info):
# First round of subdivision
bwidth = block_sizes[i + 1] - block_sizes[i]
if bwidth <= 0:
continue
args.append((ArgsFmmv(
X1=X1.narrow(0, block_sizes[i], bwidth),
X2=X2,
v=v,
out=out.narrow(0, block_sizes[i], bwidth),
other_vars=other_vars,
function=fn,
backend=backend,
gpu_ram=g.usable_memory
), g.Id))
_start_wait_processes(_single_gpu_method, args)
return out
| [
"torch.cuda.device",
"falkon.kernels.tiling_red.TilingGenred",
"falkon.utils.helpers.calc_gpu_block_sizes",
"dataclasses.dataclass",
"torch.empty_like",
"falkon.mmv_ops.utils._get_gpu_info",
"pykeops.torch.Genred",
"falkon.utils.helpers.sizeof_dtype",
"falkon.mmv_ops.utils._start_wait_processes",
... | [((442, 464), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (451, 464), False, 'from dataclasses import dataclass\n'), ((4159, 4192), 'torch.device', 'torch.device', (['f"""cuda:{device_id}"""'], {}), "(f'cuda:{device_id}')\n", (4171, 4192), False, 'import torch\n'), ((7039, 7179), 'pykeops.torch.Genred', 'Genred', (['formula', 'aliases'], {'reduction_op': 'reduction', 'axis': 'axis', 'dtype': 'dtype', 'dtype_acc': 'opt.keops_acc_dtype', 'sum_scheme': 'opt.keops_sum_scheme'}), '(formula, aliases, reduction_op=reduction, axis=axis, dtype=dtype,\n dtype_acc=opt.keops_acc_dtype, sum_scheme=opt.keops_sum_scheme)\n', (7045, 7179), False, 'from pykeops.torch import Genred\n'), ((7317, 7464), 'falkon.mmv_ops.utils.create_output_mat', 'create_output_mat', (['out', 'data_devs'], {'is_sparse': '(False)', 'shape': '(N, T)', 'dtype': 'X1.dtype', 'comp_dev_type': 'comp_dev_type', 'other_mat': 'X1', 'output_stride': '"""C"""'}), "(out, data_devs, is_sparse=False, shape=(N, T), dtype=X1.\n dtype, comp_dev_type=comp_dev_type, other_mat=X1, output_stride='C')\n", (7334, 7464), False, 'from falkon.mmv_ops.utils import _get_gpu_info, create_output_mat, _start_wait_processes\n'), ((1200, 1216), 'falkon.utils.decide_cuda', 'decide_cuda', (['opt'], {}), '(opt)\n', (1211, 1216), False, 'from falkon.utils import decide_cuda\n'), ((4313, 4335), 'falkon.utils.helpers.sizeof_dtype', 'sizeof_dtype', (['X1.dtype'], {}), '(X1.dtype)\n', (4325, 4335), False, 'from falkon.utils.helpers import sizeof_dtype, calc_gpu_block_sizes\n'), ((4524, 4552), 'torch.cuda.device', 'torch.cuda.device', (['device_id'], {}), '(device_id)\n', (4541, 4552), False, 'import torch\n'), ((5873, 5888), 'falkon.options.FalkonOptions', 'FalkonOptions', ([], {}), '()\n', (5886, 5888), False, 'from falkon.options import FalkonOptions, BaseOptions\n'), ((6173, 6456), 'warnings.warn', 'warnings.warn', (['"""KeOps backend was chosen to be CPU, but GPU input tensors found. Defaulting to \'GPU_1D\' backend. To force usage of the CPU backend, please pass CPU tensors; to avoid this warning if the GPU backend is desired, check your options (i.e. set \'use_cpu=False\')."""'], {}), '(\n "KeOps backend was chosen to be CPU, but GPU input tensors found. Defaulting to \'GPU_1D\' backend. To force usage of the CPU backend, please pass CPU tensors; to avoid this warning if the GPU backend is desired, check your options (i.e. set \'use_cpu=False\')."\n )\n', (6186, 6456), False, 'import warnings\n'), ((6788, 6909), 'falkon.kernels.tiling_red.TilingGenred', 'TilingGenred', (['formula', 'aliases'], {'reduction_op': '"""Sum"""', 'axis': '(1)', 'dtype': 'dtype', 'dtype_acc': '"""auto"""', 'sum_scheme': '"""auto"""', 'opt': 'opt'}), "(formula, aliases, reduction_op='Sum', axis=1, dtype=dtype,\n dtype_acc='auto', sum_scheme='auto', opt=opt)\n", (6800, 6909), False, 'from falkon.kernels.tiling_red import TilingGenred\n'), ((8120, 8168), 'falkon.mmv_ops.utils._get_gpu_info', '_get_gpu_info', (['opt'], {'slack': 'opt.keops_memory_slack'}), '(opt, slack=opt.keops_memory_slack)\n', (8133, 8168), False, 'from falkon.mmv_ops.utils import _get_gpu_info, create_output_mat, _start_wait_processes\n'), ((8191, 8224), 'falkon.utils.helpers.calc_gpu_block_sizes', 'calc_gpu_block_sizes', (['gpu_info', 'N'], {}), '(gpu_info, N)\n', (8211, 8224), False, 'from falkon.utils.helpers import sizeof_dtype, calc_gpu_block_sizes\n'), ((8865, 8912), 'falkon.mmv_ops.utils._start_wait_processes', '_start_wait_processes', (['_single_gpu_method', 'args'], {}), '(_single_gpu_method, args)\n', (8886, 8912), False, 'from falkon.mmv_ops.utils import _get_gpu_info, create_output_mat, _start_wait_processes\n'), ((4725, 4747), 'torch.empty_like', 'torch.empty_like', (['oout'], {}), '(oout)\n', (4741, 4747), False, 'import torch\n'), ((7876, 7901), 'torch.cuda.device', 'torch.cuda.device', (['device'], {}), '(device)\n', (7893, 7901), False, 'import torch\n'), ((7915, 7942), 'falkon.utils.stream_utils.sync_current_stream', 'sync_current_stream', (['device'], {}), '(device)\n', (7934, 7942), False, 'from falkon.utils.stream_utils import sync_current_stream\n')] |
'''
node.py
ancilla
Created by <NAME> (<EMAIL>) on 01/14/20
Copyright 2019 FrenzyLabs, LLC.
'''
import time
from .api import Api
from ..events import Event
from ...data.models import Service, Printer, Camera, ServiceAttachment, CameraRecording, Node
from ..response import AncillaError, AncillaResponse
import re
import math
import os
import json
MB = 1 << 20
BUFF_SIZE = 10 * MB
class NodeApi(Api):
def setup(self):
super().setup("/api")
# self.service.route('/services', 'GET', self.services)
self.service.route('/api/node', 'GET', self.get_node)
self.service.route('/api/node', 'PATCH', self.update_node)
self.service.route('/api/nodes', 'GET', self.discover_nodes)
self.service.route('/api/services', 'GET', self.services)
self.service.route('/api/recordings', 'GET', self.recordings)
self.service.route('/api/recordings/<recording_id>', 'GET', self.get_recording)
self.service.route('/api/recordings/<recording_id>', 'DELETE', self.delete_recording)
self.service.route('/api/recordings/<recording_id>/video', 'GET', self.get_video)
# self.service.route('/services/<service_id>/restart', 'GET', self.restart_service)
self.service.route('/api/attachments/<attachment_id>', 'PATCH', self.update_attachment)
self.service.route('/api/services/<service_id>', 'PATCH', self.update_service_model)
self.service.route('/api/services/<service_id>', 'DELETE', self.delete_service)
self.service.route('/api/services/<service_id>/stop', 'GET', self.stop_service)
self.service.route('/api/services/camera', 'GET', self.list_cameras)
self.service.route('/api/services/camera', 'POST', self.create_camera)
self.service.route('/api/services/printer', 'POST', self.create_printer)
self.service.route('/api/services/printer', 'GET', self.list_printers)
# self.service.route('/services/<service>/<service_id><other:re:.*>', ['GET', 'PUT', 'POST', 'DELETE', 'PATCH'], self.catchUnmountedServices)
# self.service.route('/services/<name><other:re:.*>', 'GET', self.catchIt)
def get_node(self, request, *args):
model = self.service.model
return {"node": model.json}
def update_node(self, request, *args):
model = self.service.model
frozen_keys = ['id', 'name', 'original_name', 'created_at', 'updated_at']
newname = request.params.get("name")
if newname:
model.node_name = newname
modelkeys = model.__data__.keys() - frozen_keys
for k in modelkeys:
kval = request.params.get(k)
if kval:
model.__setattr__(k, kval)
if not model.is_valid:
raise AncillaError(400, {"errors": model.errors})
model.save()
return {"node": model}
# newname = request.params.get("name")
# n = Node.select().first()
def discover_nodes(self, request, *args):
res = self.service.discovery.nodes()
# print(f'Node res = {res}')
nodes = []
ips = {}
for r in res:
if "ip" in r:
ips[r["ip"]] = r
beacon = self.service.discovery.beacon
if beacon and beacon.listener:
networkservices = beacon.listener.myservices
else:
networkservices = {}
# {'addresses': ['192.168.1.129'], 'port': 5000, 'server': 'ancilla.local', 'type': '_ancilla._tcp.local.'}
try:
for key, ns in networkservices.items():
ip = ns["addresses"][0]
if ip:
nd = {"network_name": key}
if ip in ips:
nd.update({**ns, **ips[ip]})
nodes.append(nd)
del ips[ip]
else:
nd.update(ns)
nodes.append(nd)
except Exception as e:
print(f"Node Exception = {str(e)}", flush=True)
## The rest of ips not part of the bonjour services for some reason")
for n in ips.values():
nodes.append(n)
return {"nodes": nodes}
async def delete_service(self, request, layerkeep, service_id, *args):
smodel = Service.get_by_id(service_id)
model = smodel.model
with Service._meta.database.atomic() as transaction:
try:
if model:
# if request.params.get("layerkeep_sync") and request.params.get("layerkeep_sync") != "false":
if layerkeep and smodel.kind == "printer" and model.layerkeep_id:
response = await layerkeep.delete_printer({"data": {"layerkeep_id": model.layerkeep_id}})
if not response.success:
raise response
model.delete_instance(recursive=True)
smodel.delete_instance(recursive=True)
self.service.delete_service(smodel)
except Exception as e:
print(f"DELETE SERvice excption= {str(e)}", flush=True)
transaction.rollback()
raise e
# return {"error": "Could Not Delete Service"}
return {"success": True}
def stop_service(self, request, service_id, *args):
s = Service.get_by_id(service_id)
self.service.stop_service(s)
return {"success": True}
def services(self, request, *args):
allservices = []
q = Service.select()
if request.params.get("kind"):
q = q.where(Service.kind == request.params.get("kind"))
for service in q:
js = service.to_json(extra_attrs=["identity"])
model = service.model
if model:
js.update(model=model.to_json(recurse=False))
allservices.append(js)
return {'services': allservices}
# return {'services': [service.json for service in Service.select()]}
def actions(self, *args):
return {"actions": self.service.list_actions()}
def service_model(self, request, model_id, *args):
s = Service.get_by_id(model_id)
return {"service_model": s.json}
def update_service_model(self, request, layerkeep, service_id, *args):
s = Service.get_by_id(service_id)
with Service._meta.database.atomic() as transaction:
try:
model = s.model
newname = request.params.get("name")
if newname:
s.service_name = newname
if model:
model.name = newname
if not model.is_valid:
raise AncillaError(400, {"errors": model.errors})
model.save()
if request.params.get('configuration') != None:
s.configuration = request.params.get('configuration')
if request.params.get('settings') != None:
s.settings = request.params.get('settings')
s.event_listeners = request.params.get('event_listeners') or s.event_listeners
s.save()
smodel = s.json
if model:
smodel.update(model=model.to_json(recurse=False))
return {"service_model": smodel}
except Exception as e:
# Because this block of code is wrapped with "atomic", a
# new transaction will begin automatically after the call
# to rollback().
transaction.rollback()
# return {"Error"}
raise e
# def register_event_listeners(self, *args):
def delete_service_model(self, request, model_id, *args):
s = Service.get_by_id(model_id)
self.service.remove_service()
list_1 = [item for item in list_1 if item[2] >= 5 or item[3] >= 0.3]
def recordings(self, request, *args, **kwargs):
page = int(request.params.get("page") or 1)
per_page = int(request.params.get("per_page") or 5)
q = CameraRecording.select().order_by(CameraRecording.created_at.desc())
if request.params.get("q[print_id]"):
q = q.where(CameraRecording.print_id == request.params.get("q[print_id]"))
if request.params.get("q[camera_id]"):
q = q.where(CameraRecording.camera_id == request.params.get("q[camera_id]"))
if request.params.get("q[status]"):
q = q.where(CameraRecording.status == request.params.get("q[status]"))
cnt = q.count()
num_pages = math.ceil(cnt / per_page)
return {"data": [p.to_json(recurse=True) for p in q.paginate(page, per_page)], "meta": {"current_page": page, "last_page": num_pages, "total": cnt}}
def get_recording(self, request, recording_id, *args):
rcd = CameraRecording.get_by_id(recording_id)
return {"data": rcd.json}
def delete_recording(self, request, recording_id, *args):
rcd = CameraRecording.get_by_id(recording_id)
if self.service.delete_recording(rcd):
return {"success": "Deleted"}
raise AncillaError(400, {"errors": "Coud Not Delete Recording"})
def get_video(self, request, recording_id, *args):
rcd = CameraRecording.get_by_id(recording_id)
# path = rcd.video_path + "/output.mp4"
fp = open(rcd.video_path, "rb")
request.response.set_header('Content-Disposition', 'filename=%s' % "output.mp4")
if request.params.get("download"):
request.response.set_header('Content-Type', 'application/octet-stream')
return fp
request.response.status = 206
request.response.set_header('Content-Type', 'video/mp4')
request.response.set_header('Accept-Ranges', 'bytes')
return self.stream_video(request, fp)
def list_printers(self, *args, **kwargs):
return {'printers': [printer.json for printer in Printer.select()]}
def list_cameras(self, request, *args, **kwargs):
return {'cameras': [camera.json for camera in Camera.select()]}
def create_camera(self, request, *args, **kwargs):
with Service._meta.database.atomic() as transaction:
try:
service = Service(name=request.params.get("name"), kind="camera", class_name="Camera")
service.service_name = request.params.get("name")
if not service.is_valid:
raise AncillaError(400, {"errors": service.errors})
service.save()
camera = Camera(**request.params, service=service)
default_settings = {
"record": {
"timelapse": 2,
"frames_per_second": 10,
},
"video": {
"size": [640, 480],
"format": "avc1"
}
}
camera.settings = default_settings
if not camera.is_valid:
raise AncillaError(400, {"errors": camera.errors})
camera.save()
camera_service = service.json
camera_service.update(model=camera.json)
return {"camera": camera_service}
except Exception as e:
# Because this block of code is wrapped with "atomic", a
# new transaction will begin automatically after the call
# to rollback().
transaction.rollback()
raise e
async def create_printer(self, request, layerkeep, *args, **kwargs):
with Service._meta.database.atomic() as transaction:
try:
service = Service(name=request.params.get("name"), kind="printer", class_name="Printer")
service.service_name = request.params.get("name")
if not service.is_valid:
raise AncillaError(400, {"errors": service.errors})
service.save()
printer = Printer(**request.params, service=service)
if not printer.is_valid:
raise AncillaError(400, {"errors": printer.errors})
if request.params.get("layerkeep_sync") == True:
if layerkeep:
response = await layerkeep.create_printer({"data": request.params})
if response.success:
printer.layerkeep_id = response.body.get("data").get("id")
else:
raise response
printer.save()
printerservice = service.json
printerservice.update(model=printer.json)
return {"printer": printerservice}
except Exception as e:
# Because this block of code is wrapped with "atomic", a
# new transaction will begin automatically after the call
# to rollback().
transaction.rollback()
raise e
async def update_attachment(self, request, attachment_id, *args):
sa = ServiceAttachment.get_by_id(attachment_id)
if request.params.get("settings"):
sa.settings = request.params.get("settings")
sa.save()
return {"data": sa.json}
def stream_video(self, request, fp):
start, end = self.get_range(request)
requestedrange = request.headers.get('Range')
# if requestedrange == None:
# print("NO REQUESTED RANGE",flush=True)
# else:
file_size = os.path.getsize(fp.name)
if end is None:
end = start + BUFF_SIZE - 1
end = min(end, file_size - 1)
end = min(end, start + BUFF_SIZE - 1)
length = end - start + 1
request.response.set_header(
'Content-Range', 'bytes {0}-{1}/{2}'.format(
start, end, file_size,
),
)
fp.seek(start)
bytes = fp.read(length)
return bytes
def get_range(self, request):
range = request.headers.get('Range')
m = None
if range:
m = re.match('bytes=(?P<start>\d+)-(?P<end>\d+)?', range)
if m:
start = m.group('start')
end = m.group('end')
start = int(start)
if end is not None:
end = int(end)
return start, end
else:
return 0, None
def catchUnmountedServices(self, request, service, service_id, *args, **kwargs):
print(f"INSIDECatch service {service} {service_id}", flush=True)
print(f"INSIDECatch {args}, {kwargs}", flush=True)
print(f"Request = {request.params}", flush=True)
try:
s = Service.get_by_id(service_id)
status, module = self.service.mount_service(s)
if status == "created":
return request.app._handle(request.environ)
else:
return {"status": "error", "error": "No Route"}
except Exception as e:
print(f"Could not mount service {str(e)}")
return {"error": str(e)}
return {"retry": True}
def catchIt(self, name, *args, **kwargs):
print("INSIDE CATCH IT")
return {"catch it": True}
| [
"os.path.getsize",
"math.ceil",
"re.match"
] | [((7819, 7844), 'math.ceil', 'math.ceil', (['(cnt / per_page)'], {}), '(cnt / per_page)\n', (7828, 7844), False, 'import math\n'), ((12243, 12267), 'os.path.getsize', 'os.path.getsize', (['fp.name'], {}), '(fp.name)\n', (12258, 12267), False, 'import os\n'), ((12743, 12798), 're.match', 're.match', (['"""bytes=(?P<start>\\\\d+)-(?P<end>\\\\d+)?"""', 'range'], {}), "('bytes=(?P<start>\\\\d+)-(?P<end>\\\\d+)?', range)\n", (12751, 12798), False, 'import re\n')] |
import codecs
import sys
RAW_DATA = "../data/ptb/ptb.train.txt"
VOCAB = "data/ptb.vocab"
OUTPUT_DATA = "data/ptb.train"
# 读取词汇表并建立映射
with codecs.open(VOCAB, "r", "utf-8") as f_vocab:
vocab = [w.strip() for w in f_vocab.readlines()]
word_to_id = {k: v for (k, v) in zip(vocab, range(len(vocab)))}
# 如果出现了被删除的低频词,替换成 <unk>
def get_id(word):
return word_to_id[word] if word in word_to_id else word_to_id["<unk>"]
fin = codecs.open(RAW_DATA, "r", "utf-8")
fout = codecs.open(OUTPUT_DATA, 'w', 'utf-8')
for line in fin:
words = line.strip().split() + ["<eos>"] # 读取单词并添加 <eos> 结束符
# 将每个单词替换为词汇表中的编号
out_line = ' '.join([str(get_id(w)) for w in words]) + '\n'
fout.write(out_line)
fin.close()
fout.close() | [
"codecs.open"
] | [((428, 463), 'codecs.open', 'codecs.open', (['RAW_DATA', '"""r"""', '"""utf-8"""'], {}), "(RAW_DATA, 'r', 'utf-8')\n", (439, 463), False, 'import codecs\n'), ((471, 509), 'codecs.open', 'codecs.open', (['OUTPUT_DATA', '"""w"""', '"""utf-8"""'], {}), "(OUTPUT_DATA, 'w', 'utf-8')\n", (482, 509), False, 'import codecs\n'), ((140, 172), 'codecs.open', 'codecs.open', (['VOCAB', '"""r"""', '"""utf-8"""'], {}), "(VOCAB, 'r', 'utf-8')\n", (151, 172), False, 'import codecs\n')] |
# -*- coding: utf-8 -*-
from flask import Blueprint, render_template
bp_error = Blueprint("bp_error", __name__, url_prefix="/error")
# Specific Error Handlers
@bp_error.route("/default")
def default():
return render_template(
"error/error_base.html",
error_code=500,
header_name="Error",
error_message="We will work on fixing that right away.",
)
@bp_error.route("/unauthorized")
def unauthorized():
return render_template(
"error/error_base.html",
error_code=500,
header_name="Unauthorized",
error_message="Not allowed to access this contents.",
)
| [
"flask.render_template",
"flask.Blueprint"
] | [((86, 138), 'flask.Blueprint', 'Blueprint', (['"""bp_error"""', '__name__'], {'url_prefix': '"""/error"""'}), "('bp_error', __name__, url_prefix='/error')\n", (95, 138), False, 'from flask import Blueprint, render_template\n'), ((227, 366), 'flask.render_template', 'render_template', (['"""error/error_base.html"""'], {'error_code': '(500)', 'header_name': '"""Error"""', 'error_message': '"""We will work on fixing that right away."""'}), "('error/error_base.html', error_code=500, header_name=\n 'Error', error_message='We will work on fixing that right away.')\n", (242, 366), False, 'from flask import Blueprint, render_template\n'), ((477, 620), 'flask.render_template', 'render_template', (['"""error/error_base.html"""'], {'error_code': '(500)', 'header_name': '"""Unauthorized"""', 'error_message': '"""Not allowed to access this contents."""'}), "('error/error_base.html', error_code=500, header_name=\n 'Unauthorized', error_message='Not allowed to access this contents.')\n", (492, 620), False, 'from flask import Blueprint, render_template\n')] |
import time
import datetime
from waveshare_epd import epd7in5_V2
from PIL import Image, ImageDraw, ImageFont
import calendar
import random
import os
picdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'pic')
fontdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'font')
epd = epd7in5_V2.EPD()
# Set the colors
black = 'rgb(0,0,0)'
white = 'rgb(255,255,255)'
def choose_mod(mod_choice, mod_turn):
if mod_choice == "weather" or mod_choice == "transit" or mod_choice == "tasklist":
mod_turn = 0
elif mod_choice == "c-s" or mod_choice == "news" or mod_choice == "meetings":
mod_turn = 1
elif mod_choice == "off":
mod_turn = 2
elif mod_choice == "random":
mod_rand = random.randint(0, 1)
while mod_rand == mod_turn:
mod_rand = random.randint(0, 1)
if mod_turn != mod_rand:
mod_turn = mod_rand
else:
print("mode unknown, going to default mode")
mod_turn = 0
return mod_turn
def time_in_range(start_hour, end_hour):
start = datetime.time((start_hour), 0, 0)
end = datetime.time((end_hour), 0, 0)
current_hour = datetime.datetime.now().strftime('%H')
current_min = datetime.datetime.now().strftime('%M')
current_sec = datetime.datetime.now().strftime('%S')
x = datetime.time(int(current_hour), int(current_min), int(current_sec))
"""Return true if x is in the range [start, end]"""
if start <= end:
return start <= x <= end
else:
return start <= x or x <= end
def tir_min(hour, x_min, y_min, y_sec):
start = datetime.time((hour), (x_min), 0)
end = datetime.time((hour), (y_min), (y_sec))
current_hour = datetime.datetime.now().strftime('%H')
current_min = datetime.datetime.now().strftime('%M')
current_sec = datetime.datetime.now().strftime('%S')
x = datetime.time(int(current_hour), int(current_min), int(current_sec))
"""Return true if x is in the range [start, end]"""
if start <= end:
return start <= x <= end
else:
return start <= x or x <= end
def sep_strings(it_str, chk_start):
chk_str = int(len(str(it_str)))
chk_str_1 = chk_str
check = False
#print("before" + str(chk_str))
if chk_str > chk_start:
chk_str = chk_start
else:
chk_str = chk_str
check = True
#print("after" + str(chk_str))
while check is False:
if str(it_str)[chk_str] != " ":
chk_str = chk_str - 1
# print("space_false: " + str(chk_str))
check = False
else:
chk_str = chk_str
# print("space_true: " + str(chk_str))
check = True
if chk_str_1 >= chk_start:
sep_it_1 = str(it_str)[0: chk_str] + " "
sep_it_2 = str(it_str)[chk_str+1: chk_str_1] + " "
else:
sep_it_1 = str(it_str)[0: chk_str] + " "
sep_it_2 = " "
return sep_it_1, sep_it_2
def draw_cal_mod(cal_s_x_0, cal_s_y, draw, color_1, color_2):
cal_month = datetime.datetime.now().month
cal_year = datetime.datetime.now().year
cal_day = datetime.datetime.now().day
cal_n_m = calendar.month_name[cal_month]
cal_text = calendar.TextCalendar(calendar.SUNDAY)
cal_list = cal_text.monthdayscalendar(cal_year, cal_month)
cal_s_x = cal_s_x_0
draw.text((cal_s_x + 60, cal_s_y-65), str(cal_n_m) + ' ' + str(cal_year),
font=font_size(35), fill=color_1)
draw.text((cal_s_x, cal_s_y-25), 'SU MO TU WED THU FRI SAT',
font=font_size(22), fill=color_1)
for cal_x in (0, 1, 2, 3, 4):
for cal_y in (0, 1, 2, 3, 4, 5, 6):
if cal_list[cal_x][cal_y] != 0:
if cal_list[cal_x][cal_y] == cal_day:
draw.rectangle((cal_s_x-5, cal_s_y, cal_s_x+22, cal_s_y+28), fill=color_1)
draw.text((cal_s_x, cal_s_y), str(
cal_list[cal_x][cal_y]), font=font_size(22), fill=color_2, align='right')
else:
draw.text((cal_s_x, cal_s_y), str(
cal_list[cal_x][cal_y]), font=font_size(22), fill=color_1, align='right')
cal_s_x = cal_s_x + 55
cal_s_x = cal_s_x_0
cal_s_y = cal_s_y + 30
def font_size(size):
fs = ImageFont.truetype(os.path.join(fontdir, 'BAHNSCHRIFT.TTF'), size)
#fs = ImageFont.truetype(os.path.join(fontdir, 'Font.ttc'), size)
return fs
def get_time(local_time):
pst_time = time.localtime(int(local_time))
pst_time = time.strftime('%A, %b %d', pst_time)
return pst_time
def get_year():
datetime_object = datetime.datetime.now()
year_str = (str(datetime_object.year) + "-" +
str(datetime_object.month)+"-" + str(datetime_object.day))
return year_str
def dayname():
day_name = datetime.datetime.now().strftime("%A")
return day_name
def cur_hr():
cur_hr = datetime.datetime.now().strftime("%H")
return cur_hr
def isTimeFormat(input):
try:
time.strptime(input, '%I:%M%p %Y-%m-%d')
return True
except ValueError:
return False
# define funciton for writing image and sleeping for 5 min.
def sep_datetime(utc_datetime):
if len(str(utc_datetime)) > 10:
date_time_x = datetime.datetime.strptime(str(utc_datetime), '%Y-%m-%dT%H:%M:%S%z')
# pst_time = time.strftime('%Y-%m-%d %H:%M', pst_time)
date_x = str(date_time_x.day) + '/' + str(date_time_x.month) + '/' + str(date_time_x.year)
time_x = str(date_time_x.strftime('%I')) + ':' + \
str(date_time_x.strftime('%M')) + str(date_time_x.strftime('%p'))
else:
date_time_x = datetime.datetime.strptime(str(utc_datetime), '%Y-%m-%d')
# pst_time = time.strftime('%Y-%m-%d %H:%M', pst_time)
date_x = str(date_time_x.day) + '/' + str(date_time_x.month) + '/' + str(date_time_x.year)
time_x = ''
return date_x, time_x
def write_to_screen(image, sleep_seconds):
print('Writing to screen.')
# Write to screen
h_image = Image.new('1', (epd.width, epd.height), 255)
# Open the template
screen_output_file = Image.open(os.path.join(picdir, image))
# Initialize the drawing context with template as background
h_image.paste(screen_output_file, (0, 0))
epd.display(epd.getbuffer(h_image))
# Sleep
print('Sleeping for ' + str(int(sleep_seconds/60)) + ' min.')
time.sleep(sleep_seconds)
# define function for displaying error
def display_error(error_source, color):
# Display an error
print('Error in the', error_source, 'request.')
# Initialize drawing
error_image = Image.new('1', (epd.width, epd.height), 255)
# Initialize the drawing
draw = ImageDraw.Draw(error_image)
draw.text((100, 150), error_source + ' ERROR', font=font_size(30), fill=color)
draw.text((100, 300), 'Retrying in 8 min', font=font_size(22), fill=color)
current_time = datetime.datetime.now().strftime('%H:%M')
draw.text((300, 365), 'Last Refresh: ' + str(current_time), font=font_size(30), fill=color)
# Save the error image
error_image_file = 'error.png'
error_image.save(os.path.join(picdir, error_image_file))
# Close error image
error_image.close()
# Write error to screen
write_to_screen(error_image_file, 8*60)
| [
"time.strptime",
"datetime.time",
"PIL.Image.new",
"time.strftime",
"os.path.join",
"time.sleep",
"os.path.realpath",
"datetime.datetime.now",
"PIL.ImageDraw.Draw",
"waveshare_epd.epd7in5_V2.EPD",
"calendar.TextCalendar",
"random.randint"
] | [((356, 372), 'waveshare_epd.epd7in5_V2.EPD', 'epd7in5_V2.EPD', ([], {}), '()\n', (370, 372), False, 'from waveshare_epd import epd7in5_V2\n'), ((1147, 1178), 'datetime.time', 'datetime.time', (['start_hour', '(0)', '(0)'], {}), '(start_hour, 0, 0)\n', (1160, 1178), False, 'import datetime\n'), ((1192, 1221), 'datetime.time', 'datetime.time', (['end_hour', '(0)', '(0)'], {}), '(end_hour, 0, 0)\n', (1205, 1221), False, 'import datetime\n'), ((1698, 1727), 'datetime.time', 'datetime.time', (['hour', 'x_min', '(0)'], {}), '(hour, x_min, 0)\n', (1711, 1727), False, 'import datetime\n'), ((1744, 1777), 'datetime.time', 'datetime.time', (['hour', 'y_min', 'y_sec'], {}), '(hour, y_min, y_sec)\n', (1757, 1777), False, 'import datetime\n'), ((3349, 3387), 'calendar.TextCalendar', 'calendar.TextCalendar', (['calendar.SUNDAY'], {}), '(calendar.SUNDAY)\n', (3370, 3387), False, 'import calendar\n'), ((4737, 4773), 'time.strftime', 'time.strftime', (['"""%A, %b %d"""', 'pst_time'], {}), "('%A, %b %d', pst_time)\n", (4750, 4773), False, 'import time\n'), ((4839, 4862), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4860, 4862), False, 'import datetime\n'), ((6312, 6356), 'PIL.Image.new', 'Image.new', (['"""1"""', '(epd.width, epd.height)', '(255)'], {}), "('1', (epd.width, epd.height), 255)\n", (6321, 6356), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((6687, 6712), 'time.sleep', 'time.sleep', (['sleep_seconds'], {}), '(sleep_seconds)\n', (6697, 6712), False, 'import time\n'), ((6922, 6966), 'PIL.Image.new', 'Image.new', (['"""1"""', '(epd.width, epd.height)', '(255)'], {}), "('1', (epd.width, epd.height), 255)\n", (6931, 6966), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((7009, 7036), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['error_image'], {}), '(error_image)\n', (7023, 7036), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((3169, 3192), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3190, 3192), False, 'import datetime\n'), ((3215, 3238), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3236, 3238), False, 'import datetime\n'), ((3259, 3282), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3280, 3282), False, 'import datetime\n'), ((4508, 4548), 'os.path.join', 'os.path.join', (['fontdir', '"""BAHNSCHRIFT.TTF"""'], {}), "(fontdir, 'BAHNSCHRIFT.TTF')\n", (4520, 4548), False, 'import os\n'), ((5247, 5287), 'time.strptime', 'time.strptime', (['input', '"""%I:%M%p %Y-%m-%d"""'], {}), "(input, '%I:%M%p %Y-%m-%d')\n", (5260, 5287), False, 'import time\n'), ((6419, 6446), 'os.path.join', 'os.path.join', (['picdir', 'image'], {}), '(picdir, image)\n', (6431, 6446), False, 'import os\n'), ((7446, 7484), 'os.path.join', 'os.path.join', (['picdir', 'error_image_file'], {}), '(picdir, error_image_file)\n', (7458, 7484), False, 'import os\n'), ((216, 242), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (232, 242), False, 'import os\n'), ((309, 335), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (325, 335), False, 'import os\n'), ((1244, 1267), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1265, 1267), False, 'import datetime\n'), ((1302, 1325), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1323, 1325), False, 'import datetime\n'), ((1360, 1383), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1381, 1383), False, 'import datetime\n'), ((1805, 1828), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1826, 1828), False, 'import datetime\n'), ((1863, 1886), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1884, 1886), False, 'import datetime\n'), ((1921, 1944), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1942, 1944), False, 'import datetime\n'), ((5047, 5070), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5068, 5070), False, 'import datetime\n'), ((5140, 5163), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5161, 5163), False, 'import datetime\n'), ((7221, 7244), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7242, 7244), False, 'import datetime\n'), ((808, 828), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (822, 828), False, 'import random\n'), ((890, 910), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (904, 910), False, 'import random\n')] |
import logging
import sys
import classes.iDb as db
# Set Logging Level
logging.basicConfig(level=logging.INFO)
class Friend:
def __init__(self, User, Friend):
self.user_id = User.user_id
self.friend_id = Friend.user_id
pass
def addFriend(self):
pass
def removeFriend(self):
pass
@staticmethod
def getFriends(user_id):
print(user_id)
try:
sql = "SELECT * FROM FRIEND WHERE USER1 = \'" + str(user_id) +'\' OR USER2 = \'' + str(user_id) + "'"
# Calls database with constructed SQL from imported db class
#favs = db.db.callDbFetch(sql)
friends_query_obj = db.dbQuery(sql)
friends = db.dbQuery.callDbFetch(friends_query_obj)
# Log Results of DB call and return results
logging.debug("successful connect to db2")
logging.info("favorites response: " + str(friends))
if friends != [False]:
return friends
else:
return {
"statusCode": 400,
"headers": {"Content-Type": "application/json"},
"body": {"error": str(sql) + str(sys.exc_info())}
}
except:
logging.error("Oops!" + str(sys.exc_info()) + "occured. ")
return {
"statusCode": 400,
"headers": {"Content-Type": "application/json"},
"body": {"error": str(sql) + str(sys.exc_info())}
}
| [
"logging.basicConfig",
"classes.iDb.dbQuery.callDbFetch",
"logging.debug",
"sys.exc_info",
"classes.iDb.dbQuery"
] | [((72, 111), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (91, 111), False, 'import logging\n'), ((708, 723), 'classes.iDb.dbQuery', 'db.dbQuery', (['sql'], {}), '(sql)\n', (718, 723), True, 'import classes.iDb as db\n'), ((746, 787), 'classes.iDb.dbQuery.callDbFetch', 'db.dbQuery.callDbFetch', (['friends_query_obj'], {}), '(friends_query_obj)\n', (768, 787), True, 'import classes.iDb as db\n'), ((857, 899), 'logging.debug', 'logging.debug', (['"""successful connect to db2"""'], {}), "('successful connect to db2')\n", (870, 899), False, 'import logging\n'), ((1223, 1237), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1235, 1237), False, 'import sys\n'), ((1311, 1325), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1323, 1325), False, 'import sys\n'), ((1512, 1526), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1524, 1526), False, 'import sys\n')] |
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from .views import PostListView, PostDetailView
urlpatterns = patterns('',
# URL pattern for the PostListView # noqa
url(
regex=r'^$',
view=PostListView.as_view(),
name='post_list'
),
url(
regex=r'^category/(?P<category>[\w-]+)/$',
view=PostListView.as_view(),
name='category_post_list'
),
url(
regex=r'^tag/(?P<tag>[\w-]+)/$',
view=PostListView.as_view(),
name='tag_post_list'
),
url(
regex=r'^(?P<year>\d{4})/(?P<month>\d{1,2})/$',
view=PostListView.as_view(),
name='archive_post_list'
),
url(
regex=r'^blog/(?P<slug>[\w-]+)/$',
view=PostDetailView.as_view(),
name='post_detail'
),
url(
regex=r'^add/comment/$',
view='post.views.add_comment',
name='add_comment',
),
) | [
"django.conf.urls.url"
] | [((1340, 1418), 'django.conf.urls.url', 'url', ([], {'regex': '"""^add/comment/$"""', 'view': '"""post.views.add_comment"""', 'name': '"""add_comment"""'}), "(regex='^add/comment/$', view='post.views.add_comment', name='add_comment')\n", (1343, 1418), False, 'from django.conf.urls import patterns, url\n')] |
from setuptools import find_packages, setup
setup(
name='django-studentsdb-app',
version='1.0',
author=u'<NAME>',
author_email='<EMAIL>',
packages=find_packages(),
license='BSD licence, see LICENCE.txt',
description='Students DB application',
long_description=open('README.txt').read(),
zip_safe=False,
include_package_data=True,
package_data = {
'students': ['requirements.txt']
},
) | [
"setuptools.find_packages"
] | [((168, 183), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (181, 183), False, 'from setuptools import find_packages, setup\n')] |
#!/usr/bin/env python
import pathlib
import matplotlib.pyplot as plt
import torch
import pyro
from state_space import state_space_model
SEED = 123
torch.manual_seed(SEED)
pyro.set_rng_seed(SEED)
def main():
figdir = pathlib.Path('./figures')
figdir.mkdir(exist_ok=True)
# demo predictive capacity
N = 3
T = 101
# draws from the prior predictive are shape (T, N)
# each draw uses different draws from global drift and vol params
n_prior_draws = 5
prior_predictive = torch.stack(
[state_space_model(None, N=N, T=T) for _ in range(n_prior_draws)]
)
colors = plt.get_cmap('cividis', n_prior_draws)
fig, ax = plt.subplots()
list(map(
lambda i: ax.plot(prior_predictive[i], color=colors(i)),
range(prior_predictive.shape[0])
))
plt.savefig(figdir / 'state_space_prior_predictive.png', bbox_inches='tight')
#######
# as far as inference goes, actually just a diagonal normal should be ok..
data_N = 100
data_T = 50
data = state_space_model(None, N=data_N, T=data_T, verbose=True)
guide = pyro.infer.autoguide.AutoDiagonalNormal(state_space_model)
optim = pyro.optim.Adam({'lr': 0.01})
svi = pyro.infer.SVI(state_space_model, guide, optim, loss=pyro.infer.Trace_ELBO())
niter = 2500 # or whatever, you'll have to play with this and other optim params
pyro.clear_param_store()
losses = torch.empty((niter,))
for n in range(niter):
loss = svi.step(data, N=data_N, T=data_T)
losses[n] = loss
if n % 50 == 0:
print(f"On iteration {n}, loss = {loss}")
# you can extract the latent time series in a variety of ways
# one of these is the pyro.infer.Predictive class
num_samples = 100
posterior_predictive = pyro.infer.Predictive(
state_space_model,
guide=guide,
num_samples=num_samples
)
posterior_draws = posterior_predictive(None, N=data_N, T=data_T)
# since our model returns the latent, we should have this in the `latent` value
print(
posterior_draws['latent'].squeeze().shape == (num_samples, data_T, data_N)
)
if __name__ == "__main__":
main()
| [
"torch.manual_seed",
"matplotlib.pyplot.savefig",
"pyro.clear_param_store",
"pathlib.Path",
"pyro.infer.autoguide.AutoDiagonalNormal",
"pyro.infer.Trace_ELBO",
"pyro.optim.Adam",
"pyro.set_rng_seed",
"pyro.infer.Predictive",
"state_space.state_space_model",
"torch.empty",
"matplotlib.pyplot.su... | [((152, 175), 'torch.manual_seed', 'torch.manual_seed', (['SEED'], {}), '(SEED)\n', (169, 175), False, 'import torch\n'), ((176, 199), 'pyro.set_rng_seed', 'pyro.set_rng_seed', (['SEED'], {}), '(SEED)\n', (193, 199), False, 'import pyro\n'), ((227, 252), 'pathlib.Path', 'pathlib.Path', (['"""./figures"""'], {}), "('./figures')\n", (239, 252), False, 'import pathlib\n'), ((621, 659), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""cividis"""', 'n_prior_draws'], {}), "('cividis', n_prior_draws)\n", (633, 659), True, 'import matplotlib.pyplot as plt\n'), ((674, 688), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (686, 688), True, 'import matplotlib.pyplot as plt\n'), ((821, 898), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(figdir / 'state_space_prior_predictive.png')"], {'bbox_inches': '"""tight"""'}), "(figdir / 'state_space_prior_predictive.png', bbox_inches='tight')\n", (832, 898), True, 'import matplotlib.pyplot as plt\n'), ((1036, 1093), 'state_space.state_space_model', 'state_space_model', (['None'], {'N': 'data_N', 'T': 'data_T', 'verbose': '(True)'}), '(None, N=data_N, T=data_T, verbose=True)\n', (1053, 1093), False, 'from state_space import state_space_model\n'), ((1106, 1164), 'pyro.infer.autoguide.AutoDiagonalNormal', 'pyro.infer.autoguide.AutoDiagonalNormal', (['state_space_model'], {}), '(state_space_model)\n', (1145, 1164), False, 'import pyro\n'), ((1177, 1206), 'pyro.optim.Adam', 'pyro.optim.Adam', (["{'lr': 0.01}"], {}), "({'lr': 0.01})\n", (1192, 1206), False, 'import pyro\n'), ((1386, 1410), 'pyro.clear_param_store', 'pyro.clear_param_store', ([], {}), '()\n', (1408, 1410), False, 'import pyro\n'), ((1424, 1445), 'torch.empty', 'torch.empty', (['(niter,)'], {}), '((niter,))\n', (1435, 1445), False, 'import torch\n'), ((1798, 1876), 'pyro.infer.Predictive', 'pyro.infer.Predictive', (['state_space_model'], {'guide': 'guide', 'num_samples': 'num_samples'}), '(state_space_model, guide=guide, num_samples=num_samples)\n', (1819, 1876), False, 'import pyro\n'), ((532, 565), 'state_space.state_space_model', 'state_space_model', (['None'], {'N': 'N', 'T': 'T'}), '(None, N=N, T=T)\n', (549, 565), False, 'from state_space import state_space_model\n'), ((1270, 1293), 'pyro.infer.Trace_ELBO', 'pyro.infer.Trace_ELBO', ([], {}), '()\n', (1291, 1293), False, 'import pyro\n')] |
#!/usr/bin/env python3
# import additional code to complete our task
import shutil
import os
# move into the working directory
os.chdir("/home/student/mycode/")
# copy the fileA to fileB
shutil.copy("5g_research/sdn_network.txt", "5g_research/sdn_network.txt.copy")
# copy the entire directoryA to directoryB
shutil.copytree("5g_research/", "5g_research_backup/")
| [
"os.chdir",
"shutil.copytree",
"shutil.copy"
] | [((129, 162), 'os.chdir', 'os.chdir', (['"""/home/student/mycode/"""'], {}), "('/home/student/mycode/')\n", (137, 162), False, 'import os\n'), ((190, 268), 'shutil.copy', 'shutil.copy', (['"""5g_research/sdn_network.txt"""', '"""5g_research/sdn_network.txt.copy"""'], {}), "('5g_research/sdn_network.txt', '5g_research/sdn_network.txt.copy')\n", (201, 268), False, 'import shutil\n'), ((313, 367), 'shutil.copytree', 'shutil.copytree', (['"""5g_research/"""', '"""5g_research_backup/"""'], {}), "('5g_research/', '5g_research_backup/')\n", (328, 367), False, 'import shutil\n')] |
from typing import Any, List, Mapping, Sequence
import jsonschema
from dataclasses import dataclass, field
from sqlalchemy.orm import scoped_session
from vardb.datamodel.jsonschemas.load_schema import load_schema
from vardb.datamodel import annotation
@dataclass
class ConverterConfig:
elements: Sequence[Mapping[str, Any]]
@dataclass(init=False)
class AnnotationImportConfig:
name: str
converter_config: ConverterConfig
def __init__(self, name: str, converter_config: Mapping[str, Any]) -> None:
self.name = name
self.converter_config = ConverterConfig(**converter_config)
@dataclass(init=False)
class AnnotationConfig:
deposit: Sequence[AnnotationImportConfig]
view: List = field(default_factory=list)
def __init__(self, deposit: Sequence[Mapping[str, Any]], view: List) -> None:
self.view = view
self.deposit = list()
for sub_conf in deposit:
self.deposit.append(AnnotationImportConfig(**sub_conf))
def deposit_annotationconfig(
session: scoped_session, annotationconfig: Mapping[str, Any]
) -> annotation.AnnotationConfig:
schema = load_schema("annotationconfig.json")
jsonschema.validate(annotationconfig, schema)
active_annotationconfig = (
session.query(annotation.AnnotationConfig)
.order_by(annotation.AnnotationConfig.id.desc())
.limit(1)
.one_or_none()
)
# Check if annotation config is equal. Note that for deposit, we do not care about order or duplicity
# Since the deposit is a list of dicts, we can not check set-equality (dicts are not hashable),
# so we check that all items in incoming are in active, and vice versa.
if (
active_annotationconfig
and all(x in active_annotationconfig.deposit for x in annotationconfig["deposit"])
and all(x in annotationconfig["deposit"] for x in active_annotationconfig.deposit)
and active_annotationconfig.view == annotationconfig["view"]
):
raise RuntimeError("The annotation config matches the current active annotation config.")
ac_obj = annotation.AnnotationConfig(
deposit=annotationconfig["deposit"], view=annotationconfig["view"]
)
session.add(ac_obj)
session.flush()
return ac_obj
| [
"vardb.datamodel.annotation.AnnotationConfig",
"dataclasses.dataclass",
"vardb.datamodel.annotation.AnnotationConfig.id.desc",
"vardb.datamodel.jsonschemas.load_schema.load_schema",
"jsonschema.validate",
"dataclasses.field"
] | [((333, 354), 'dataclasses.dataclass', 'dataclass', ([], {'init': '(False)'}), '(init=False)\n', (342, 354), False, 'from dataclasses import dataclass, field\n'), ((614, 635), 'dataclasses.dataclass', 'dataclass', ([], {'init': '(False)'}), '(init=False)\n', (623, 635), False, 'from dataclasses import dataclass, field\n'), ((723, 750), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (728, 750), False, 'from dataclasses import dataclass, field\n'), ((1134, 1170), 'vardb.datamodel.jsonschemas.load_schema.load_schema', 'load_schema', (['"""annotationconfig.json"""'], {}), "('annotationconfig.json')\n", (1145, 1170), False, 'from vardb.datamodel.jsonschemas.load_schema import load_schema\n'), ((1175, 1220), 'jsonschema.validate', 'jsonschema.validate', (['annotationconfig', 'schema'], {}), '(annotationconfig, schema)\n', (1194, 1220), False, 'import jsonschema\n'), ((2103, 2203), 'vardb.datamodel.annotation.AnnotationConfig', 'annotation.AnnotationConfig', ([], {'deposit': "annotationconfig['deposit']", 'view': "annotationconfig['view']"}), "(deposit=annotationconfig['deposit'], view=\n annotationconfig['view'])\n", (2130, 2203), False, 'from vardb.datamodel import annotation\n'), ((1323, 1360), 'vardb.datamodel.annotation.AnnotationConfig.id.desc', 'annotation.AnnotationConfig.id.desc', ([], {}), '()\n', (1358, 1360), False, 'from vardb.datamodel import annotation\n')] |
#!/usr/bin/env python
# coding: utf-8
import pandas as pd
def get_age(name):
df = pd.read_excel("test.xlsx", sheet_name="Sheet1", headers=True)
print("*"*20)
print(df)
print("*"*20)
rows, cols = df[df['Name']==name].shape
print(rows, cols, "^^^")
if rows==1:
age = df[df['Name']==name]['Age'][0]
return age
else:
return -1
def get_all_above_age(age):
df = pd.read_excel("test.xlsx", sheet_name="Sheet1", headers=True)
df_filter = df[(df['Profession'].str.contains("Developer")) & (df['Age']>age)]
print("&&&&&&&&&&&&&&&&")
print(df_filter)
print("&&&&&&&&&&&&&&&&")
return df_filter.to_json(orient='records')
def get_2_values(name):
return (name,str(get_age(name)))
if __name__=='__main__':
print(get_age("Ram"))
print(get_all_above_age(25))
print(get_2_values("Ram"))
| [
"pandas.read_excel"
] | [((89, 150), 'pandas.read_excel', 'pd.read_excel', (['"""test.xlsx"""'], {'sheet_name': '"""Sheet1"""', 'headers': '(True)'}), "('test.xlsx', sheet_name='Sheet1', headers=True)\n", (102, 150), True, 'import pandas as pd\n'), ((429, 490), 'pandas.read_excel', 'pd.read_excel', (['"""test.xlsx"""'], {'sheet_name': '"""Sheet1"""', 'headers': '(True)'}), "('test.xlsx', sheet_name='Sheet1', headers=True)\n", (442, 490), True, 'import pandas as pd\n')] |
import tensorflow as tf
import numpy as np
import cv2
import os
import rospy
from timeit import default_timer as timer
from styx_msgs.msg import TrafficLight
CLASS_TRAFFIC_LIGHT = 10
MODEL_DIR = 'light_classification/models/'
IMG_DIR = 'light_classification/img/'
DEBUG_DIR = 'light_classification/result/'
class TLClassifier(object):
def __init__(self):
#TODO load classifier
# object detection: faster_rcnn_inception_v2
# from Tensorflow detection model zoo:
# https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
self.detector = MODEL_DIR + 'faster_rcnn_inception_v2.pb'
self.sess= self.load_graph(self.detector)
detection_graph = self.sess.graph
if not os.path.exists(DEBUG_DIR): #check the result of light detection
os.makedirs(DEBUG_DIR)
# The input placeholder for the image.
# 'get_tensor_by_name' returns the Tensor with the associated name in the Graph.
self.image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
self.detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
self.detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
self.detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
# the first decoding
test_image = cv2.imread(IMG_DIR + 'image_test.jpg')
image_np, box_coords, classes, scores = self.detect_tl(test_image)
# Traditional traffic light classifier
pred_image, is_red = self.classify_red_tl(image_np, box_coords, classes, scores)
# rospy.loginfo("DEBUG: stage 4")
if is_red:
rospy.loginfo("Classifier: RED")
else:
rospy.loginfo("Classifier: NOT RED")
cv2.imwrite(IMG_DIR + 'pred_image.png', pred_image)
rospy.loginfo("TensorFlow Initiation: Done")
self.num_image = 1
def load_graph(self, graph_file, use_xla=False):
config = tf.ConfigProto(log_device_placement=False)
config.gpu_options.allow_growth = True
session = tf.Session(config=config)
# if use_xla:
# jit_level = tf.OptimizerOptions.ON_1
# config.graph_options.optimizer_options.global_jit_level = jit_level
with tf.Session(graph=tf.Graph(), config=config) as sess:
gd = tf.GraphDef()
with tf.gfile.Open(graph_file, 'rb') as f:
data = f.read()
gd.ParseFromString(data)
tf.import_graph_def(gd, name='')
ops = sess.graph.get_operations()
n_ops = len(ops)
print("number of operations = %d" % n_ops)
return sess
# return sess, ops
def detect_tl(self, image):
trt_image = np.copy(image)
image_np = np.expand_dims(np.asarray(trt_image, dtype=np.uint8), 0)
# Actual detection.
(boxes, scores, classes) = self.sess.run([self.detection_boxes, self.detection_scores, self.detection_classes],
feed_dict={self.image_tensor: image_np})
# Remove unnecessary dimensions
boxes = np.squeeze(boxes)
scores = np.squeeze(scores)
classes = np.squeeze(classes)
confidence_cutoff = 0.8
# Filter traffic light boxes with a confidence score less than `confidence_cutoff`
boxes, scores, classes = self.filter_boxes(confidence_cutoff, boxes, scores, classes, keep_classes=[CLASS_TRAFFIC_LIGHT])
# The current box coordinates are normalized to a range between 0 and 1.
# This converts the coordinates actual location on the image.
image_np = np.squeeze(image_np)
width = image_np.shape[1]
height = image_np.shape[0]
box_coords = self.to_image_coords(boxes, height, width)
return image_np, box_coords, classes, scores
# Filter the boxes which detection confidence lower than the threshold
def filter_boxes(self, min_score, boxes, scores, classes, keep_classes):
n = len(classes)
idxs = []
for i in range(n):
if scores[i] >= min_score:
if ((keep_classes is None) or (int(classes[i]) in keep_classes)):
idxs.append(i)
filtered_boxes = boxes[idxs, ...]
filtered_scores = scores[idxs, ...]
filtered_classes = classes[idxs, ...]
return filtered_boxes, filtered_scores, filtered_classes
# Convert the normalized box coordinates (0~1) to image coordinates
def to_image_coords(self, boxes, height, width):
box_coords = np.zeros_like(boxes)
box_coords[:, 0] = boxes[:, 0] * height
box_coords[:, 1] = boxes[:, 1] * width
box_coords[:, 2] = boxes[:, 2] * height
box_coords[:, 3] = boxes[:, 3] * width
return box_coords
#Draw bounding box on traffic light, and detect if it is RED
def classify_red_tl(self, image_np, boxes, classes, scores, thickness=5):
for i in range(len(boxes)):
# rospy.loginfo("DEBUG: stage 3.1")
bot, left, top, right = boxes[i, ...]
class_id = int(classes[i])
score = scores[i]
h = top - bot
w = right - left
if h <= 1.5 * w:
continue # Truncated Traffic Ligth box
cv2.rectangle(image_np,(left, top), (right, bot), (255, 43, 255), thickness) # BGR format for color
tl_img = image_np[int(bot):int(top), int(left):int(right)]
tl_img_simu = self.select_red_simu(tl_img) # SELECT RED
tl_img_real = self.select_lighton_real(tl_img) # SELECT TL
tl_img = (tl_img_simu + tl_img_real) / 2
gray_tl_img = cv2.cvtColor(tl_img, cv2.COLOR_RGB2GRAY)
nrows, ncols = gray_tl_img.shape[0], gray_tl_img.shape[1]
# compute center of mass of RED points
mean_row = 0
mean_col = 0
npoints = 0
for row in range(nrows):
for col in range(ncols):
if (gray_tl_img[row, col] > 0):
mean_row += row
mean_col += col
npoints += 1
if npoints > 0:
mean_row = float(mean_row / npoints) / nrows
mean_col = float(mean_col / npoints) / ncols
# Get the normalized center of mass of RED points
# Use the location of light to detect the color, RED is in the upper part of the box
if npoints > 10 and mean_row < 0.33:
rospy.loginfo("RED Light Detection Confidance: %.2f", score)
return image_np, True
return image_np, False
# select RED mask in simulation situation
def select_red_simu(self, img): # BGR
lower = np.array([ 0, 0, 200], dtype="uint8")
upper = np.array([ 55, 55, 255], dtype="uint8")
red_mask = cv2.inRange(img, lower, upper)
return cv2.bitwise_and(img, img, mask = red_mask)
# select Traffic Lighton area(HLS: high L and high S) in real situation
# for camera without polarization filter
def select_lighton_real(self, img): # HLS for real
hls_img = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
lower = np.array([ 50, 150, 150], dtype="uint8")
upper = np.array([ 100, 255, 255], dtype="uint8")
tl_mask = cv2.inRange(hls_img, lower, upper)
return cv2.bitwise_and(img, img, mask = tl_mask)
def get_classification(self, image):
"""Determines the color of the traffic light in the image
Args:
image (cv::Mat): image containing the traffic light
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
#implement light color prediction
image_np, box_coords, classes, scores = self.detect_tl(image)
# light color detection
detected_image, is_red = self.classify_red_tl(image_np, box_coords, classes, scores)
# fimage = DEBUG_DIR + 'detected_img_' + str(self.num_image) + '.png'
# #output the predicted image
# cv2.imwrite(fimage, detected_image)
self.num_image += 1
#return 'if it is a RED'
if is_red:
return TrafficLight.RED
else:
return TrafficLight.UNKNOWN
| [
"cv2.rectangle",
"numpy.array",
"os.path.exists",
"tensorflow.Graph",
"tensorflow.Session",
"numpy.asarray",
"tensorflow.GraphDef",
"tensorflow.ConfigProto",
"numpy.squeeze",
"cv2.cvtColor",
"tensorflow.import_graph_def",
"cv2.imread",
"rospy.loginfo",
"cv2.imwrite",
"numpy.copy",
"ten... | [((1463, 1501), 'cv2.imread', 'cv2.imread', (["(IMG_DIR + 'image_test.jpg')"], {}), "(IMG_DIR + 'image_test.jpg')\n", (1473, 1501), False, 'import cv2\n'), ((1908, 1959), 'cv2.imwrite', 'cv2.imwrite', (["(IMG_DIR + 'pred_image.png')", 'pred_image'], {}), "(IMG_DIR + 'pred_image.png', pred_image)\n", (1919, 1959), False, 'import cv2\n'), ((1968, 2012), 'rospy.loginfo', 'rospy.loginfo', (['"""TensorFlow Initiation: Done"""'], {}), "('TensorFlow Initiation: Done')\n", (1981, 2012), False, 'import rospy\n'), ((2129, 2171), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'log_device_placement': '(False)'}), '(log_device_placement=False)\n', (2143, 2171), True, 'import tensorflow as tf\n'), ((2237, 2262), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (2247, 2262), True, 'import tensorflow as tf\n'), ((2928, 2942), 'numpy.copy', 'np.copy', (['image'], {}), '(image)\n', (2935, 2942), True, 'import numpy as np\n'), ((3328, 3345), 'numpy.squeeze', 'np.squeeze', (['boxes'], {}), '(boxes)\n', (3338, 3345), True, 'import numpy as np\n'), ((3363, 3381), 'numpy.squeeze', 'np.squeeze', (['scores'], {}), '(scores)\n', (3373, 3381), True, 'import numpy as np\n'), ((3400, 3419), 'numpy.squeeze', 'np.squeeze', (['classes'], {}), '(classes)\n', (3410, 3419), True, 'import numpy as np\n'), ((3853, 3873), 'numpy.squeeze', 'np.squeeze', (['image_np'], {}), '(image_np)\n', (3863, 3873), True, 'import numpy as np\n'), ((4812, 4832), 'numpy.zeros_like', 'np.zeros_like', (['boxes'], {}), '(boxes)\n', (4825, 4832), True, 'import numpy as np\n'), ((7078, 7114), 'numpy.array', 'np.array', (['[0, 0, 200]'], {'dtype': '"""uint8"""'}), "([0, 0, 200], dtype='uint8')\n", (7086, 7114), True, 'import numpy as np\n'), ((7134, 7172), 'numpy.array', 'np.array', (['[55, 55, 255]'], {'dtype': '"""uint8"""'}), "([55, 55, 255], dtype='uint8')\n", (7142, 7172), True, 'import numpy as np\n'), ((7193, 7223), 'cv2.inRange', 'cv2.inRange', (['img', 'lower', 'upper'], {}), '(img, lower, upper)\n', (7204, 7223), False, 'import cv2\n'), ((7239, 7279), 'cv2.bitwise_and', 'cv2.bitwise_and', (['img', 'img'], {'mask': 'red_mask'}), '(img, img, mask=red_mask)\n', (7254, 7279), False, 'import cv2\n'), ((7469, 7505), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2HLS'], {}), '(img, cv2.COLOR_RGB2HLS)\n', (7481, 7505), False, 'import cv2\n'), ((7522, 7561), 'numpy.array', 'np.array', (['[50, 150, 150]'], {'dtype': '"""uint8"""'}), "([50, 150, 150], dtype='uint8')\n", (7530, 7561), True, 'import numpy as np\n'), ((7581, 7621), 'numpy.array', 'np.array', (['[100, 255, 255]'], {'dtype': '"""uint8"""'}), "([100, 255, 255], dtype='uint8')\n", (7589, 7621), True, 'import numpy as np\n'), ((7641, 7675), 'cv2.inRange', 'cv2.inRange', (['hls_img', 'lower', 'upper'], {}), '(hls_img, lower, upper)\n', (7652, 7675), False, 'import cv2\n'), ((7691, 7730), 'cv2.bitwise_and', 'cv2.bitwise_and', (['img', 'img'], {'mask': 'tl_mask'}), '(img, img, mask=tl_mask)\n', (7706, 7730), False, 'import cv2\n'), ((820, 845), 'os.path.exists', 'os.path.exists', (['DEBUG_DIR'], {}), '(DEBUG_DIR)\n', (834, 845), False, 'import os\n'), ((896, 918), 'os.makedirs', 'os.makedirs', (['DEBUG_DIR'], {}), '(DEBUG_DIR)\n', (907, 918), False, 'import os\n'), ((1786, 1818), 'rospy.loginfo', 'rospy.loginfo', (['"""Classifier: RED"""'], {}), "('Classifier: RED')\n", (1799, 1818), False, 'import rospy\n'), ((1845, 1881), 'rospy.loginfo', 'rospy.loginfo', (['"""Classifier: NOT RED"""'], {}), "('Classifier: NOT RED')\n", (1858, 1881), False, 'import rospy\n'), ((2501, 2514), 'tensorflow.GraphDef', 'tf.GraphDef', ([], {}), '()\n', (2512, 2514), True, 'import tensorflow as tf\n'), ((2655, 2687), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['gd'], {'name': '""""""'}), "(gd, name='')\n", (2674, 2687), True, 'import tensorflow as tf\n'), ((2977, 3014), 'numpy.asarray', 'np.asarray', (['trt_image'], {'dtype': 'np.uint8'}), '(trt_image, dtype=np.uint8)\n', (2987, 3014), True, 'import numpy as np\n'), ((5567, 5644), 'cv2.rectangle', 'cv2.rectangle', (['image_np', '(left, top)', '(right, bot)', '(255, 43, 255)', 'thickness'], {}), '(image_np, (left, top), (right, bot), (255, 43, 255), thickness)\n', (5580, 5644), False, 'import cv2\n'), ((5958, 5998), 'cv2.cvtColor', 'cv2.cvtColor', (['tl_img', 'cv2.COLOR_RGB2GRAY'], {}), '(tl_img, cv2.COLOR_RGB2GRAY)\n', (5970, 5998), False, 'import cv2\n'), ((2532, 2563), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['graph_file', '"""rb"""'], {}), "(graph_file, 'rb')\n", (2545, 2563), True, 'import tensorflow as tf\n'), ((2448, 2458), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2456, 2458), True, 'import tensorflow as tf\n'), ((6822, 6882), 'rospy.loginfo', 'rospy.loginfo', (['"""RED Light Detection Confidance: %.2f"""', 'score'], {}), "('RED Light Detection Confidance: %.2f', score)\n", (6835, 6882), False, 'import rospy\n')] |
from api import app
if __name__ == '__main__':
with open('urls.json', 'w') as fj:
fj.write('')
app.run() | [
"api.app.run"
] | [((112, 121), 'api.app.run', 'app.run', ([], {}), '()\n', (119, 121), False, 'from api import app\n')] |
# -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
from Processor.Processor import Processor
from WebScrapper.Scrapper import Scrapper
import json
import os
print("The Data is being scrapped please wait!!!!!!!!!!")
start=0
flag = 1
scrap = Scrapper()
p = Processor()
print("Creating your Visualization Please Wait.........")
p.createOrientedReviewsMap()
p.summarize()
p.removeFeaturesWithNoReview()
p.separatePositiveAndNegative()
if os.path.exists("finalOrientation.txt"):
os.remove("finalOrientation.txt")
f = open("finalOrientation.txt", "a")
f.write(str(p.finalOrientation))
f.close()
if os.path.exists("OrientedReviews.txt"):
os.remove("OrientedReviews.txt")
f = open("OrientedReviews.txt", "a")
f.write(str(p.orientedReviews))
f.close()
from Visualization.Featuresandvisual import Visualization
vis = Visualization()
| [
"os.path.exists",
"WebScrapper.Scrapper.Scrapper",
"Visualization.Featuresandvisual.Visualization",
"Processor.Processor.Processor",
"os.remove"
] | [((278, 288), 'WebScrapper.Scrapper.Scrapper', 'Scrapper', ([], {}), '()\n', (286, 288), False, 'from WebScrapper.Scrapper import Scrapper\n'), ((294, 305), 'Processor.Processor.Processor', 'Processor', ([], {}), '()\n', (303, 305), False, 'from Processor.Processor import Processor\n'), ((481, 519), 'os.path.exists', 'os.path.exists', (['"""finalOrientation.txt"""'], {}), "('finalOrientation.txt')\n", (495, 519), False, 'import os\n'), ((648, 685), 'os.path.exists', 'os.path.exists', (['"""OrientedReviews.txt"""'], {}), "('OrientedReviews.txt')\n", (662, 685), False, 'import os\n'), ((876, 891), 'Visualization.Featuresandvisual.Visualization', 'Visualization', ([], {}), '()\n', (889, 891), False, 'from Visualization.Featuresandvisual import Visualization\n'), ((526, 559), 'os.remove', 'os.remove', (['"""finalOrientation.txt"""'], {}), "('finalOrientation.txt')\n", (535, 559), False, 'import os\n'), ((692, 724), 'os.remove', 'os.remove', (['"""OrientedReviews.txt"""'], {}), "('OrientedReviews.txt')\n", (701, 724), False, 'import os\n')] |
# ========================= eCAL LICENSE =================================
#
# Copyright (C) 2016 - 2019 Continental Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ========================= eCAL LICENSE =================================
import sys
import time
import ecal.core.core as ecal_core
import ecal.core.service as ecal_service
def main():
# print eCAL version and date
print("eCAL {} ({})\n".format(ecal_core.getversion(), ecal_core.getdate()))
# initialize eCAL API
ecal_core.initialize(sys.argv, "py_minimal_service_client")
# set process state
ecal_core.set_process_state(1, 1, "I feel good")
# create a client for the "DemoService" service
client = ecal_service.Client("DemoService")
# define the client response callback to catch server responses
def client_resp_callback(service_info, response):
if (service_info["call_state"] == "call_state_executed"):
print("'DemoService' method '{}' responded : '{}'".format(service_info["method_name"], response))
print()
else:
print("server {} response failed, error : '{}'".format(service_info["host_name"], service_info["error_msg"]))
print()
# and add it to the client
client.add_response_callback(client_resp_callback)
# idle and call service methods
i = 0
while(ecal_core.ok()):
i = i + 1
# call foo
request = bytes("hello foo {}".format(i), "ascii")
print("'DemoService' method 'foo' requested with : {}".format(request))
client.call_method("foo", request)
time.sleep(0.5)
# call ping
request = bytes("ping number {}".format(i), "ascii")
print("'DemoService' method 'ping' requested with : {}".format(request))
client.call_method("ping", request)
time.sleep(0.5)
# destroy client
client.destroy()
# finalize eCAL API
ecal_core.finalize()
if __name__ == "__main__":
main()
| [
"ecal.core.core.finalize",
"ecal.core.core.initialize",
"time.sleep",
"ecal.core.core.getversion",
"ecal.core.core.getdate",
"ecal.core.core.ok",
"ecal.core.service.Client",
"ecal.core.core.set_process_state"
] | [((1008, 1067), 'ecal.core.core.initialize', 'ecal_core.initialize', (['sys.argv', '"""py_minimal_service_client"""'], {}), "(sys.argv, 'py_minimal_service_client')\n", (1028, 1067), True, 'import ecal.core.core as ecal_core\n'), ((1095, 1143), 'ecal.core.core.set_process_state', 'ecal_core.set_process_state', (['(1)', '(1)', '"""I feel good"""'], {}), "(1, 1, 'I feel good')\n", (1122, 1143), True, 'import ecal.core.core as ecal_core\n'), ((1206, 1240), 'ecal.core.service.Client', 'ecal_service.Client', (['"""DemoService"""'], {}), "('DemoService')\n", (1225, 1240), True, 'import ecal.core.service as ecal_service\n'), ((1814, 1828), 'ecal.core.core.ok', 'ecal_core.ok', ([], {}), '()\n', (1826, 1828), True, 'import ecal.core.core as ecal_core\n'), ((2326, 2346), 'ecal.core.core.finalize', 'ecal_core.finalize', ([], {}), '()\n', (2344, 2346), True, 'import ecal.core.core as ecal_core\n'), ((2034, 2049), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2044, 2049), False, 'import time\n'), ((2244, 2259), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2254, 2259), False, 'import time\n'), ((933, 955), 'ecal.core.core.getversion', 'ecal_core.getversion', ([], {}), '()\n', (953, 955), True, 'import ecal.core.core as ecal_core\n'), ((957, 976), 'ecal.core.core.getdate', 'ecal_core.getdate', ([], {}), '()\n', (974, 976), True, 'import ecal.core.core as ecal_core\n')] |
"""Created on Sat Oct 01 2015 16:24.
@author: <NAME>
"""
import numpy as np
def coe2mee(COE, mu=1.):
"""
Convert classical orbital elements to modified equinoctial elements.
Parameters
----------
COE : ndarray
mx6 array of elements ordered as [p e i W w nu].
mu : float
Standard gravitational parameter. Defaults to canonical units.
Returns
-------
MEE : ndarray
mx6 array of elements ordered as [p f g h k L].
"""
p = COE[0:, 0:1]
e = COE[0:, 1:2]
i = COE[0:, 2:3]
W = COE[0:, 3:4]
w = COE[0:, 4:5]
nu = COE[0:, 5:6]
# x,y components of eccentricity vector
f = e * np.cos(w + W)
g = e * np.sin(w + W)
# x,y components of ascending node vector
h = np.tan(i/2.) * np.cos(W)
k = np.tan(i/2.) * np.sin(W)
# true longitude
L = np.mod(W+w+nu, 2*np.pi)
return np.concatenate((p, f, g, h, k, L), 1)
| [
"numpy.tan",
"numpy.cos",
"numpy.concatenate",
"numpy.sin",
"numpy.mod"
] | [((852, 881), 'numpy.mod', 'np.mod', (['(W + w + nu)', '(2 * np.pi)'], {}), '(W + w + nu, 2 * np.pi)\n', (858, 881), True, 'import numpy as np\n'), ((888, 925), 'numpy.concatenate', 'np.concatenate', (['(p, f, g, h, k, L)', '(1)'], {}), '((p, f, g, h, k, L), 1)\n', (902, 925), True, 'import numpy as np\n'), ((669, 682), 'numpy.cos', 'np.cos', (['(w + W)'], {}), '(w + W)\n', (675, 682), True, 'import numpy as np\n'), ((695, 708), 'numpy.sin', 'np.sin', (['(w + W)'], {}), '(w + W)\n', (701, 708), True, 'import numpy as np\n'), ((764, 779), 'numpy.tan', 'np.tan', (['(i / 2.0)'], {}), '(i / 2.0)\n', (770, 779), True, 'import numpy as np\n'), ((779, 788), 'numpy.cos', 'np.cos', (['W'], {}), '(W)\n', (785, 788), True, 'import numpy as np\n'), ((797, 812), 'numpy.tan', 'np.tan', (['(i / 2.0)'], {}), '(i / 2.0)\n', (803, 812), True, 'import numpy as np\n'), ((812, 821), 'numpy.sin', 'np.sin', (['W'], {}), '(W)\n', (818, 821), True, 'import numpy as np\n')] |
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
"""
Use the provided metadata generator if you wish to support OPTIONS requests on
list url of resources that support bulk operations. The only difference from
the generator provided by REST Framework is that it does not try to check
object permissions when the request would be bulk update.
To use the class, add this to your settings:
REST_FRAMEWORK = {
'DEFAULT_METADATA_CLASS': 'contrib.bulk_operations.metadata.BulkMetadata'
}
"""
from django.core.exceptions import PermissionDenied
from django.http import Http404
from rest_framework import exceptions
from rest_framework import metadata
from rest_framework.request import clone_request
class BulkMetadata(metadata.SimpleMetadata):
"""
Simple wrapper around `SimpleMetadata` provided by REST Framework. This
class can handle views supporting bulk operations by not checking object
permissions on list URL.
"""
def determine_actions(self, request, view):
"""
For generic class based views we return information about the fields
that are accepted for 'PUT' and 'POST' methods.
This method expects that `get_object` may actually fail and gracefully
handles it.
Most of the code in this method is copied from the parent class.
"""
actions = {}
for method in set(['PUT', 'POST']) & set(view.allowed_methods):
view.request = clone_request(request, method)
try:
# Test global permissions
if hasattr(view, 'check_permissions'):
view.check_permissions(view.request)
# Test object permissions. This will fail on list url for
# resources supporting bulk operations. In such case
# permissions are not checked.
if method == 'PUT' and hasattr(view, 'get_object'):
try:
view.get_object()
except (AssertionError, KeyError):
pass
except (exceptions.APIException, PermissionDenied, Http404):
pass
else:
# If user has appropriate permissions for the view, include
# appropriate metadata about the fields that should be supplied.
serializer = view.get_serializer()
actions[method] = self.get_serializer_info(serializer)
finally:
view.request = request
return actions
| [
"rest_framework.request.clone_request"
] | [((1516, 1546), 'rest_framework.request.clone_request', 'clone_request', (['request', 'method'], {}), '(request, method)\n', (1529, 1546), False, 'from rest_framework.request import clone_request\n')] |
from django import template
register = template.Library()
@register.inclusion_tag('templatetags/form_field.html')
def show_form_field(field, icon=False):
return {'field': field, 'icon': icon}
@register.inclusion_tag('templatetags/learning_resource.html')
def show_resource(resource):
return {'resource': resource} | [
"django.template.Library"
] | [((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')] |
import random
from music_theory.note import Note
class Mode:
def __init__(self):
self._modes = [
# Major oriented
"Ionian",
"Dorian",
"Phrygian",
"Lydian",
"Mixo",
"Aeolian",
"Locrian",
# Melodic minor oriented
"Jazz minor",
"Dorian b2",
"Lydian aug",
"Lydian dom",
"Mixo b6",
"Half dim",
"Altered"
]
def get_mode_list(self) -> []:
return self._modes
def get_random_mode(self) -> str:
random_note = Note().get_random_note()
random_mode = self.get_random_mode_type()
return random_note + " " + random_mode
def get_random_mode_type(self) -> str:
i = random.randint(0, len(self._modes) - 1)
return self._modes[i]
def get_random_modes(self, count: int, with_note=True) -> []:
output = []
note_obj = Note()
for c in range(count):
if with_note:
random_note = note_obj.get_random_note() + " "
else:
random_note = ""
random_mode = self.get_random_mode_type()
random_result = random_note + random_mode
output.append(random_result)
return output
| [
"music_theory.note.Note"
] | [((996, 1002), 'music_theory.note.Note', 'Note', ([], {}), '()\n', (1000, 1002), False, 'from music_theory.note import Note\n'), ((641, 647), 'music_theory.note.Note', 'Note', ([], {}), '()\n', (645, 647), False, 'from music_theory.note import Note\n')] |
import torch
from src import config, models
from src.models import WGANGPGModel, WGANGPDModel
from src.datasets import PositiveDataset
from ._base import Base
class WGANGP(Base):
def __init__(self):
super().__init__(WGANGPGModel(), WGANGPDModel())
def _fit(self):
d_optimizer = torch.optim.Adam(
params=self.d.parameters(),
lr=config.gan.d_lr,
betas=(0.5, 0.999),
)
g_optimizer = torch.optim.Adam(
params=self.g.parameters(),
lr=config.gan.g_lr,
betas=(0.5, 0.999),
)
x = PositiveDataset()[:][0].to(config.device)
for _ in range(config.gan.epochs):
for __ in range(config.gan.d_loops):
self.d.zero_grad()
prediction_real = self.d(x)
loss_real = - prediction_real.mean()
z = torch.randn(len(x), models.z_size, device=config.device)
fake_x = self.g(z).detach()
prediction_fake = self.d(fake_x)
loss_fake = prediction_fake.mean()
gradient_penalty = self._cal_gradient_penalty(x, fake_x)
loss = loss_real + loss_fake + gradient_penalty
loss.backward()
d_optimizer.step()
for __ in range(config.gan.g_loops):
self.g.zero_grad()
z = torch.randn(len(x), models.z_size, device=config.device)
fake_x = self.g(z)
prediction = self.d(fake_x)
loss = - prediction.mean()
loss.backward()
g_optimizer.step()
def _cal_gradient_penalty(
self,
x: torch.Tensor,
fake_x: torch.Tensor,
) -> torch.Tensor:
alpha = torch.rand(len(x), 1).to(config.device)
interpolates = alpha * x + (1 - alpha) * fake_x
interpolates.requires_grad = True
disc_interpolates = self.d(interpolates)
gradients = torch.autograd.grad(
outputs=disc_interpolates,
inputs=interpolates,
grad_outputs=torch.ones(disc_interpolates.size()).to(config.device),
create_graph=True,
retain_graph=True,
only_inputs=True,
)[0]
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean() * config.gan.wgangp_lambda
return gradient_penalty
| [
"src.datasets.PositiveDataset",
"src.models.WGANGPGModel",
"src.models.WGANGPDModel"
] | [((231, 245), 'src.models.WGANGPGModel', 'WGANGPGModel', ([], {}), '()\n', (243, 245), False, 'from src.models import WGANGPGModel, WGANGPDModel\n'), ((247, 261), 'src.models.WGANGPDModel', 'WGANGPDModel', ([], {}), '()\n', (259, 261), False, 'from src.models import WGANGPGModel, WGANGPDModel\n'), ((605, 622), 'src.datasets.PositiveDataset', 'PositiveDataset', ([], {}), '()\n', (620, 622), False, 'from src.datasets import PositiveDataset\n')] |
import pickle
from sys import intern
from numpy import uint32
import numpy as np
import zarr
from napari_plugin_engine import napari_hook_implementation
from qtpy.QtWidgets import QWidget, QHBoxLayout, QPushButton
from magicgui import magic_factory
import pathlib
import napari
def viterbrain_reader(path: str) -> list:
with open(path, "rb") as handle:
viterbi = pickle.load(handle)
layer_labels = zarr.open(viterbi.fragment_path)
image_path = viterbi.fragment_path[:-12] + ".zarr"
layer_image = zarr.open(image_path)
scale = viterbi.resolution
meta_labels = {"name": "fragments", "scale": scale}
meta_image = {"name": "image", "scale": scale}
return [(layer_image, meta_image, "image"), (layer_labels, meta_labels, "labels")]
def napari_get_reader(path: str) -> list:
parts = path.split(".")
if parts[-1] == "pickle" or parts[-1] == "pkl":
return viterbrain_reader
else:
return None
@magic_factory(
call_button="Trace", start_comp={"max": 2**20}, end_comp={"max": 2**20}
)
def comp_trace(
v: napari.Viewer,
start_comp: int,
end_comp: int,
filename=pathlib.Path("/some/path.pickle"),
) -> None:
with open(filename, "rb") as handle:
viterbi = pickle.load(handle)
def comp2point(comp: int) -> list:
state = viterbi.comp_to_states[comp][0]
if viterbi.nxGraph.nodes[state]["type"] == "fragment":
return viterbi.nxGraph.nodes[state]["point1"]
else:
coords = viterbi.soma_fragment2coords[comp]
centroid = np.mean(coords, axis=0)
centroid = [int(c) for c in centroid]
return centroid
start_pt = comp2point(start_comp)
end_pt = comp2point(end_comp)
print(f"tracing from {start_pt} to {end_pt}")
path = viterbi.shortest_path(start_pt, end_pt)
v.add_shapes(
path,
shape_type="path",
edge_color="r",
edge_width=1,
name=f"trace {start_comp} to {end_comp}",
scale=viterbi.resolution,
)
| [
"numpy.mean",
"pathlib.Path",
"pickle.load",
"zarr.open",
"magicgui.magic_factory"
] | [((965, 1060), 'magicgui.magic_factory', 'magic_factory', ([], {'call_button': '"""Trace"""', 'start_comp': "{'max': 2 ** 20}", 'end_comp': "{'max': 2 ** 20}"}), "(call_button='Trace', start_comp={'max': 2 ** 20}, end_comp={\n 'max': 2 ** 20})\n", (978, 1060), False, 'from magicgui import magic_factory\n'), ((419, 451), 'zarr.open', 'zarr.open', (['viterbi.fragment_path'], {}), '(viterbi.fragment_path)\n', (428, 451), False, 'import zarr\n'), ((525, 546), 'zarr.open', 'zarr.open', (['image_path'], {}), '(image_path)\n', (534, 546), False, 'import zarr\n'), ((1149, 1182), 'pathlib.Path', 'pathlib.Path', (['"""/some/path.pickle"""'], {}), "('/some/path.pickle')\n", (1161, 1182), False, 'import pathlib\n'), ((379, 398), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (390, 398), False, 'import pickle\n'), ((1254, 1273), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (1265, 1273), False, 'import pickle\n'), ((1576, 1599), 'numpy.mean', 'np.mean', (['coords'], {'axis': '(0)'}), '(coords, axis=0)\n', (1583, 1599), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# encoding: utf8
#
# Copyright © <NAME> <burak at arskom dot com dot tr>,
# Arskom Ltd. http://www.arskom.com.tr
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the owner nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""This is a blocking example running in a single-process twisted setup.
In this example, user code runs directly in the reactor loop. So unless your
code fully adheres to the asynchronous programming principles, you can block
the reactor loop. ::
$ time curl -s "http://localhost:9757/block?seconds=10" > /dev/null & \
time curl -s "http://localhost:9757/block?seconds=10" > /dev/null &
[1] 27559
[2] 27560
real 0m10.026s
user 0m0.005s
sys 0m0.008s
real 0m20.045s
user 0m0.009s
sys 0m0.005s
If you call sleep, it sleeps by returning a deferred: ::
$ time curl -s "http://localhost:9757/sleep?seconds=10" > /dev/null & \
time curl -s "http://localhost:9757/sleep?seconds=10" > /dev/null &
[1] 27778
[2] 27779
real 0m10.012s
user 0m0.000s
sys 0m0.000s
real 0m10.013s
user 0m0.000s
sys 0m0.000s
"""
import sys
import time
import logging
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.internet.task import deferLater
from twisted.python import log
from spyne import Unicode, Integer, Double, ByteArray, Iterable, rpc, \
ServiceBase, Application
from spyne.server.twisted import TwistedWebResource
from spyne.protocol.http import HttpRpc
HOST = '0.0.0.0'
PORT = 9758
class SomeService(ServiceBase):
@rpc(Integer, _returns=Integer)
def block(ctx, seconds):
"""Blocks the current thread for given number of seconds."""
time.sleep(seconds)
return seconds
class SomeNonBlockingService(ServiceBase):
@rpc(Integer, _returns=Unicode)
def sleep(ctx, seconds):
"""Waits without blocking reactor for given number of seconds by
returning a deferred."""
def _cb():
return "slept for %r seconds" % seconds
return deferLater(reactor, seconds, _cb)
@rpc(Unicode, Double, Double, _returns=ByteArray)
def say_hello_with_sleep(ctx, name, times, seconds):
"""Sends multiple hello messages by waiting given number of seconds
inbetween."""
times = [times] # Workaround for Python 2's lacking of nonlocal
def _cb(response):
if times[0] > 0:
msg = u"Hello %s, sleeping for %f seconds for " \
u"%d more time(s)." % (name, seconds, times[0])
response.append(msg.encode('utf8'))
response.append(b'\n')
logging.debug(msg)
times[0] -= 1
return deferLater(reactor, seconds, _cb, response)
return Iterable.Push(_cb)
def initialize(services, tns='spyne.examples.twisted.resource'):
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG)
observer = log.PythonLoggingObserver('twisted')
log.startLoggingWithObserver(observer.emit, setStdout=False)
return Application(services, 'spyne.examples.twisted.hello',
in_protocol=HttpRpc(), out_protocol=HttpRpc())
if __name__=='__main__':
application = initialize([SomeService, SomeNonBlockingService])
resource = TwistedWebResource(application)
site = Site(resource)
reactor.listenTCP(PORT, site, interface=HOST)
logging.info("listening on: %s:%d" % (HOST,PORT))
logging.info('wsdl is at: http://%s:%d/?wsdl' % (HOST, PORT))
sys.exit(reactor.run())
| [
"logging.basicConfig",
"twisted.python.log.startLoggingWithObserver",
"twisted.internet.task.deferLater",
"logging.getLogger",
"spyne.server.twisted.TwistedWebResource",
"logging.debug",
"spyne.rpc",
"twisted.python.log.PythonLoggingObserver",
"time.sleep",
"spyne.Iterable.Push",
"twisted.intern... | [((3037, 3067), 'spyne.rpc', 'rpc', (['Integer'], {'_returns': 'Integer'}), '(Integer, _returns=Integer)\n', (3040, 3067), False, 'from spyne import Unicode, Integer, Double, ByteArray, Iterable, rpc, ServiceBase, Application\n'), ((3267, 3297), 'spyne.rpc', 'rpc', (['Integer'], {'_returns': 'Unicode'}), '(Integer, _returns=Unicode)\n', (3270, 3297), False, 'from spyne import Unicode, Integer, Double, ByteArray, Iterable, rpc, ServiceBase, Application\n'), ((3561, 3609), 'spyne.rpc', 'rpc', (['Unicode', 'Double', 'Double'], {'_returns': 'ByteArray'}), '(Unicode, Double, Double, _returns=ByteArray)\n', (3564, 3609), False, 'from spyne import Unicode, Integer, Double, ByteArray, Iterable, rpc, ServiceBase, Application\n'), ((4375, 4415), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (4394, 4415), False, 'import logging\n'), ((4500, 4536), 'twisted.python.log.PythonLoggingObserver', 'log.PythonLoggingObserver', (['"""twisted"""'], {}), "('twisted')\n", (4525, 4536), False, 'from twisted.python import log\n'), ((4541, 4601), 'twisted.python.log.startLoggingWithObserver', 'log.startLoggingWithObserver', (['observer.emit'], {'setStdout': '(False)'}), '(observer.emit, setStdout=False)\n', (4569, 4601), False, 'from twisted.python import log\n'), ((4857, 4888), 'spyne.server.twisted.TwistedWebResource', 'TwistedWebResource', (['application'], {}), '(application)\n', (4875, 4888), False, 'from spyne.server.twisted import TwistedWebResource\n'), ((4900, 4914), 'twisted.web.server.Site', 'Site', (['resource'], {}), '(resource)\n', (4904, 4914), False, 'from twisted.web.server import Site\n'), ((4920, 4965), 'twisted.internet.reactor.listenTCP', 'reactor.listenTCP', (['PORT', 'site'], {'interface': 'HOST'}), '(PORT, site, interface=HOST)\n', (4937, 4965), False, 'from twisted.internet import reactor\n'), ((4971, 5021), 'logging.info', 'logging.info', (["('listening on: %s:%d' % (HOST, PORT))"], {}), "('listening on: %s:%d' % (HOST, PORT))\n", (4983, 5021), False, 'import logging\n'), ((5025, 5086), 'logging.info', 'logging.info', (["('wsdl is at: http://%s:%d/?wsdl' % (HOST, PORT))"], {}), "('wsdl is at: http://%s:%d/?wsdl' % (HOST, PORT))\n", (5037, 5086), False, 'import logging\n'), ((3174, 3193), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (3184, 3193), False, 'import time\n'), ((3521, 3554), 'twisted.internet.task.deferLater', 'deferLater', (['reactor', 'seconds', '_cb'], {}), '(reactor, seconds, _cb)\n', (3531, 3554), False, 'from twisted.internet.task import deferLater\n'), ((4285, 4303), 'spyne.Iterable.Push', 'Iterable.Push', (['_cb'], {}), '(_cb)\n', (4298, 4303), False, 'from spyne import Unicode, Integer, Double, ByteArray, Iterable, rpc, ServiceBase, Application\n'), ((5101, 5114), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (5112, 5114), False, 'from twisted.internet import reactor\n'), ((4420, 4459), 'logging.getLogger', 'logging.getLogger', (['"""spyne.protocol.xml"""'], {}), "('spyne.protocol.xml')\n", (4437, 4459), False, 'import logging\n'), ((4712, 4721), 'spyne.protocol.http.HttpRpc', 'HttpRpc', ([], {}), '()\n', (4719, 4721), False, 'from spyne.protocol.http import HttpRpc\n'), ((4736, 4745), 'spyne.protocol.http.HttpRpc', 'HttpRpc', ([], {}), '()\n', (4743, 4745), False, 'from spyne.protocol.http import HttpRpc\n'), ((4152, 4170), 'logging.debug', 'logging.debug', (['msg'], {}), '(msg)\n', (4165, 4170), False, 'import logging\n'), ((4225, 4268), 'twisted.internet.task.deferLater', 'deferLater', (['reactor', 'seconds', '_cb', 'response'], {}), '(reactor, seconds, _cb, response)\n', (4235, 4268), False, 'from twisted.internet.task import deferLater\n')] |
import pandas as pd
import re
import numpy as np
import os
import sys
from collections import OrderedDict, defaultdict
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
# from theano import *
# load state data
az_year = pd.read_csv("data/csv/price_expenditures/sector/az/price/teacd.csv", engine='c', low_memory=True)["Year"]
az_price = pd.read_csv("data/csv/price_expenditures/sector/az/price/teacd.csv", engine='c', low_memory=True, date_parser=az_year)
| [
"pandas.read_csv"
] | [((367, 490), 'pandas.read_csv', 'pd.read_csv', (['"""data/csv/price_expenditures/sector/az/price/teacd.csv"""'], {'engine': '"""c"""', 'low_memory': '(True)', 'date_parser': 'az_year'}), "('data/csv/price_expenditures/sector/az/price/teacd.csv', engine\n ='c', low_memory=True, date_parser=az_year)\n", (378, 490), True, 'import pandas as pd\n'), ((250, 352), 'pandas.read_csv', 'pd.read_csv', (['"""data/csv/price_expenditures/sector/az/price/teacd.csv"""'], {'engine': '"""c"""', 'low_memory': '(True)'}), "('data/csv/price_expenditures/sector/az/price/teacd.csv', engine\n ='c', low_memory=True)\n", (261, 352), True, 'import pandas as pd\n')] |
#!/usr/local/CyberCP/bin/python
import os
import os.path
import sys
import django
sys.path.append('/usr/local/CyberCP')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
django.setup()
import json
from plogical.acl import ACLManager
import plogical.CyberCPLogFileWriter as logging
from plogical.virtualHostUtilities import virtualHostUtilities
import subprocess
from django.shortcuts import HttpResponse, render
from random import randint
import time
from plogical.firewallUtilities import FirewallUtilities
from firewall.models import FirewallRules
from plogical.modSec import modSec
from plogical.csf import CSF
from plogical.processUtilities import ProcessUtilities
from serverStatus.serverStatusUtil import ServerStatusUtil
class FirewallManager:
imunifyPath = '/usr/bin/imunify360-agent'
CLPath = '/etc/sysconfig/cloudlinux'
def __init__(self, request = None):
self.request = request
def securityHome(self, request = None, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
return render(request, 'firewall/index.html')
except BaseException as msg:
return HttpResponse(str(msg))
def firewallHome(self, request = None, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
return render(request, 'firewall/firewall.html')
except BaseException as msg:
return HttpResponse(str(msg))
def getCurrentRules(self, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('fetchStatus', 0)
rules = FirewallRules.objects.all()
json_data = "["
checker = 0
for items in rules:
dic = {
'id': items.id,
'name': items.name,
'proto': items.proto,
'port': items.port,
'ipAddress': items.ipAddress,
}
if checker == 0:
json_data = json_data + json.dumps(dic)
checker = 1
else:
json_data = json_data + ',' + json.dumps(dic)
json_data = json_data + ']'
final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": json_data})
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def addRule(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('add_status', 0)
ruleName = data['ruleName']
ruleProtocol = data['ruleProtocol']
rulePort = data['rulePort']
ruleIP = data['ruleIP']
FirewallUtilities.addRule(ruleProtocol, rulePort, ruleIP)
newFWRule = FirewallRules(name=ruleName, proto=ruleProtocol, port=rulePort, ipAddress=ruleIP)
newFWRule.save()
final_dic = {'status': 1, 'add_status': 1, 'error_message': "None"}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'add_status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def deleteRule(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('delete_status', 0)
ruleID = data['id']
ruleProtocol = data['proto']
rulePort = data['port']
ruleIP = data['ruleIP']
FirewallUtilities.deleteRule(ruleProtocol, rulePort, ruleIP)
delRule = FirewallRules.objects.get(id=ruleID)
delRule.delete()
final_dic = {'status': 1, 'delete_status': 1, 'error_message': "None"}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'delete_status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def reloadFirewall(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('reload_status', 0)
command = 'sudo firewall-cmd --reload'
res = ProcessUtilities.executioner(command)
if res == 1:
final_dic = {'reload_status': 1, 'error_message': "None"}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'reload_status': 0,
'error_message': "Can not reload firewall, see CyberCP main log file."}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'reload_status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def startFirewall(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('start_status', 0)
command = 'sudo systemctl start firewalld'
res = ProcessUtilities.executioner(command)
if res == 1:
final_dic = {'start_status': 1, 'error_message': "None"}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'start_status': 0,
'error_message': "Can not start firewall, see CyberCP main log file."}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'start_status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def stopFirewall(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('stop_status', 0)
command = 'sudo systemctl stop firewalld'
res = ProcessUtilities.executioner(command)
if res == 1:
final_dic = {'stop_status': 1, 'error_message': "None"}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'stop_status': 0,
'error_message': "Can not stop firewall, see CyberCP main log file."}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'stop_status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def firewallStatus(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson()
command = 'systemctl status firewalld'
status = ProcessUtilities.outputExecutioner(command)
if status.find("dead") > -1:
final_dic = {'status': 1, 'error_message': "none", 'firewallStatus': 0}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'status': 1, 'error_message': "none", 'firewallStatus': 1}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def secureSSH(self, request = None, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
return render(request, 'firewall/secureSSH.html')
except BaseException as msg:
return HttpResponse(str(msg))
def getSSHConfigs(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson()
type = data['type']
if type == "1":
## temporarily changing permission for sshd files
pathToSSH = "/etc/ssh/sshd_config"
cat = "sudo cat " + pathToSSH
data = ProcessUtilities.outputExecutioner(cat).split('\n')
permitRootLogin = 0
sshPort = "22"
for items in data:
if items.find("PermitRootLogin") > -1:
if items.find("Yes") > -1 or items.find("yes") > -1:
permitRootLogin = 1
continue
if items.find("Port") > -1 and not items.find("GatewayPorts") > -1:
sshPort = items.split(" ")[1].strip("\n")
final_dic = {'status': 1, 'permitRootLogin': permitRootLogin, 'sshPort': sshPort}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
pathToKeyFile = "/root/.ssh/authorized_keys"
cat = "sudo cat " + pathToKeyFile
data = ProcessUtilities.outputExecutioner(cat).split('\n')
json_data = "["
checker = 0
for items in data:
if items.find("ssh-rsa") > -1:
keydata = items.split(" ")
try:
key = "ssh-rsa " + keydata[1][:50] + " .. " + keydata[2]
try:
userName = keydata[2][:keydata[2].index("@")]
except:
userName = keydata[2]
except:
key = "ssh-rsa " + keydata[1][:50]
userName = ''
dic = {'userName': userName,
'key': key,
}
if checker == 0:
json_data = json_data + json.dumps(dic)
checker = 1
else:
json_data = json_data + ',' + json.dumps(dic)
json_data = json_data + ']'
final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def saveSSHConfigs(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('saveStatus', 0)
type = data['type']
sshPort = data['sshPort']
rootLogin = data['rootLogin']
if rootLogin == True:
rootLogin = "1"
else:
rootLogin = "0"
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/firewallUtilities.py"
execPath = execPath + " saveSSHConfigs --type " + str(type) + " --sshPort " + sshPort + " --rootLogin " + rootLogin
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
csfPath = '/etc/csf'
if os.path.exists(csfPath):
dataIn = {'protocol': 'TCP_IN', 'ports': sshPort}
self.modifyPorts(dataIn)
dataIn = {'protocol': 'TCP_OUT', 'ports': sshPort}
self.modifyPorts(dataIn)
else:
try:
updateFW = FirewallRules.objects.get(name="SSHCustom")
FirewallUtilities.deleteRule("tcp", updateFW.port, "0.0.0.0/0")
updateFW.port = sshPort
updateFW.save()
FirewallUtilities.addRule('tcp', sshPort, "0.0.0.0/0")
except:
try:
newFireWallRule = FirewallRules(name="SSHCustom", port=sshPort, proto="tcp")
newFireWallRule.save()
FirewallUtilities.addRule('tcp', sshPort, "0.0.0.0/0")
command = 'firewall-cmd --permanent --remove-service=ssh'
ProcessUtilities.executioner(command)
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(str(msg))
final_dic = {'status': 1, 'saveStatus': 1}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'status': 0, 'saveStatus': 0, "error_message": output}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0 ,'saveStatus': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def deleteSSHKey(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('delete_status', 0)
key = data['key']
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/firewallUtilities.py"
execPath = execPath + " deleteSSHKey --key '" + key + "'"
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
final_dic = {'status': 1, 'delete_status': 1}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'status': 1, 'delete_status': 1, "error_mssage": output}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'delete_status': 0, 'error_mssage': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def addSSHKey(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('add_status', 0)
key = data['key']
tempPath = "/home/cyberpanel/" + str(randint(1000, 9999))
writeToFile = open(tempPath, "w")
writeToFile.write(key)
writeToFile.close()
execPath = "sudo /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/firewallUtilities.py"
execPath = execPath + " addSSHKey --tempPath " + tempPath
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
final_dic = {'status': 1, 'add_status': 1}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'status': 0, 'add_status': 0, "error_mssage": output}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'status': 0, 'add_status': 0, 'error_mssage': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def loadModSecurityHome(self, request = None, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
OLS = 1
confPath = os.path.join(virtualHostUtilities.Server_root, "conf/httpd_config.conf")
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).splitlines()
modSecInstalled = 0
for items in httpdConfig:
if items.find('module mod_security') > -1:
modSecInstalled = 1
break
else:
OLS = 0
modSecInstalled = 1
return render(request, 'firewall/modSecurity.html', {'modSecInstalled': modSecInstalled, 'OLS': OLS})
except BaseException as msg:
return HttpResponse(str(msg))
def installModSec(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('installModSec', 0)
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " installModSec"
ProcessUtilities.popenExecutioner(execPath)
time.sleep(3)
final_json = json.dumps({'installModSec': 1, 'error_message': "None"})
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'installModSec': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def installStatusModSec(self, userID = None, data = None):
try:
command = "sudo cat " + modSec.installLogPath
installStatus = ProcessUtilities.outputExecutioner(command)
if installStatus.find("[200]") > -1:
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " installModSecConfigs"
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
pass
else:
final_json = json.dumps({
'error_message': "Failed to install ModSecurity configurations.",
'requestStatus': installStatus,
'abort': 1,
'installed': 0,
})
return HttpResponse(final_json)
final_json = json.dumps({
'error_message': "None",
'requestStatus': installStatus,
'abort': 1,
'installed': 1,
})
return HttpResponse(final_json)
elif installStatus.find("[404]") > -1:
final_json = json.dumps({
'abort': 1,
'installed': 0,
'error_message': "None",
'requestStatus': installStatus,
})
return HttpResponse(final_json)
else:
final_json = json.dumps({
'abort': 0,
'error_message': "None",
'requestStatus': installStatus,
})
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'abort': 1, 'installed': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def fetchModSecSettings(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('fetchStatus', 0)
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
modsecurity = 0
SecAuditEngine = 0
SecRuleEngine = 0
SecDebugLogLevel = "9"
SecAuditLogRelevantStatus = '^(?:5|4(?!04))'
SecAuditLogParts = 'ABIJDEFHZ'
SecAuditLogType = 'Serial'
confPath = os.path.join(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')
modSecPath = os.path.join(virtualHostUtilities.Server_root, 'modules', 'mod_security.so')
if os.path.exists(modSecPath):
command = "sudo cat " + confPath
data = ProcessUtilities.outputExecutioner(command).split('\n')
for items in data:
if items.find('modsecurity ') > -1:
if items.find('on') > -1 or items.find('On') > -1:
modsecurity = 1
continue
if items.find('SecAuditEngine ') > -1:
if items.find('on') > -1 or items.find('On') > -1:
SecAuditEngine = 1
continue
if items.find('SecRuleEngine ') > -1:
if items.find('on') > -1 or items.find('On') > -1:
SecRuleEngine = 1
continue
if items.find('SecDebugLogLevel') > -1:
result = items.split(' ')
if result[0] == 'SecDebugLogLevel':
SecDebugLogLevel = result[1]
continue
if items.find('SecAuditLogRelevantStatus') > -1:
result = items.split(' ')
if result[0] == 'SecAuditLogRelevantStatus':
SecAuditLogRelevantStatus = result[1]
continue
if items.find('SecAuditLogParts') > -1:
result = items.split(' ')
if result[0] == 'SecAuditLogParts':
SecAuditLogParts = result[1]
continue
if items.find('SecAuditLogType') > -1:
result = items.split(' ')
if result[0] == 'SecAuditLogType':
SecAuditLogType = result[1]
continue
final_dic = {'fetchStatus': 1,
'installed': 1,
'SecRuleEngine': SecRuleEngine,
'modsecurity': modsecurity,
'SecAuditEngine': SecAuditEngine,
'SecDebugLogLevel': SecDebugLogLevel,
'SecAuditLogParts': SecAuditLogParts,
'SecAuditLogRelevantStatus': SecAuditLogRelevantStatus,
'SecAuditLogType': SecAuditLogType,
}
else:
final_dic = {'fetchStatus': 1,
'installed': 0}
else:
SecAuditEngine = 0
SecRuleEngine = 0
SecDebugLogLevel = "9"
SecAuditLogRelevantStatus = '^(?:5|4(?!04))'
SecAuditLogParts = 'ABIJDEFHZ'
SecAuditLogType = 'Serial'
confPath = os.path.join(virtualHostUtilities.Server_root, 'conf/modsec.conf')
command = "sudo cat " + confPath
data = ProcessUtilities.outputExecutioner(command).split('\n')
for items in data:
if items.find('SecAuditEngine ') > -1:
if items.find('on') > -1 or items.find('On') > -1:
SecAuditEngine = 1
continue
if items.find('SecRuleEngine ') > -1:
if items.find('on') > -1 or items.find('On') > -1:
SecRuleEngine = 1
continue
if items.find('SecDebugLogLevel') > -1:
result = items.split(' ')
if result[0] == 'SecDebugLogLevel':
SecDebugLogLevel = result[1]
continue
if items.find('SecAuditLogRelevantStatus') > -1:
result = items.split(' ')
if result[0] == 'SecAuditLogRelevantStatus':
SecAuditLogRelevantStatus = result[1]
continue
if items.find('SecAuditLogParts') > -1:
result = items.split(' ')
if result[0] == 'SecAuditLogParts':
SecAuditLogParts = result[1]
continue
if items.find('SecAuditLogType') > -1:
result = items.split(' ')
if result[0] == 'SecAuditLogType':
SecAuditLogType = result[1]
continue
final_dic = {'fetchStatus': 1,
'installed': 1,
'SecRuleEngine': SecRuleEngine,
'SecAuditEngine': SecAuditEngine,
'SecDebugLogLevel': SecDebugLogLevel,
'SecAuditLogParts': SecAuditLogParts,
'SecAuditLogRelevantStatus': SecAuditLogRelevantStatus,
'SecAuditLogType': SecAuditLogType,
}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'fetchStatus': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def saveModSecConfigurations(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('saveStatus', 0)
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
modsecurity = data['modsecurity_status']
SecAuditEngine = data['SecAuditEngine']
SecRuleEngine = data['SecRuleEngine']
SecDebugLogLevel = data['SecDebugLogLevel']
SecAuditLogParts = data['SecAuditLogParts']
SecAuditLogRelevantStatus = data['SecAuditLogRelevantStatus']
SecAuditLogType = data['SecAuditLogType']
if modsecurity == True:
modsecurity = "modsecurity on"
else:
modsecurity = "modsecurity off"
if SecAuditEngine == True:
SecAuditEngine = "SecAuditEngine on"
else:
SecAuditEngine = "SecAuditEngine off"
if SecRuleEngine == True:
SecRuleEngine = "SecRuleEngine On"
else:
SecRuleEngine = "SecRuleEngine off"
SecDebugLogLevel = "SecDebugLogLevel " + str(SecDebugLogLevel)
SecAuditLogParts = "SecAuditLogParts " + str(SecAuditLogParts)
SecAuditLogRelevantStatus = "SecAuditLogRelevantStatus " + SecAuditLogRelevantStatus
SecAuditLogType = "SecAuditLogType " + SecAuditLogType
## writing data temporary to file
tempConfigPath = "/home/cyberpanel/" + str(randint(1000, 9999))
confPath = open(tempConfigPath, "w")
confPath.writelines(modsecurity + "\n")
confPath.writelines(SecAuditEngine + "\n")
confPath.writelines(SecRuleEngine + "\n")
confPath.writelines(SecDebugLogLevel + "\n")
confPath.writelines(SecAuditLogParts + "\n")
confPath.writelines(SecAuditLogRelevantStatus + "\n")
confPath.writelines(SecAuditLogType + "\n")
confPath.close()
## save configuration data
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " saveModSecConfigs --tempConfigPath " + tempConfigPath
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {'saveStatus': 1, 'error_message': "None"}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'saveStatus': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
SecAuditEngine = data['SecAuditEngine']
SecRuleEngine = data['SecRuleEngine']
SecDebugLogLevel = data['SecDebugLogLevel']
SecAuditLogParts = data['SecAuditLogParts']
SecAuditLogRelevantStatus = data['SecAuditLogRelevantStatus']
SecAuditLogType = data['SecAuditLogType']
if SecAuditEngine == True:
SecAuditEngine = "SecAuditEngine on"
else:
SecAuditEngine = "SecAuditEngine off"
if SecRuleEngine == True:
SecRuleEngine = "SecRuleEngine On"
else:
SecRuleEngine = "SecRuleEngine off"
SecDebugLogLevel = "SecDebugLogLevel " + str(SecDebugLogLevel)
SecAuditLogParts = "SecAuditLogParts " + str(SecAuditLogParts)
SecAuditLogRelevantStatus = "SecAuditLogRelevantStatus " + SecAuditLogRelevantStatus
SecAuditLogType = "SecAuditLogType " + SecAuditLogType
## writing data temporary to file
tempConfigPath = "/home/cyberpanel/" + str(randint(1000, 9999))
confPath = open(tempConfigPath, "w")
confPath.writelines(SecAuditEngine + "\n")
confPath.writelines(SecRuleEngine + "\n")
confPath.writelines(SecDebugLogLevel + "\n")
confPath.writelines(SecAuditLogParts + "\n")
confPath.writelines(SecAuditLogRelevantStatus + "\n")
confPath.writelines(SecAuditLogType + "\n")
confPath.close()
## save configuration data
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " saveModSecConfigs --tempConfigPath " + tempConfigPath
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {'saveStatus': 1, 'error_message': "None"}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'saveStatus': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
data_ret = {'saveStatus': 0, 'error_message': str(msg)}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
def modSecRules(self, request = None, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
confPath = os.path.join(virtualHostUtilities.Server_root, "conf/httpd_config.conf")
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).split('\n')
modSecInstalled = 0
for items in httpdConfig:
if items.find('module mod_security') > -1:
modSecInstalled = 1
break
else:
modSecInstalled = 1
return render(request, 'firewall/modSecurityRules.html', {'modSecInstalled': modSecInstalled})
except BaseException as msg:
return HttpResponse(str(msg))
def fetchModSecRules(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('modSecInstalled', 0)
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
confPath = os.path.join(virtualHostUtilities.Server_root, "conf/httpd_config.conf")
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).split('\n')
modSecInstalled = 0
for items in httpdConfig:
if items.find('module mod_security') > -1:
modSecInstalled = 1
break
rulesPath = os.path.join(virtualHostUtilities.Server_root + "/conf/modsec/rules.conf")
if modSecInstalled:
command = "sudo cat " + rulesPath
currentModSecRules = ProcessUtilities.outputExecutioner(command).split('\n')
final_dic = {'modSecInstalled': 1,
'currentModSecRules': currentModSecRules}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'modSecInstalled': 0}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
rulesPath = os.path.join(virtualHostUtilities.Server_root + "/conf/rules.conf")
command = "sudo cat " + rulesPath
currentModSecRules = ProcessUtilities.outputExecutioner(command).split('\n')
final_dic = {'modSecInstalled': 1,
'currentModSecRules': currentModSecRules}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'modSecInstalled': 0,
'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def saveModSecRules(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('saveStatus', 0)
newModSecRules = data['modSecRules']
## writing data temporary to file
rulesPath = open(modSec.tempRulesFile, "w")
rulesPath.write(newModSecRules)
rulesPath.close()
## save configuration data
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " saveModSecRules"
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {'saveStatus': 1, 'error_message': "None"}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'saveStatus': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
data_ret = {'saveStatus': 0, 'error_message': str(msg)}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
def modSecRulesPacks(self, request = None, userID = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
confPath = os.path.join(virtualHostUtilities.Server_root, "conf/httpd_config.conf")
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).split('\n')
modSecInstalled = 0
for items in httpdConfig:
if items.find('module mod_security') > -1:
modSecInstalled = 1
break
else:
modSecInstalled = 1
return render(request, 'firewall/modSecurityRulesPacks.html', {'modSecInstalled': modSecInstalled})
except BaseException as msg:
return HttpResponse(msg)
def getOWASPAndComodoStatus(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('modSecInstalled', 0)
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
confPath = os.path.join(virtualHostUtilities.Server_root, "conf/httpd_config.conf")
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).splitlines()
modSecInstalled = 0
for items in httpdConfig:
if items.find('module mod_security') > -1:
modSecInstalled = 1
break
comodoInstalled = 0
owaspInstalled = 0
if modSecInstalled:
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).splitlines()
for items in httpdConfig:
if items.find('modsec/comodo') > -1:
comodoInstalled = 1
elif items.find('modsec/owasp') > -1:
owaspInstalled = 1
if owaspInstalled == 1 and comodoInstalled == 1:
break
final_dic = {
'modSecInstalled': 1,
'owaspInstalled': owaspInstalled,
'comodoInstalled': comodoInstalled
}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
final_dic = {'modSecInstalled': 0}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
else:
comodoInstalled = 0
owaspInstalled = 0
try:
command = 'sudo ls /usr/local/lsws/conf/comodo_litespeed/'
output = ProcessUtilities.outputExecutioner(command)
if output.find('No such') > -1:
comodoInstalled = 0
else:
comodoInstalled = 1
except subprocess.CalledProcessError:
pass
final_dic = {
'modSecInstalled': 1,
'owaspInstalled': owaspInstalled,
'comodoInstalled': comodoInstalled
}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'modSecInstalled': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def installModSecRulesPack(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('installStatus', 0)
packName = data['packName']
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " " + packName
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {'installStatus': 1, 'error_message': "None"}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'installStatus': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
if packName == 'disableOWASP' or packName == 'installOWASP':
final_json = json.dumps({'installStatus': 0, 'error_message': "OWASP will be available later.", })
return HttpResponse(final_json)
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " " + packName
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {'installStatus': 1, 'error_message': "None"}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'installStatus': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
data_ret = {'installStatus': 0, 'error_message': str(msg)}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
def getRulesFiles(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('fetchStatus', 0)
packName = data['packName']
if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
confPath = os.path.join(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')
command = "sudo cat " + confPath
httpdConfig = ProcessUtilities.outputExecutioner(command).splitlines()
json_data = "["
checker = 0
counter = 0
for items in httpdConfig:
if items.find('modsec/' + packName) > -1:
counter = counter + 1
if items[0] == '#':
status = False
else:
status = True
fileName = items.lstrip('#')
fileName = fileName.split('/')[-1]
dic = {
'id': counter,
'fileName': fileName,
'packName': packName,
'status': status,
}
if checker == 0:
json_data = json_data + json.dumps(dic)
checker = 1
else:
json_data = json_data + ',' + json.dumps(dic)
json_data = json_data + ']'
final_json = json.dumps({'fetchStatus': 1, 'error_message': "None", "data": json_data})
return HttpResponse(final_json)
else:
if packName == 'owasp':
final_json = json.dumps({'fetchStatus': 0, 'error_message': "OWASP will be available later.", })
return HttpResponse(final_json)
comodoPath = '/usr/local/lsws/conf/comodo_litespeed'
command = 'sudo chown -R cyberpanel:cyberpanel /usr/local/lsws/conf'
ProcessUtilities.executioner(command)
json_data = "["
counter = 0
checker = 0
for fileName in os.listdir(comodoPath):
if fileName == 'categories.conf':
continue
if fileName.endswith('bak'):
status = 0
fileName = fileName.rstrip('.bak')
elif fileName.endswith('conf'):
status = 1
else:
continue
dic = {
'id': counter,
'fileName': fileName,
'packName': packName,
'status': status,
}
counter = counter + 1
if checker == 0:
json_data = json_data + json.dumps(dic)
checker = 1
else:
json_data = json_data + ',' + json.dumps(dic)
command = 'sudo chown -R lsadm:lsadm /usr/local/lsws/conf'
ProcessUtilities.executioner(command)
json_data = json_data + ']'
final_json = json.dumps({'fetchStatus': 1, 'error_message': "None", "data": json_data})
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'fetchStatus': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def enableDisableRuleFile(self, userID = None, data = None):
try:
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('saveStatus', 0)
packName = data['packName']
fileName = data['fileName']
currentStatus = data['status']
if currentStatus == True:
functionName = 'disableRuleFile'
else:
functionName = 'enableRuleFile'
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/modSec.py"
execPath = execPath + " " + functionName + ' --packName ' + packName + ' --fileName ' + fileName
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {'saveStatus': 1, 'error_message': "None"}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'saveStatus': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
data_ret = {'saveStatus': 0, 'error_message': str(msg)}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
def csf(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
csfInstalled = 1
try:
command = 'csf -h'
output = ProcessUtilities.outputExecutioner(command)
if output.find("command not found") > -1:
csfInstalled = 0
except subprocess.CalledProcessError:
csfInstalled = 0
return render(self.request,'firewall/csf.html', {'csfInstalled' : csfInstalled})
except BaseException as msg:
return HttpResponse(str(msg))
def installCSF(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('installStatus', 0)
execPath = "sudo /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/csf.py"
execPath = execPath + " installCSF"
ProcessUtilities.popenExecutioner(execPath)
time.sleep(2)
data_ret = {"installStatus": 1}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
final_dic = {'installStatus': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def installStatusCSF(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
installStatus = ProcessUtilities.outputExecutioner("sudo cat " + CSF.installLogPath)
if installStatus.find("[200]")>-1:
command = 'sudo rm -f ' + CSF.installLogPath
ProcessUtilities.executioner(command)
final_json = json.dumps({
'error_message': "None",
'requestStatus': installStatus,
'abort':1,
'installed': 1,
})
return HttpResponse(final_json)
elif installStatus.find("[404]") > -1:
command = 'sudo rm -f ' + CSF.installLogPath
ProcessUtilities.executioner(command)
final_json = json.dumps({
'abort':1,
'installed':0,
'error_message': "None",
'requestStatus': installStatus,
})
return HttpResponse(final_json)
else:
final_json = json.dumps({
'abort':0,
'error_message': "None",
'requestStatus': installStatus,
})
return HttpResponse(final_json)
except BaseException as msg:
final_dic = {'abort':1, 'installed':0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def removeCSF(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('installStatus', 0)
execPath = "sudo /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/csf.py"
execPath = execPath + " removeCSF"
ProcessUtilities.popenExecutioner(execPath)
time.sleep(2)
data_ret = {"installStatus": 1}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
final_dic = {'installStatus': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def fetchCSFSettings(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson('fetchStatus', 0)
currentSettings = CSF.fetchCSFSettings()
data_ret = {"fetchStatus": 1, 'testingMode' : currentSettings['TESTING'],
'tcpIN' : currentSettings['tcpIN'],
'tcpOUT': currentSettings['tcpOUT'],
'udpIN': currentSettings['udpIN'],
'udpOUT': currentSettings['udpOUT'],
'firewallStatus': currentSettings['firewallStatus']
}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
final_dic = {'fetchStatus': 0, 'error_message': 'CSF is not installed.'}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def changeStatus(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson()
data = json.loads(self.request.body)
controller = data['controller']
status = data['status']
execPath = "sudo /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/csf.py"
execPath = execPath + " changeStatus --controller " + controller + " --status " + status
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {"status": 1}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'status': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
final_dic = {'status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def modifyPorts(self, data = None):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson()
protocol = data['protocol']
ports = data['ports']
portsPath = '/tmp/ports'
if os.path.exists(portsPath):
os.remove(portsPath)
writeToFile = open(portsPath, 'w')
writeToFile.write(ports)
writeToFile.close()
execPath = "sudo /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/csf.py"
execPath = execPath + " modifyPorts --protocol " + protocol + " --ports " + portsPath
output = ProcessUtilities.outputExecutioner(execPath)
if output.find("1,None") > -1:
data_ret = {"status": 1}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
else:
data_ret = {'status': 0, 'error_message': output}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
final_dic = {'status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def modifyIPs(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadErrorJson()
data = json.loads(self.request.body)
mode = data['mode']
ipAddress = data['ipAddress']
if mode == 'allowIP':
CSF.allowIP(ipAddress)
elif mode == 'blockIP':
CSF.blockIP(ipAddress)
data_ret = {"status": 1}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
final_dic = {'status': 0, 'error_message': str(msg)}
final_json = json.dumps(final_dic)
return HttpResponse(final_json)
def imunify(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
return ACLManager.loadError()
ipFile = "/etc/cyberpanel/machineIP"
f = open(ipFile)
ipData = f.read()
ipAddress = ipData.split('\n', 1)[0]
data = {}
data['ipAddress'] = ipAddress
if os.path.exists(FirewallManager.CLPath):
data['CL'] = 1
else:
data['CL'] = 0
if os.path.exists(FirewallManager.imunifyPath):
data['imunify'] = 1
else:
data['imunify'] = 0
if data['CL'] == 0:
return render(self.request, 'firewall/notAvailable.html', data)
elif data['imunify'] == 0:
return render(self.request, 'firewall/notAvailable.html', data)
else:
return render(self.request, 'firewall/imunify.html', data)
except BaseException as msg:
return HttpResponse(str(msg))
def submitinstallImunify(self):
try:
userID = self.request.session['userID']
currentACL = ACLManager.loadedACL(userID)
if currentACL['admin'] == 1:
pass
else:
logging.CyberCPLogFileWriter.statusWriter(ServerStatusUtil.lswsInstallStatusPath,
'Not authorized to install container packages. [404].',
1)
return 0
data = json.loads(self.request.body)
execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/CLManager/CageFS.py"
execPath = execPath + " --function submitinstallImunify --key %s" % (data['key'])
ProcessUtilities.popenExecutioner(execPath)
data_ret = {'status': 1, 'error_message': 'None'}
json_data = json.dumps(data_ret)
return HttpResponse(json_data)
except BaseException as msg:
logging.CyberCPLogFileWriter.statusWriter(ServerStatusUtil.lswsInstallStatusPath, str(msg) + ' [404].', 1)
| [
"plogical.csf.CSF.fetchCSFSettings",
"firewall.models.FirewallRules",
"time.sleep",
"plogical.processUtilities.ProcessUtilities.outputExecutioner",
"sys.path.append",
"os.remove",
"os.environ.setdefault",
"django.shortcuts.render",
"plogical.processUtilities.ProcessUtilities.executioner",
"os.path... | [((82, 119), 'sys.path.append', 'sys.path.append', (['"""/usr/local/CyberCP"""'], {}), "('/usr/local/CyberCP')\n", (97, 119), False, 'import sys\n'), ((120, 187), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""CyberCP.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'CyberCP.settings')\n", (141, 187), False, 'import os\n'), ((188, 202), 'django.setup', 'django.setup', ([], {}), '()\n', (200, 202), False, 'import django\n'), ((1028, 1056), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (1048, 1056), False, 'from plogical.acl import ACLManager\n'), ((1204, 1242), 'django.shortcuts.render', 'render', (['request', '"""firewall/index.html"""'], {}), "(request, 'firewall/index.html')\n", (1210, 1242), False, 'from django.shortcuts import HttpResponse, render\n'), ((1420, 1448), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (1440, 1448), False, 'from plogical.acl import ACLManager\n'), ((1596, 1637), 'django.shortcuts.render', 'render', (['request', '"""firewall/firewall.html"""'], {}), "(request, 'firewall/firewall.html')\n", (1602, 1637), False, 'from django.shortcuts import HttpResponse, render\n'), ((1802, 1830), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (1822, 1830), False, 'from plogical.acl import ACLManager\n'), ((1999, 2026), 'firewall.models.FirewallRules.objects.all', 'FirewallRules.objects.all', ([], {}), '()\n', (2024, 2026), False, 'from firewall.models import FirewallRules\n'), ((2665, 2756), 'json.dumps', 'json.dumps', (["{'status': 1, 'fetchStatus': 1, 'error_message': 'None', 'data': json_data}"], {}), "({'status': 1, 'fetchStatus': 1, 'error_message': 'None', 'data':\n json_data})\n", (2675, 2756), False, 'import json\n'), ((2772, 2796), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (2784, 2796), False, 'from django.shortcuts import HttpResponse, render\n'), ((3099, 3127), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (3119, 3127), False, 'from plogical.acl import ACLManager\n'), ((3453, 3510), 'plogical.firewallUtilities.FirewallUtilities.addRule', 'FirewallUtilities.addRule', (['ruleProtocol', 'rulePort', 'ruleIP'], {}), '(ruleProtocol, rulePort, ruleIP)\n', (3478, 3510), False, 'from plogical.firewallUtilities import FirewallUtilities\n'), ((3536, 3622), 'firewall.models.FirewallRules', 'FirewallRules', ([], {'name': 'ruleName', 'proto': 'ruleProtocol', 'port': 'rulePort', 'ipAddress': 'ruleIP'}), '(name=ruleName, proto=ruleProtocol, port=rulePort, ipAddress=\n ruleIP)\n', (3549, 3622), False, 'from firewall.models import FirewallRules\n'), ((3753, 3774), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (3763, 3774), False, 'import json\n'), ((3794, 3818), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (3806, 3818), False, 'from django.shortcuts import HttpResponse, render\n'), ((4124, 4152), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (4144, 4152), False, 'from plogical.acl import ACLManager\n'), ((4462, 4522), 'plogical.firewallUtilities.FirewallUtilities.deleteRule', 'FirewallUtilities.deleteRule', (['ruleProtocol', 'rulePort', 'ruleIP'], {}), '(ruleProtocol, rulePort, ruleIP)\n', (4490, 4522), False, 'from plogical.firewallUtilities import FirewallUtilities\n'), ((4546, 4582), 'firewall.models.FirewallRules.objects.get', 'FirewallRules.objects.get', ([], {'id': 'ruleID'}), '(id=ruleID)\n', (4571, 4582), False, 'from firewall.models import FirewallRules\n'), ((4721, 4742), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (4731, 4742), False, 'import json\n'), ((4762, 4786), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (4774, 4786), False, 'from django.shortcuts import HttpResponse, render\n'), ((5099, 5127), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (5119, 5127), False, 'from plogical.acl import ACLManager\n'), ((5347, 5384), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (5375, 5384), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((6149, 6177), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (6169, 6177), False, 'from plogical.acl import ACLManager\n'), ((6400, 6437), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (6428, 6437), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((7197, 7225), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (7217, 7225), False, 'from plogical.acl import ACLManager\n'), ((7446, 7483), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (7474, 7483), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((8241, 8269), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (8261, 8269), False, 'from plogical.acl import ACLManager\n'), ((8474, 8517), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (8508, 8517), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((9241, 9269), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (9261, 9269), False, 'from plogical.acl import ACLManager\n'), ((9417, 9459), 'django.shortcuts.render', 'render', (['request', '"""firewall/secureSSH.html"""'], {}), "(request, 'firewall/secureSSH.html')\n", (9423, 9459), False, 'from django.shortcuts import HttpResponse, render\n'), ((9636, 9664), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (9656, 9664), False, 'from plogical.acl import ACLManager\n'), ((12506, 12534), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (12526, 12534), False, 'from plogical.acl import ACLManager\n'), ((13187, 13231), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (13221, 13231), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((15217, 15245), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (15237, 15245), False, 'from plogical.acl import ACLManager\n'), ((15644, 15688), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (15678, 15688), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((16402, 16430), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (16422, 16430), False, 'from plogical.acl import ACLManager\n'), ((17016, 17060), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (17050, 17060), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((17778, 17806), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (17798, 17806), False, 'from plogical.acl import ACLManager\n'), ((18583, 18681), 'django.shortcuts.render', 'render', (['request', '"""firewall/modSecurity.html"""', "{'modSecInstalled': modSecInstalled, 'OLS': OLS}"], {}), "(request, 'firewall/modSecurity.html', {'modSecInstalled':\n modSecInstalled, 'OLS': OLS})\n", (18589, 18681), False, 'from django.shortcuts import HttpResponse, render\n'), ((18853, 18881), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (18873, 18881), False, 'from plogical.acl import ACLManager\n'), ((19210, 19253), 'plogical.processUtilities.ProcessUtilities.popenExecutioner', 'ProcessUtilities.popenExecutioner', (['execPath'], {}), '(execPath)\n', (19243, 19253), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((19267, 19280), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (19277, 19280), False, 'import time\n'), ((19307, 19364), 'json.dumps', 'json.dumps', (["{'installModSec': 1, 'error_message': 'None'}"], {}), "({'installModSec': 1, 'error_message': 'None'})\n", (19317, 19364), False, 'import json\n'), ((19384, 19408), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (19396, 19408), False, 'from django.shortcuts import HttpResponse, render\n'), ((19774, 19817), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (19808, 19817), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((21728, 21756), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (21748, 21756), False, 'from plogical.acl import ACLManager\n'), ((27921, 27942), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (27931, 27942), False, 'import json\n'), ((27962, 27986), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (27974, 27986), False, 'from django.shortcuts import HttpResponse, render\n'), ((28293, 28321), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (28313, 28321), False, 'from plogical.acl import ACLManager\n'), ((33933, 33961), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (33953, 33961), False, 'from plogical.acl import ACLManager\n'), ((34689, 34780), 'django.shortcuts.render', 'render', (['request', '"""firewall/modSecurityRules.html"""', "{'modSecInstalled': modSecInstalled}"], {}), "(request, 'firewall/modSecurityRules.html', {'modSecInstalled':\n modSecInstalled})\n", (34695, 34780), False, 'from django.shortcuts import HttpResponse, render\n'), ((34956, 34984), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (34976, 34984), False, 'from plogical.acl import ACLManager\n'), ((37186, 37214), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (37206, 37214), False, 'from plogical.acl import ACLManager\n'), ((37818, 37862), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (37852, 37862), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((38554, 38582), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (38574, 38582), False, 'from plogical.acl import ACLManager\n'), ((39311, 39407), 'django.shortcuts.render', 'render', (['request', '"""firewall/modSecurityRulesPacks.html"""', "{'modSecInstalled': modSecInstalled}"], {}), "(request, 'firewall/modSecurityRulesPacks.html', {'modSecInstalled':\n modSecInstalled})\n", (39317, 39407), False, 'from django.shortcuts import HttpResponse, render\n'), ((39586, 39614), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (39606, 39614), False, 'from plogical.acl import ACLManager\n'), ((42551, 42579), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (42571, 42579), False, 'from plogical.acl import ACLManager\n'), ((44751, 44779), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (44771, 44779), False, 'from plogical.acl import ACLManager\n'), ((48583, 48611), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (48603, 48611), False, 'from plogical.acl import ACLManager\n'), ((49283, 49327), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (49317, 49327), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((50026, 50054), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (50046, 50054), False, 'from plogical.acl import ACLManager\n'), ((50530, 50603), 'django.shortcuts.render', 'render', (['self.request', '"""firewall/csf.html"""', "{'csfInstalled': csfInstalled}"], {}), "(self.request, 'firewall/csf.html', {'csfInstalled': csfInstalled})\n", (50536, 50603), False, 'from django.shortcuts import HttpResponse, render\n'), ((50804, 50832), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (50824, 50832), False, 'from plogical.acl import ACLManager\n'), ((51160, 51203), 'plogical.processUtilities.ProcessUtilities.popenExecutioner', 'ProcessUtilities.popenExecutioner', (['execPath'], {}), '(execPath)\n', (51193, 51203), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((51217, 51230), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (51227, 51230), False, 'import time\n'), ((51300, 51320), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (51310, 51320), False, 'import json\n'), ((51340, 51363), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (51352, 51363), False, 'from django.shortcuts import HttpResponse, render\n'), ((51688, 51716), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (51708, 51716), False, 'from plogical.acl import ACLManager\n'), ((51746, 51814), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (["('sudo cat ' + CSF.installLogPath)"], {}), "('sudo cat ' + CSF.installLogPath)\n", (51780, 51814), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((53576, 53604), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (53596, 53604), False, 'from plogical.acl import ACLManager\n'), ((53930, 53973), 'plogical.processUtilities.ProcessUtilities.popenExecutioner', 'ProcessUtilities.popenExecutioner', (['execPath'], {}), '(execPath)\n', (53963, 53973), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((53987, 54000), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (53997, 54000), False, 'import time\n'), ((54070, 54090), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (54080, 54090), False, 'import json\n'), ((54110, 54133), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (54122, 54133), False, 'from django.shortcuts import HttpResponse, render\n'), ((54459, 54487), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (54479, 54487), False, 'from plogical.acl import ACLManager\n'), ((54666, 54688), 'plogical.csf.CSF.fetchCSFSettings', 'CSF.fetchCSFSettings', ([], {}), '()\n', (54686, 54688), False, 'from plogical.csf import CSF\n'), ((55144, 55164), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (55154, 55164), False, 'import json\n'), ((55184, 55207), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (55196, 55207), False, 'from django.shortcuts import HttpResponse, render\n'), ((55541, 55569), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (55561, 55569), False, 'from plogical.acl import ACLManager\n'), ((55721, 55750), 'json.loads', 'json.loads', (['self.request.body'], {}), '(self.request.body)\n', (55731, 55750), False, 'import json\n'), ((56071, 56115), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (56105, 56115), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((56803, 56831), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (56823, 56831), False, 'from plogical.acl import ACLManager\n'), ((57092, 57117), 'os.path.exists', 'os.path.exists', (['portsPath'], {}), '(portsPath)\n', (57106, 57117), False, 'import os\n'), ((57509, 57553), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (57543, 57553), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((58226, 58254), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (58246, 58254), False, 'from plogical.acl import ACLManager\n'), ((58406, 58435), 'json.loads', 'json.loads', (['self.request.body'], {}), '(self.request.body)\n', (58416, 58435), False, 'import json\n'), ((58722, 58742), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (58732, 58742), False, 'import json\n'), ((58762, 58785), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (58774, 58785), False, 'from django.shortcuts import HttpResponse, render\n'), ((59094, 59122), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (59114, 59122), False, 'from plogical.acl import ACLManager\n'), ((59489, 59527), 'os.path.exists', 'os.path.exists', (['FirewallManager.CLPath'], {}), '(FirewallManager.CLPath)\n', (59503, 59527), False, 'import os\n'), ((59625, 59668), 'os.path.exists', 'os.path.exists', (['FirewallManager.imunifyPath'], {}), '(FirewallManager.imunifyPath)\n', (59639, 59668), False, 'import os\n'), ((60293, 60321), 'plogical.acl.ACLManager.loadedACL', 'ACLManager.loadedACL', (['userID'], {}), '(userID)\n', (60313, 60321), False, 'from plogical.acl import ACLManager\n'), ((60721, 60750), 'json.loads', 'json.loads', (['self.request.body'], {}), '(self.request.body)\n', (60731, 60750), False, 'import json\n'), ((60952, 60995), 'plogical.processUtilities.ProcessUtilities.popenExecutioner', 'ProcessUtilities.popenExecutioner', (['execPath'], {}), '(execPath)\n', (60985, 60995), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((61083, 61103), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (61093, 61103), False, 'import json\n'), ((61123, 61146), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (61135, 61146), False, 'from django.shortcuts import HttpResponse, render\n'), ((1161, 1183), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (1181, 1183), False, 'from plogical.acl import ACLManager\n'), ((1553, 1575), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (1573, 1575), False, 'from plogical.acl import ACLManager\n'), ((1935, 1977), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""fetchStatus"""', '(0)'], {}), "('fetchStatus', 0)\n", (1959, 1977), False, 'from plogical.acl import ACLManager\n'), ((2943, 2964), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (2953, 2964), False, 'import json\n'), ((2984, 3008), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (2996, 3008), False, 'from django.shortcuts import HttpResponse, render\n'), ((3232, 3273), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""add_status"""', '(0)'], {}), "('add_status', 0)\n", (3256, 3273), False, 'from plogical.acl import ACLManager\n'), ((3964, 3985), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (3974, 3985), False, 'import json\n'), ((4005, 4029), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (4017, 4029), False, 'from django.shortcuts import HttpResponse, render\n'), ((4257, 4301), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""delete_status"""', '(0)'], {}), "('delete_status', 0)\n", (4281, 4301), False, 'from plogical.acl import ACLManager\n'), ((4935, 4956), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (4945, 4956), False, 'import json\n'), ((4976, 5000), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (4988, 5000), False, 'from django.shortcuts import HttpResponse, render\n'), ((5232, 5276), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""reload_status"""', '(0)'], {}), "('reload_status', 0)\n", (5256, 5276), False, 'from plogical.acl import ACLManager\n'), ((5514, 5535), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (5524, 5535), False, 'import json\n'), ((5559, 5583), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (5571, 5583), False, 'from django.shortcuts import HttpResponse, render\n'), ((5781, 5802), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (5791, 5802), False, 'import json\n'), ((5826, 5850), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (5838, 5850), False, 'from django.shortcuts import HttpResponse, render\n'), ((5986, 6007), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (5996, 6007), False, 'import json\n'), ((6027, 6051), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (6039, 6051), False, 'from django.shortcuts import HttpResponse, render\n'), ((6282, 6325), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""start_status"""', '(0)'], {}), "('start_status', 0)\n", (6306, 6325), False, 'from plogical.acl import ACLManager\n'), ((6566, 6587), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (6576, 6587), False, 'import json\n'), ((6611, 6635), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (6623, 6635), False, 'from django.shortcuts import HttpResponse, render\n'), ((6831, 6852), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (6841, 6852), False, 'import json\n'), ((6876, 6900), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (6888, 6900), False, 'from django.shortcuts import HttpResponse, render\n'), ((7035, 7056), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (7045, 7056), False, 'import json\n'), ((7076, 7100), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (7088, 7100), False, 'from django.shortcuts import HttpResponse, render\n'), ((7330, 7372), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""stop_status"""', '(0)'], {}), "('stop_status', 0)\n", (7354, 7372), False, 'from plogical.acl import ACLManager\n'), ((7611, 7632), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (7621, 7632), False, 'import json\n'), ((7656, 7680), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (7668, 7680), False, 'from django.shortcuts import HttpResponse, render\n'), ((7874, 7895), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (7884, 7895), False, 'import json\n'), ((7919, 7943), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (7931, 7943), False, 'from django.shortcuts import HttpResponse, render\n'), ((8077, 8098), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (8087, 8098), False, 'import json\n'), ((8118, 8142), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (8130, 8142), False, 'from django.shortcuts import HttpResponse, render\n'), ((8374, 8400), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', ([], {}), '()\n', (8398, 8400), False, 'from plogical.acl import ACLManager\n'), ((8677, 8698), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (8687, 8698), False, 'import json\n'), ((8722, 8746), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (8734, 8746), False, 'from django.shortcuts import HttpResponse, render\n'), ((8882, 8903), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (8892, 8903), False, 'import json\n'), ((8927, 8951), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (8939, 8951), False, 'from django.shortcuts import HttpResponse, render\n'), ((9080, 9101), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (9090, 9101), False, 'import json\n'), ((9121, 9145), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (9133, 9145), False, 'from django.shortcuts import HttpResponse, render\n'), ((9374, 9396), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (9394, 9396), False, 'from plogical.acl import ACLManager\n'), ((9769, 9795), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', ([], {}), '()\n', (9793, 9795), False, 'from plogical.acl import ACLManager\n'), ((10706, 10727), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (10716, 10727), False, 'import json\n'), ((10751, 10775), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (10763, 10775), False, 'from django.shortcuts import HttpResponse, render\n'), ((12097, 12166), 'json.dumps', 'json.dumps', (["{'status': 1, 'error_message': 'None', 'data': json_data}"], {}), "({'status': 1, 'error_message': 'None', 'data': json_data})\n", (12107, 12166), False, 'import json\n'), ((12190, 12214), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (12202, 12214), False, 'from django.shortcuts import HttpResponse, render\n'), ((12343, 12364), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (12353, 12364), False, 'import json\n'), ((12384, 12408), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (12396, 12408), False, 'from django.shortcuts import HttpResponse, render\n'), ((12639, 12680), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""saveStatus"""', '(0)'], {}), "('saveStatus', 0)\n", (12663, 12680), False, 'from plogical.acl import ACLManager\n'), ((13334, 13357), 'os.path.exists', 'os.path.exists', (['csfPath'], {}), '(csfPath)\n', (13348, 13357), False, 'import os\n'), ((14640, 14661), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (14650, 14661), False, 'import json\n'), ((14685, 14709), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (14697, 14709), False, 'from django.shortcuts import HttpResponse, render\n'), ((14841, 14862), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (14851, 14862), False, 'import json\n'), ((14886, 14910), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (14898, 14910), False, 'from django.shortcuts import HttpResponse, render\n'), ((15056, 15077), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (15066, 15077), False, 'import json\n'), ((15097, 15121), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (15109, 15121), False, 'from django.shortcuts import HttpResponse, render\n'), ((15350, 15394), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""delete_status"""', '(0)'], {}), "('delete_status', 0)\n", (15374, 15394), False, 'from plogical.acl import ACLManager\n'), ((15824, 15845), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (15834, 15845), False, 'import json\n'), ((15869, 15893), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (15881, 15893), False, 'from django.shortcuts import HttpResponse, render\n'), ((16027, 16048), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (16037, 16048), False, 'import json\n'), ((16072, 16096), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (16084, 16096), False, 'from django.shortcuts import HttpResponse, render\n'), ((16244, 16265), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (16254, 16265), False, 'import json\n'), ((16285, 16309), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (16297, 16309), False, 'from django.shortcuts import HttpResponse, render\n'), ((16535, 16576), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""add_status"""', '(0)'], {}), "('add_status', 0)\n", (16559, 16576), False, 'from plogical.acl import ACLManager\n'), ((17193, 17214), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (17203, 17214), False, 'import json\n'), ((17238, 17262), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (17250, 17262), False, 'from django.shortcuts import HttpResponse, render\n'), ((17393, 17414), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (17403, 17414), False, 'import json\n'), ((17438, 17462), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (17450, 17462), False, 'from django.shortcuts import HttpResponse, render\n'), ((17607, 17628), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (17617, 17628), False, 'import json\n'), ((17648, 17672), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (17660, 17672), False, 'from django.shortcuts import HttpResponse, render\n'), ((17911, 17933), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (17931, 17933), False, 'from plogical.acl import ACLManager\n'), ((17950, 17981), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (17979, 17981), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((18058, 18130), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (18070, 18130), False, 'import os\n'), ((18986, 19030), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""installModSec"""', '(0)'], {}), "('installModSec', 0)\n", (19010, 19030), False, 'from plogical.acl import ACLManager\n'), ((19544, 19565), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (19554, 19565), False, 'import json\n'), ((19585, 19609), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (19597, 19609), False, 'from django.shortcuts import HttpResponse, render\n'), ((20076, 20120), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (20110, 20120), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((20589, 20690), 'json.dumps', 'json.dumps', (["{'error_message': 'None', 'requestStatus': installStatus, 'abort': 1,\n 'installed': 1}"], {}), "({'error_message': 'None', 'requestStatus': installStatus,\n 'abort': 1, 'installed': 1})\n", (20599, 20690), False, 'import json\n'), ((20809, 20833), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (20821, 20833), False, 'from django.shortcuts import HttpResponse, render\n'), ((21560, 21581), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (21570, 21581), False, 'import json\n'), ((21601, 21625), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (21613, 21625), False, 'from django.shortcuts import HttpResponse, render\n'), ((21861, 21903), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""fetchStatus"""', '(0)'], {}), "('fetchStatus', 0)\n", (21885, 21903), False, 'from plogical.acl import ACLManager\n'), ((21920, 21951), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (21949, 21951), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((22297, 22369), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (22309, 22369), False, 'import os\n'), ((22399, 22475), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""modules"""', '"""mod_security.so"""'], {}), "(virtualHostUtilities.Server_root, 'modules', 'mod_security.so')\n", (22411, 22475), False, 'import os\n'), ((22496, 22522), 'os.path.exists', 'os.path.exists', (['modSecPath'], {}), '(modSecPath)\n', (22510, 22522), False, 'import os\n'), ((25614, 25680), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/modsec.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/modsec.conf')\n", (25626, 25680), False, 'import os\n'), ((28120, 28141), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (28130, 28141), False, 'import json\n'), ((28161, 28185), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (28173, 28185), False, 'from django.shortcuts import HttpResponse, render\n'), ((28426, 28467), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""saveStatus"""', '(0)'], {}), "('saveStatus', 0)\n", (28450, 28467), False, 'from plogical.acl import ACLManager\n'), ((28484, 28515), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (28513, 28515), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((30752, 30796), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (30786, 30796), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((33170, 33214), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (33204, 33214), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((33771, 33791), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (33781, 33791), False, 'import json\n'), ((33811, 33834), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (33823, 33834), False, 'from django.shortcuts import HttpResponse, render\n'), ((34066, 34088), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (34086, 34088), False, 'from plogical.acl import ACLManager\n'), ((34105, 34136), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (34134, 34136), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((34189, 34261), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (34201, 34261), False, 'import os\n'), ((35089, 35135), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""modSecInstalled"""', '(0)'], {}), "('modSecInstalled', 0)\n", (35113, 35135), False, 'from plogical.acl import ACLManager\n'), ((35152, 35183), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (35181, 35183), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((35237, 35309), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (35249, 35309), False, 'import os\n'), ((35692, 35766), 'os.path.join', 'os.path.join', (["(virtualHostUtilities.Server_root + '/conf/modsec/rules.conf')"], {}), "(virtualHostUtilities.Server_root + '/conf/modsec/rules.conf')\n", (35704, 35766), False, 'import os\n'), ((36425, 36492), 'os.path.join', 'os.path.join', (["(virtualHostUtilities.Server_root + '/conf/rules.conf')"], {}), "(virtualHostUtilities.Server_root + '/conf/rules.conf')\n", (36437, 36492), False, 'import os\n'), ((36790, 36811), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (36800, 36811), False, 'import json\n'), ((36835, 36859), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (36847, 36859), False, 'from django.shortcuts import HttpResponse, render\n'), ((37022, 37043), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (37032, 37043), False, 'import json\n'), ((37063, 37087), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (37075, 37087), False, 'from django.shortcuts import HttpResponse, render\n'), ((37319, 37360), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""saveStatus"""', '(0)'], {}), "('saveStatus', 0)\n", (37343, 37360), False, 'from plogical.acl import ACLManager\n'), ((38005, 38025), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (38015, 38025), False, 'import json\n'), ((38049, 38072), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (38061, 38072), False, 'from django.shortcuts import HttpResponse, render\n'), ((38189, 38209), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (38199, 38209), False, 'import json\n'), ((38233, 38256), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (38245, 38256), False, 'from django.shortcuts import HttpResponse, render\n'), ((38387, 38407), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (38397, 38407), False, 'import json\n'), ((38427, 38450), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (38439, 38450), False, 'from django.shortcuts import HttpResponse, render\n'), ((38687, 38709), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (38707, 38709), False, 'from plogical.acl import ACLManager\n'), ((38726, 38757), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (38755, 38757), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((38811, 38883), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (38823, 38883), False, 'import os\n'), ((39461, 39478), 'django.shortcuts.HttpResponse', 'HttpResponse', (['msg'], {}), '(msg)\n', (39473, 39478), False, 'from django.shortcuts import HttpResponse, render\n'), ((39719, 39765), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""modSecInstalled"""', '(0)'], {}), "('modSecInstalled', 0)\n", (39743, 39765), False, 'from plogical.acl import ACLManager\n'), ((39782, 39813), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (39811, 39813), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((39866, 39938), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (39878, 39938), False, 'import os\n'), ((42172, 42193), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (42182, 42193), False, 'import json\n'), ((42217, 42241), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (42229, 42241), False, 'from django.shortcuts import HttpResponse, render\n'), ((42379, 42400), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (42389, 42400), False, 'import json\n'), ((42420, 42444), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (42432, 42444), False, 'from django.shortcuts import HttpResponse, render\n'), ((42684, 42728), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""installStatus"""', '(0)'], {}), "('installStatus', 0)\n", (42708, 42728), False, 'from plogical.acl import ACLManager\n'), ((42786, 42817), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (42815, 42817), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((43041, 43085), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (43075, 43085), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((43981, 44025), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['execPath'], {}), '(execPath)\n', (44015, 44025), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((44591, 44611), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (44601, 44611), False, 'import json\n'), ((44631, 44654), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (44643, 44654), False, 'from django.shortcuts import HttpResponse, render\n'), ((44884, 44926), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""fetchStatus"""', '(0)'], {}), "('fetchStatus', 0)\n", (44908, 44926), False, 'from plogical.acl import ACLManager\n'), ((44984, 45015), 'plogical.processUtilities.ProcessUtilities.decideServer', 'ProcessUtilities.decideServer', ([], {}), '()\n', (45013, 45015), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((45068, 45140), 'os.path.join', 'os.path.join', (['virtualHostUtilities.Server_root', '"""conf/httpd_config.conf"""'], {}), "(virtualHostUtilities.Server_root, 'conf/httpd_config.conf')\n", (45080, 45140), False, 'import os\n'), ((46368, 46442), 'json.dumps', 'json.dumps', (["{'fetchStatus': 1, 'error_message': 'None', 'data': json_data}"], {}), "({'fetchStatus': 1, 'error_message': 'None', 'data': json_data})\n", (46378, 46442), False, 'import json\n'), ((46466, 46490), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (46478, 46490), False, 'from django.shortcuts import HttpResponse, render\n'), ((46889, 46926), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (46917, 46926), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((47049, 47071), 'os.listdir', 'os.listdir', (['comodoPath'], {}), '(comodoPath)\n', (47059, 47071), False, 'import os\n'), ((48045, 48082), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (48073, 48082), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((48157, 48231), 'json.dumps', 'json.dumps', (["{'fetchStatus': 1, 'error_message': 'None', 'data': json_data}"], {}), "({'fetchStatus': 1, 'error_message': 'None', 'data': json_data})\n", (48167, 48231), False, 'import json\n'), ((48255, 48279), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (48267, 48279), False, 'from django.shortcuts import HttpResponse, render\n'), ((48413, 48434), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (48423, 48434), False, 'import json\n'), ((48454, 48478), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (48466, 48478), False, 'from django.shortcuts import HttpResponse, render\n'), ((48716, 48757), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""saveStatus"""', '(0)'], {}), "('saveStatus', 0)\n", (48740, 48757), False, 'from plogical.acl import ACLManager\n'), ((49470, 49490), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (49480, 49490), False, 'import json\n'), ((49514, 49537), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (49526, 49537), False, 'from django.shortcuts import HttpResponse, render\n'), ((49654, 49674), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (49664, 49674), False, 'import json\n'), ((49698, 49721), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (49710, 49721), False, 'from django.shortcuts import HttpResponse, render\n'), ((49852, 49872), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (49862, 49872), False, 'import json\n'), ((49892, 49915), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (49904, 49915), False, 'from django.shortcuts import HttpResponse, render\n'), ((50159, 50181), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (50179, 50181), False, 'from plogical.acl import ACLManager\n'), ((50289, 50332), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (50323, 50332), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((50937, 50981), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""installStatus"""', '(0)'], {}), "('installStatus', 0)\n", (50961, 50981), False, 'from plogical.acl import ACLManager\n'), ((51499, 51520), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (51509, 51520), False, 'import json\n'), ((51540, 51564), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (51552, 51564), False, 'from django.shortcuts import HttpResponse, render\n'), ((51941, 51978), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (51969, 51978), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((52009, 52110), 'json.dumps', 'json.dumps', (["{'error_message': 'None', 'requestStatus': installStatus, 'abort': 1,\n 'installed': 1}"], {}), "({'error_message': 'None', 'requestStatus': installStatus,\n 'abort': 1, 'installed': 1})\n", (52019, 52110), False, 'import json\n'), ((52337, 52361), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (52349, 52361), False, 'from django.shortcuts import HttpResponse, render\n'), ((53394, 53415), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (53404, 53415), False, 'import json\n'), ((53435, 53459), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (53447, 53459), False, 'from django.shortcuts import HttpResponse, render\n'), ((53709, 53753), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""installStatus"""', '(0)'], {}), "('installStatus', 0)\n", (53733, 53753), False, 'from plogical.acl import ACLManager\n'), ((54269, 54290), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (54279, 54290), False, 'import json\n'), ((54310, 54334), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (54322, 54334), False, 'from django.shortcuts import HttpResponse, render\n'), ((54592, 54634), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', (['"""fetchStatus"""', '(0)'], {}), "('fetchStatus', 0)\n", (54616, 54634), False, 'from plogical.acl import ACLManager\n'), ((55356, 55377), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (55366, 55377), False, 'import json\n'), ((55397, 55421), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (55409, 55421), False, 'from django.shortcuts import HttpResponse, render\n'), ((55674, 55700), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', ([], {}), '()\n', (55698, 55700), False, 'from plogical.acl import ACLManager\n'), ((56229, 56249), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (56239, 56249), False, 'import json\n'), ((56273, 56296), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (56285, 56296), False, 'from django.shortcuts import HttpResponse, render\n'), ((56409, 56429), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (56419, 56429), False, 'import json\n'), ((56453, 56476), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (56465, 56476), False, 'from django.shortcuts import HttpResponse, render\n'), ((56605, 56626), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (56615, 56626), False, 'import json\n'), ((56646, 56670), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (56658, 56670), False, 'from django.shortcuts import HttpResponse, render\n'), ((56936, 56962), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', ([], {}), '()\n', (56960, 56962), False, 'from plogical.acl import ACLManager\n'), ((57135, 57155), 'os.remove', 'os.remove', (['portsPath'], {}), '(portsPath)\n', (57144, 57155), False, 'import os\n'), ((57667, 57687), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (57677, 57687), False, 'import json\n'), ((57711, 57734), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (57723, 57734), False, 'from django.shortcuts import HttpResponse, render\n'), ((57847, 57867), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (57857, 57867), False, 'import json\n'), ((57891, 57914), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (57903, 57914), False, 'from django.shortcuts import HttpResponse, render\n'), ((58043, 58064), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (58053, 58064), False, 'import json\n'), ((58084, 58108), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (58096, 58108), False, 'from django.shortcuts import HttpResponse, render\n'), ((58359, 58385), 'plogical.acl.ACLManager.loadErrorJson', 'ACLManager.loadErrorJson', ([], {}), '()\n', (58383, 58385), False, 'from plogical.acl import ACLManager\n'), ((58562, 58584), 'plogical.csf.CSF.allowIP', 'CSF.allowIP', (['ipAddress'], {}), '(ipAddress)\n', (58573, 58584), False, 'from plogical.csf import CSF\n'), ((58914, 58935), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (58924, 58935), False, 'import json\n'), ((58955, 58979), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (58967, 58979), False, 'from django.shortcuts import HttpResponse, render\n'), ((59227, 59249), 'plogical.acl.ACLManager.loadError', 'ACLManager.loadError', ([], {}), '()\n', (59247, 59249), False, 'from plogical.acl import ACLManager\n'), ((59816, 59872), 'django.shortcuts.render', 'render', (['self.request', '"""firewall/notAvailable.html"""', 'data'], {}), "(self.request, 'firewall/notAvailable.html', data)\n", (59822, 59872), False, 'from django.shortcuts import HttpResponse, render\n'), ((60419, 60568), 'plogical.CyberCPLogFileWriter.CyberCPLogFileWriter.statusWriter', 'logging.CyberCPLogFileWriter.statusWriter', (['ServerStatusUtil.lswsInstallStatusPath', '"""Not authorized to install container packages. [404]."""', '(1)'], {}), "(ServerStatusUtil.\n lswsInstallStatusPath,\n 'Not authorized to install container packages. [404].', 1)\n", (60460, 60568), True, 'import plogical.CyberCPLogFileWriter as logging\n'), ((16658, 16677), 'random.randint', 'randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (16665, 16677), False, 'from random import randint\n'), ((20249, 20395), 'json.dumps', 'json.dumps', (["{'error_message': 'Failed to install ModSecurity configurations.',\n 'requestStatus': installStatus, 'abort': 1, 'installed': 0}"], {}), "({'error_message':\n 'Failed to install ModSecurity configurations.', 'requestStatus':\n installStatus, 'abort': 1, 'installed': 0})\n", (20259, 20395), False, 'import json\n'), ((20534, 20558), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (20546, 20558), False, 'from django.shortcuts import HttpResponse, render\n'), ((20915, 21016), 'json.dumps', 'json.dumps', (["{'abort': 1, 'installed': 0, 'error_message': 'None', 'requestStatus':\n installStatus}"], {}), "({'abort': 1, 'installed': 0, 'error_message': 'None',\n 'requestStatus': installStatus})\n", (20925, 21016), False, 'import json\n'), ((21135, 21159), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (21147, 21159), False, 'from django.shortcuts import HttpResponse, render\n'), ((21208, 21293), 'json.dumps', 'json.dumps', (["{'abort': 0, 'error_message': 'None', 'requestStatus': installStatus}"], {}), "({'abort': 0, 'error_message': 'None', 'requestStatus':\n installStatus})\n", (21218, 21293), False, 'import json\n'), ((21392, 21416), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (21404, 21416), False, 'from django.shortcuts import HttpResponse, render\n'), ((30951, 30971), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (30961, 30971), False, 'import json\n'), ((30999, 31022), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (31011, 31022), False, 'from django.shortcuts import HttpResponse, render\n'), ((31151, 31171), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (31161, 31171), False, 'import json\n'), ((31199, 31222), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (31211, 31222), False, 'from django.shortcuts import HttpResponse, render\n'), ((33369, 33389), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (33379, 33389), False, 'import json\n'), ((33417, 33440), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (33429, 33440), False, 'from django.shortcuts import HttpResponse, render\n'), ((33569, 33589), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (33579, 33589), False, 'import json\n'), ((33617, 33640), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (33629, 33640), False, 'from django.shortcuts import HttpResponse, render\n'), ((36120, 36141), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (36130, 36141), False, 'import json\n'), ((36169, 36193), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (36181, 36193), False, 'from django.shortcuts import HttpResponse, render\n'), ((36305, 36326), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (36315, 36326), False, 'import json\n'), ((36354, 36378), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (36366, 36378), False, 'from django.shortcuts import HttpResponse, render\n'), ((41174, 41195), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (41184, 41195), False, 'import json\n'), ((41223, 41247), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (41235, 41247), False, 'from django.shortcuts import HttpResponse, render\n'), ((41358, 41379), 'json.dumps', 'json.dumps', (['final_dic'], {}), '(final_dic)\n', (41368, 41379), False, 'import json\n'), ((41407, 41431), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (41419, 41431), False, 'from django.shortcuts import HttpResponse, render\n'), ((41652, 41695), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (41686, 41695), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((43243, 43263), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (43253, 43263), False, 'import json\n'), ((43291, 43314), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (43303, 43314), False, 'from django.shortcuts import HttpResponse, render\n'), ((43446, 43466), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (43456, 43466), False, 'import json\n'), ((43494, 43517), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (43506, 43517), False, 'from django.shortcuts import HttpResponse, render\n'), ((43646, 43733), 'json.dumps', 'json.dumps', (["{'installStatus': 0, 'error_message': 'OWASP will be available later.'}"], {}), "({'installStatus': 0, 'error_message':\n 'OWASP will be available later.'})\n", (43656, 43733), False, 'import json\n'), ((43759, 43783), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (43771, 43783), False, 'from django.shortcuts import HttpResponse, render\n'), ((44183, 44203), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (44193, 44203), False, 'import json\n'), ((44231, 44254), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (44243, 44254), False, 'from django.shortcuts import HttpResponse, render\n'), ((44386, 44406), 'json.dumps', 'json.dumps', (['data_ret'], {}), '(data_ret)\n', (44396, 44406), False, 'import json\n'), ((44434, 44457), 'django.shortcuts.HttpResponse', 'HttpResponse', (['json_data'], {}), '(json_data)\n', (44446, 44457), False, 'from django.shortcuts import HttpResponse, render\n'), ((46582, 46667), 'json.dumps', 'json.dumps', (["{'fetchStatus': 0, 'error_message': 'OWASP will be available later.'}"], {}), "({'fetchStatus': 0, 'error_message':\n 'OWASP will be available later.'})\n", (46592, 46667), False, 'import json\n'), ((46693, 46717), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (46705, 46717), False, 'from django.shortcuts import HttpResponse, render\n'), ((52490, 52527), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (52518, 52527), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((52557, 52658), 'json.dumps', 'json.dumps', (["{'abort': 1, 'installed': 0, 'error_message': 'None', 'requestStatus':\n installStatus}"], {}), "({'abort': 1, 'installed': 0, 'error_message': 'None',\n 'requestStatus': installStatus})\n", (52567, 52658), False, 'import json\n'), ((52884, 52908), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (52896, 52908), False, 'from django.shortcuts import HttpResponse, render\n'), ((52957, 53042), 'json.dumps', 'json.dumps', (["{'abort': 0, 'error_message': 'None', 'requestStatus': installStatus}"], {}), "({'abort': 0, 'error_message': 'None', 'requestStatus':\n installStatus})\n", (52967, 53042), False, 'import json\n'), ((53228, 53252), 'django.shortcuts.HttpResponse', 'HttpResponse', (['final_json'], {}), '(final_json)\n', (53240, 53252), False, 'from django.shortcuts import HttpResponse, render\n'), ((58637, 58659), 'plogical.csf.CSF.blockIP', 'CSF.blockIP', (['ipAddress'], {}), '(ipAddress)\n', (58648, 58659), False, 'from plogical.csf import CSF\n'), ((59935, 59991), 'django.shortcuts.render', 'render', (['self.request', '"""firewall/notAvailable.html"""', 'data'], {}), "(self.request, 'firewall/notAvailable.html', data)\n", (59941, 59991), False, 'from django.shortcuts import HttpResponse, render\n'), ((60033, 60084), 'django.shortcuts.render', 'render', (['self.request', '"""firewall/imunify.html"""', 'data'], {}), "(self.request, 'firewall/imunify.html', data)\n", (60039, 60084), False, 'from django.shortcuts import HttpResponse, render\n'), ((2463, 2478), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (2473, 2478), False, 'import json\n'), ((2583, 2598), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (2593, 2598), False, 'import json\n'), ((10047, 10086), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['cat'], {}), '(cat)\n', (10081, 10086), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((10930, 10969), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['cat'], {}), '(cat)\n', (10964, 10969), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((13672, 13715), 'firewall.models.FirewallRules.objects.get', 'FirewallRules.objects.get', ([], {'name': '"""SSHCustom"""'}), "(name='SSHCustom')\n", (13697, 13715), False, 'from firewall.models import FirewallRules\n'), ((13740, 13803), 'plogical.firewallUtilities.FirewallUtilities.deleteRule', 'FirewallUtilities.deleteRule', (['"""tcp"""', 'updateFW.port', '"""0.0.0.0/0"""'], {}), "('tcp', updateFW.port, '0.0.0.0/0')\n", (13768, 13803), False, 'from plogical.firewallUtilities import FirewallUtilities\n'), ((13916, 13970), 'plogical.firewallUtilities.FirewallUtilities.addRule', 'FirewallUtilities.addRule', (['"""tcp"""', 'sshPort', '"""0.0.0.0/0"""'], {}), "('tcp', sshPort, '0.0.0.0/0')\n", (13941, 13970), False, 'from plogical.firewallUtilities import FirewallUtilities\n'), ((18211, 18254), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (18245, 18254), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((25755, 25798), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (25789, 25798), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((29933, 29952), 'random.randint', 'randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (29940, 29952), False, 'from random import randint\n'), ((32407, 32426), 'random.randint', 'randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (32414, 32426), False, 'from random import randint\n'), ((34342, 34385), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (34376, 34385), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((35390, 35433), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (35424, 35433), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((36581, 36624), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (36615, 36624), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((38964, 39007), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (38998, 39007), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((40019, 40062), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (40053, 40062), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((45221, 45264), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (45255, 45264), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((22604, 22647), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (22638, 22647), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((35899, 35942), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (35933, 35942), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((40489, 40532), 'plogical.processUtilities.ProcessUtilities.outputExecutioner', 'ProcessUtilities.outputExecutioner', (['command'], {}), '(command)\n', (40523, 40532), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((47805, 47820), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (47815, 47820), False, 'import json\n'), ((47937, 47952), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (47947, 47952), False, 'import json\n'), ((11862, 11877), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (11872, 11877), False, 'import json\n'), ((12006, 12021), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (12016, 12021), False, 'import json\n'), ((14074, 14132), 'firewall.models.FirewallRules', 'FirewallRules', ([], {'name': '"""SSHCustom"""', 'port': 'sshPort', 'proto': '"""tcp"""'}), "(name='SSHCustom', port=sshPort, proto='tcp')\n", (14087, 14132), False, 'from firewall.models import FirewallRules\n'), ((14212, 14266), 'plogical.firewallUtilities.FirewallUtilities.addRule', 'FirewallUtilities.addRule', (['"""tcp"""', 'sshPort', '"""0.0.0.0/0"""'], {}), "('tcp', sshPort, '0.0.0.0/0')\n", (14237, 14266), False, 'from plogical.firewallUtilities import FirewallUtilities\n'), ((14381, 14418), 'plogical.processUtilities.ProcessUtilities.executioner', 'ProcessUtilities.executioner', (['command'], {}), '(command)\n', (14409, 14418), False, 'from plogical.processUtilities import ProcessUtilities\n'), ((46134, 46149), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (46144, 46149), False, 'import json\n'), ((46278, 46293), 'json.dumps', 'json.dumps', (['dic'], {}), '(dic)\n', (46288, 46293), False, 'import json\n')] |
import re
import markdown
from django.contrib import messages
from django.db.models import Q
from django.shortcuts import render, get_object_or_404, redirect
from django.utils.text import slugify
from django.views.generic import ListView, DetailView
from markdown.extensions.toc import TocExtension
from pure_pagination.mixins import PaginationMixin
from .models import Post, Category, Tag
class IndexView(PaginationMixin, ListView):
model = Post
template_name = 'blog/index.html'
context_object_name = 'post_list'
# 指定 paginate_by 属性后开启分页功能,其值代表每一页包含多少篇文章
paginate_by = 10
class PostDetailView(DetailView):
# 这些属性的含义和 ListView 是一样的
model = Post
template_name = 'blog/detail.html'
context_object_name = 'post'
def get(self, request, *args, **kwargs):
# 覆写 get 方法的目的是因为每当文章被访问一次,就得将文章阅读量 +1
# get 方法返回的是一个 HttpResponse 实例
# 之所以需要先调用父类的 get 方法,是因为只有当 get 方法被调用后,
# 才有 self.object 属性,其值为 Post 模型实例,即被访问的文章 post
response = super(PostDetailView, self).get(request, *args, **kwargs)
# 将文章阅读量 +1
# 注意 self.object 的值就是被访问的文章 post
self.object.increase_views()
# 视图必须返回一个 HttpResponse 对象
return response
def get_object(self, queryset=None):
# 覆写 get_object 方法的目的是因为需要对 post 的 body 值进行渲染
post = super().get_object(queryset=None)
md = markdown.Markdown(extensions=[
'markdown.extensions.extra',
'markdown.extensions.codehilite',
# 记得在顶部引入 TocExtension 和 slugify
TocExtension(slugify=slugify),
])
post.body = md.convert(post.body)
m = re.search(r'<div class="toc">\s*<ul>(.*)</ul>\s*</div>', md.toc, re.S)
post.toc = m.group(1) if m is not None else ''
return post
class ArchiveView(IndexView):
def get_queryset(self):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
return super(ArchiveView, self).get_queryset().filter(created_time__year=year,
created_time__month=month)
class CategoryView(IndexView):
def get_queryset(self):
cate = get_object_or_404(Category, pk=self.kwargs.get('pk'))
return super(CategoryView, self).get_queryset().filter(category=cate)
class TagView(ListView):
model = Tag
template_name = 'blog/index.html'
context_object_name = 'post_list'
def get_queryset(self):
t = get_object_or_404(Tag, pk=self.kwargs.get('pk'))
return super(TagView, self).get_queryset().filter(tags=t)
def search(request):
q = request.GET.get('q')
if not q:
error_msg = "请输入搜索关键词"
messages.add_message(request, messages.ERROR, error_msg, extra_tags='danger')
return redirect('blog:index')
post_list = Post.objects.filter(Q(title__icontains=q) | Q(body__icontains=q))
return render(request, 'blog/index.html', {'post_list': post_list})
| [
"django.shortcuts.render",
"django.shortcuts.redirect",
"markdown.extensions.toc.TocExtension",
"django.contrib.messages.add_message",
"django.db.models.Q",
"re.search"
] | [((2914, 2974), 'django.shortcuts.render', 'render', (['request', '"""blog/index.html"""', "{'post_list': post_list}"], {}), "(request, 'blog/index.html', {'post_list': post_list})\n", (2920, 2974), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1652, 1723), 're.search', 're.search', (['"""<div class="toc">\\\\s*<ul>(.*)</ul>\\\\s*</div>"""', 'md.toc', 're.S'], {}), '(\'<div class="toc">\\\\s*<ul>(.*)</ul>\\\\s*</div>\', md.toc, re.S)\n', (1661, 1723), False, 'import re\n'), ((2704, 2781), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.ERROR', 'error_msg'], {'extra_tags': '"""danger"""'}), "(request, messages.ERROR, error_msg, extra_tags='danger')\n", (2724, 2781), False, 'from django.contrib import messages\n'), ((2797, 2819), 'django.shortcuts.redirect', 'redirect', (['"""blog:index"""'], {}), "('blog:index')\n", (2805, 2819), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2857, 2878), 'django.db.models.Q', 'Q', ([], {'title__icontains': 'q'}), '(title__icontains=q)\n', (2858, 2878), False, 'from django.db.models import Q\n'), ((2881, 2901), 'django.db.models.Q', 'Q', ([], {'body__icontains': 'q'}), '(body__icontains=q)\n', (2882, 2901), False, 'from django.db.models import Q\n'), ((1555, 1584), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'slugify': 'slugify'}), '(slugify=slugify)\n', (1567, 1584), False, 'from markdown.extensions.toc import TocExtension\n')] |
import os
KEY = os.environ['KEY']
from urllib.parse import parse_qs, urlparse
import requests
def getid(url):
if url.startswith("http"):
try:
url_data = urlparse(url)
query = parse_qs(url_data.query)
return query["v"][0]
except KeyError:
return url.split("/")[-1]
else:
return url
def video(id):
url = "https://youtube.googleapis.com/youtube/v3/videos"
params = {
"part": "snippet,contentDetails,statistics",
"id": id,
"key": KEY
}
res = requests.get(url, params=params)
return res.json()
def comment(id, page_token=''):
url = "https://youtube.googleapis.com/youtube/v3/commentThreads"
params = {
"part": "snippet,replies",
"videoId": id,
"next_page_token": page_token,
"key": KEY
}
res = requests.get(url, params=params)
return res.json(), res.json().get("nextPageToken", "")
| [
"urllib.parse.parse_qs",
"urllib.parse.urlparse",
"requests.get"
] | [((565, 597), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (577, 597), False, 'import requests\n'), ((870, 902), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (882, 902), False, 'import requests\n'), ((182, 195), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (190, 195), False, 'from urllib.parse import parse_qs, urlparse\n'), ((216, 240), 'urllib.parse.parse_qs', 'parse_qs', (['url_data.query'], {}), '(url_data.query)\n', (224, 240), False, 'from urllib.parse import parse_qs, urlparse\n')] |
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=unused-import
import json
import subprocess
import sys
from pathlib import Path
from typing import Callable, Dict
import pytest
import models_library
pytest_plugins = [
"pytest_simcore.repository_paths",
"pytest_simcore.schemas",
]
@pytest.fixture(scope="session")
def package_dir():
pdir = Path(models_library.__file__).resolve().parent
assert pdir.exists()
return pdir
@pytest.fixture(scope="session")
def json_diff_script(script_dir: Path) -> Path:
json_diff_script = script_dir / "json-schema-diff.bash"
assert json_diff_script.exists()
return json_diff_script
@pytest.fixture(scope="session")
def diff_json_schemas(json_diff_script: Path, tmp_path_factory: Path) -> Callable:
def _run_diff(schema_a: Dict, schema_b: Dict) -> subprocess.CompletedProcess:
tmp_path = tmp_path_factory.mktemp(__name__)
schema_a_path = tmp_path / "schema_a.json"
schema_a_path.write_text(json.dumps(schema_a))
schema_b_path = tmp_path / "schema_b.json"
schema_b_path.write_text(json.dumps(schema_b))
command = [json_diff_script, schema_a_path, schema_b_path]
return subprocess.run(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
check=False,
cwd=tmp_path,
)
yield _run_diff
| [
"pytest.fixture",
"json.dumps",
"subprocess.run",
"pathlib.Path"
] | [((337, 368), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (351, 368), False, 'import pytest\n'), ((490, 521), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (504, 521), False, 'import pytest\n'), ((698, 729), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (712, 729), False, 'import pytest\n'), ((1242, 1346), 'subprocess.run', 'subprocess.run', (['command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'check': '(False)', 'cwd': 'tmp_path'}), '(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,\n check=False, cwd=tmp_path)\n', (1256, 1346), False, 'import subprocess\n'), ((1032, 1052), 'json.dumps', 'json.dumps', (['schema_a'], {}), '(schema_a)\n', (1042, 1052), False, 'import json\n'), ((1138, 1158), 'json.dumps', 'json.dumps', (['schema_b'], {}), '(schema_b)\n', (1148, 1158), False, 'import json\n'), ((399, 428), 'pathlib.Path', 'Path', (['models_library.__file__'], {}), '(models_library.__file__)\n', (403, 428), False, 'from pathlib import Path\n')] |
# Copyright (c) 2017, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
"""
Forest Fire Parameter Exploration
=================================
This example shows parallel execution of multiple forest fire
simulations with parameters swept over a range of values to
collect and display statistics about the model.
In this example, we use a modified version of a forest fire
simulation with the following states:
- Empty cell: 0
- Healthy tree: 1
- Burning tree: 2
- Moldy tree: 3
Every tick:
- an empty cell can grow a tree
- fires are randomly started and burn down all connected trees
- crowded trees have a chance of contracting and dying from mold
infection
The script runs the simulation with different values for the
likelihood of mold infection. As the probability grows, a qualitative
decrease can be seen in the size and effect of fires, as the deaths
due to mold have the effect of breaking up large groups of trees into
less-connected groves, making it harder for fire to spread.
"""
import numpy as np
from cellular_automata.automata_recorder import AutomataRecorder, count_states
from cellular_automata.cellular_automaton import CellularAutomaton
from cellular_automata.rules.change_state_rule import ChangeStateRule
from cellular_automata.rules.forest import BurnGrovesRule, MoldRule
# State values
EMPTY = 0
TREE = 1
FIRE = 2
MOLD = 3
def simulation(p_mold, size, steps):
""" Perform a simulation of a moldy forest, returning statistics.
Parameters
----------
p_mold : probability
The probability that a crowded tree dies of mold.
size : size tuple
The number of cells in each direction for the simulation.
steps : int
The number of ticks to run the simulation for.
Returns
-------
counts : array
Array with shape (4, steps) of counts of each state at
each tick.
"""
np.random.seed(None)
# trees grow
grow = ChangeStateRule(
from_state=EMPTY,
to_state=TREE,
p_change=0.0025
)
# fires are started, and all connected trees burn
burn_groves = BurnGrovesRule()
# crowded trees have a chance to be infected with mold
mold = MoldRule(dead_state=MOLD, p_mold=p_mold)
# trees which are infected with mold die
mold_die = ChangeStateRule(
from_state=MOLD,
to_state=EMPTY,
p_change=1.0
)
# fires are extinguished
fire_out = ChangeStateRule(
from_state=FIRE,
to_state=EMPTY,
p_change=1.0
)
forest = CellularAutomaton(
shape=size,
rules=[mold_die, fire_out, grow, burn_groves, mold],
)
# record the number of each state
recorder = AutomataRecorder(automaton=forest, transform=count_states)
forest.start()
for i in range(steps):
forest.step()
return recorder.as_array()
if __name__ == '__main__':
from joblib import Parallel, delayed
import matplotlib.pyplot as plt
SHAPE = (256, 256)
N_STEPS = 4096
N_SIMULATIONS = 16
results = Parallel(n_jobs=4)(
delayed(simulation)(p_mold, SHAPE, N_STEPS, count_states)
for p_mold in np.logspace(-4, -1, N_SIMULATIONS)
)
for i, result in enumerate(results):
# plot count of each non-empty state over time
plt.subplot(N_SIMULATIONS, 2, 2*i+1)
for state, color in [(TREE, 'g'), (FIRE, 'r'), (MOLD, 'c')]:
plt.plot(result[state, :], c=color)
# plot histogram
plt.subplot(N_SIMULATIONS, 2, 2*i+2)
plt.hist(
np.log(result[result[1] != 0, 1]),
bins=np.linspace(0, 10, 21)
)
plt.show()
| [
"cellular_automata.rules.forest.BurnGrovesRule",
"matplotlib.pyplot.plot",
"numpy.log",
"numpy.logspace",
"joblib.Parallel",
"matplotlib.pyplot.subplot",
"cellular_automata.rules.forest.MoldRule",
"numpy.linspace",
"numpy.random.seed",
"cellular_automata.automata_recorder.AutomataRecorder",
"job... | [((2204, 2224), 'numpy.random.seed', 'np.random.seed', (['None'], {}), '(None)\n', (2218, 2224), True, 'import numpy as np\n'), ((2254, 2319), 'cellular_automata.rules.change_state_rule.ChangeStateRule', 'ChangeStateRule', ([], {'from_state': 'EMPTY', 'to_state': 'TREE', 'p_change': '(0.0025)'}), '(from_state=EMPTY, to_state=TREE, p_change=0.0025)\n', (2269, 2319), False, 'from cellular_automata.rules.change_state_rule import ChangeStateRule\n'), ((2423, 2439), 'cellular_automata.rules.forest.BurnGrovesRule', 'BurnGrovesRule', ([], {}), '()\n', (2437, 2439), False, 'from cellular_automata.rules.forest import BurnGrovesRule, MoldRule\n'), ((2511, 2551), 'cellular_automata.rules.forest.MoldRule', 'MoldRule', ([], {'dead_state': 'MOLD', 'p_mold': 'p_mold'}), '(dead_state=MOLD, p_mold=p_mold)\n', (2519, 2551), False, 'from cellular_automata.rules.forest import BurnGrovesRule, MoldRule\n'), ((2613, 2675), 'cellular_automata.rules.change_state_rule.ChangeStateRule', 'ChangeStateRule', ([], {'from_state': 'MOLD', 'to_state': 'EMPTY', 'p_change': '(1.0)'}), '(from_state=MOLD, to_state=EMPTY, p_change=1.0)\n', (2628, 2675), False, 'from cellular_automata.rules.change_state_rule import ChangeStateRule\n'), ((2751, 2813), 'cellular_automata.rules.change_state_rule.ChangeStateRule', 'ChangeStateRule', ([], {'from_state': 'FIRE', 'to_state': 'EMPTY', 'p_change': '(1.0)'}), '(from_state=FIRE, to_state=EMPTY, p_change=1.0)\n', (2766, 2813), False, 'from cellular_automata.rules.change_state_rule import ChangeStateRule\n'), ((2858, 2944), 'cellular_automata.cellular_automaton.CellularAutomaton', 'CellularAutomaton', ([], {'shape': 'size', 'rules': '[mold_die, fire_out, grow, burn_groves, mold]'}), '(shape=size, rules=[mold_die, fire_out, grow, burn_groves,\n mold])\n', (2875, 2944), False, 'from cellular_automata.cellular_automaton import CellularAutomaton\n'), ((3018, 3076), 'cellular_automata.automata_recorder.AutomataRecorder', 'AutomataRecorder', ([], {'automaton': 'forest', 'transform': 'count_states'}), '(automaton=forest, transform=count_states)\n', (3034, 3076), False, 'from cellular_automata.automata_recorder import AutomataRecorder, count_states\n'), ((3963, 3973), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3971, 3973), True, 'import matplotlib.pyplot as plt\n'), ((3365, 3383), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': '(4)'}), '(n_jobs=4)\n', (3373, 3383), False, 'from joblib import Parallel, delayed\n'), ((3619, 3659), 'matplotlib.pyplot.subplot', 'plt.subplot', (['N_SIMULATIONS', '(2)', '(2 * i + 1)'], {}), '(N_SIMULATIONS, 2, 2 * i + 1)\n', (3630, 3659), True, 'import matplotlib.pyplot as plt\n'), ((3807, 3847), 'matplotlib.pyplot.subplot', 'plt.subplot', (['N_SIMULATIONS', '(2)', '(2 * i + 2)'], {}), '(N_SIMULATIONS, 2, 2 * i + 2)\n', (3818, 3847), True, 'import matplotlib.pyplot as plt\n'), ((3737, 3772), 'matplotlib.pyplot.plot', 'plt.plot', (['result[state, :]'], {'c': 'color'}), '(result[state, :], c=color)\n', (3745, 3772), True, 'import matplotlib.pyplot as plt\n'), ((3874, 3907), 'numpy.log', 'np.log', (['result[result[1] != 0, 1]'], {}), '(result[result[1] != 0, 1])\n', (3880, 3907), True, 'import numpy as np\n'), ((3393, 3412), 'joblib.delayed', 'delayed', (['simulation'], {}), '(simulation)\n', (3400, 3412), False, 'from joblib import Parallel, delayed\n'), ((3473, 3507), 'numpy.logspace', 'np.logspace', (['(-4)', '(-1)', 'N_SIMULATIONS'], {}), '(-4, -1, N_SIMULATIONS)\n', (3484, 3507), True, 'import numpy as np\n'), ((3926, 3948), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(21)'], {}), '(0, 10, 21)\n', (3937, 3948), True, 'import numpy as np\n')] |